From 4f0d2e15c6da1d45b2999f5bed11e47abc87c345 Mon Sep 17 00:00:00 2001 From: Enrico Vianello Date: Mon, 10 Jul 2023 16:48:39 +0200 Subject: [PATCH 1/3] Format all files + LICENSE update --- LICENSE | 3 +- pom.xml | 24 + src/main/assemblies/assembly.xml | 3 +- src/main/java/it/grid/storm/Constants.java | 15 +- src/main/java/it/grid/storm/Main.java | 7 +- src/main/java/it/grid/storm/ShutdownHook.java | 6 +- src/main/java/it/grid/storm/StoRM.java | 115 +- .../StoRMDefaultUncaughtExceptionHandler.java | 18 +- .../storm/acl/AclManagementInterface.java | 50 +- .../java/it/grid/storm/acl/AclManager.java | 80 +- .../java/it/grid/storm/acl/AclManagerFS.java | 157 +- .../it/grid/storm/asynch/AdvancedPicker.java | 118 +- src/main/java/it/grid/storm/asynch/BoL.java | 75 +- .../java/it/grid/storm/asynch/BoLFeeder.java | 153 +- .../grid/storm/asynch/BoLPersistentChunk.java | 31 +- .../grid/storm/asynch/BuilderException.java | 16 +- .../storm/asynch/GlobalStatusManager.java | 288 +- .../InvalidBoLChunkAttributesException.java | 12 +- .../InvalidBoLFeederAttributesException.java | 9 +- ...validOverallRequestAttributeException.java | 17 +- ...dPersistentRequestAttributesException.java | 27 +- .../asynch/InvalidPtGAttributesException.java | 17 +- .../InvalidPtGChunkAttributesException.java | 23 +- .../InvalidPtGFeederAttributesException.java | 13 +- .../InvalidPtPFeederAttributesException.java | 13 +- ...InvalidPutDoneReplyAttributeException.java | 9 +- .../InvalidPutReplyAttributeException.java | 9 +- .../InvalidPutStatusAttributesException.java | 15 +- .../InvalidRequestAttributesException.java | 20 +- src/main/java/it/grid/storm/asynch/PtG.java | 381 +- .../java/it/grid/storm/asynch/PtGBuilder.java | 40 +- .../java/it/grid/storm/asynch/PtGFeeder.java | 141 +- .../grid/storm/asynch/PtGPersistentChunk.java | 55 +- src/main/java/it/grid/storm/asynch/PtP.java | 372 +- .../java/it/grid/storm/asynch/PtPBuilder.java | 53 +- .../java/it/grid/storm/asynch/PtPFeeder.java | 46 +- .../grid/storm/asynch/PtPPersistentChunk.java | 62 +- .../java/it/grid/storm/asynch/Request.java | 8 +- .../it/grid/storm/asynch/RequestChunk.java | 3 +- .../storm/asynch/SRMPrepareToPutReply.java | 12 +- .../it/grid/storm/asynch/SRMPutDoneReply.java | 16 +- .../asynch/SRMStatusOfPutRequestReply.java | 21 +- .../it/grid/storm/asynch/SchedulerFacade.java | 21 +- .../grid/storm/asynch/SchedulerFactory.java | 14 +- .../it/grid/storm/asynch/Suspendedable.java | 4 +- .../asynch/UnsupportedOperationFeeder.java | 5 +- .../it/grid/storm/authz/AuthzDecision.java | 16 +- .../it/grid/storm/authz/AuthzDirector.java | 276 +- .../it/grid/storm/authz/AuthzException.java | 42 +- .../grid/storm/authz/DirectorException.java | 32 +- .../grid/storm/authz/PathAuthzInterface.java | 27 +- .../grid/storm/authz/SpaceAuthzInterface.java | 14 +- .../it/grid/storm/authz/path/PathAuthz.java | 63 +- .../storm/authz/path/conf/PathAuthzDB.java | 20 +- .../authz/path/conf/PathAuthzDBReader.java | 85 +- .../grid/storm/authz/path/model/PathACE.java | 59 +- .../authz/path/model/PathAccessMask.java | 20 +- .../path/model/PathAuthzAlgBestMatch.java | 62 +- .../model/PathAuthzEvaluationAlgorithm.java | 29 +- .../storm/authz/path/model/PathOperation.java | 31 +- .../storm/authz/path/model/PathPrincipal.java | 13 +- .../authz/path/model/SRMFileRequest.java | 292 +- .../it/grid/storm/authz/remote/Constants.java | 10 +- .../resource/AuthorizationResource.java | 713 ++-- .../AuthorizationResourceCompat_1_0.java | 38 +- .../remote/resource/PermissionEvaluator.java | 279 +- 
.../remote/resource/RequestParameters.java | 117 +- .../grid/storm/authz/sa/AuthzDBInterface.java | 5 +- .../authz/sa/AuthzDBReaderException.java | 7 +- .../authz/sa/AuthzDBReaderInterface.java | 15 +- .../it/grid/storm/authz/sa/SpaceAuthz.java | 5 +- .../it/grid/storm/authz/sa/SpaceDBAuthz.java | 51 +- .../grid/storm/authz/sa/SpaceFixedAuthz.java | 49 +- .../it/grid/storm/authz/sa/model/AceType.java | 10 +- .../storm/authz/sa/model/AuthzDBFixed.java | 18 +- .../authz/sa/model/DNEveryonePattern.java | 22 +- .../grid/storm/authz/sa/model/DNPattern.java | 60 +- .../storm/authz/sa/model/EGEEFQANPattern.java | 29 +- .../grid/storm/authz/sa/model/Everyone.java | 13 +- .../authz/sa/model/FQANEveryonePattern.java | 17 +- .../storm/authz/sa/model/FQANPattern.java | 7 +- .../storm/authz/sa/model/FileAuthzDB.java | 22 +- .../storm/authz/sa/model/SRMSpaceRequest.java | 77 +- .../grid/storm/authz/sa/model/SpaceACE.java | 32 +- .../storm/authz/sa/model/SpaceAccessMask.java | 13 +- .../storm/authz/sa/model/SpaceOperation.java | 25 +- .../storm/authz/sa/model/SubjectPattern.java | 13 +- .../authz/sa/model/SubjectPatternOld.java | 4 +- .../storm/authz/sa/model/SubjectType.java | 9 +- .../storm/authz/sa/test/MockSpaceAuthz.java | 12 +- .../authz/util/ConfigurationWatcher.java | 12 +- .../util/PathAuthzConfigurationWatcher.java | 19 +- .../storm/balancer/BalancingStrategy.java | 3 +- .../storm/balancer/BalancingStrategyType.java | 7 +- .../java/it/grid/storm/balancer/Node.java | 4 +- .../storm/balancer/cache/Responsiveness.java | 7 +- .../balancer/cache/ResponsivenessCache.java | 15 +- .../cache/ResponsivenessCacheEntry.java | 6 +- .../exception/BalancingStrategyException.java | 15 +- .../storm/balancer/node/AbstractNode.java | 27 +- .../it/grid/storm/balancer/node/FTPNode.java | 3 +- .../it/grid/storm/balancer/node/HttpNode.java | 3 +- .../grid/storm/balancer/node/HttpsNode.java | 3 +- .../strategy/AbstractBalancingStrategy.java | 13 +- .../strategy/BalancingStrategyFactory.java | 11 +- .../balancer/strategy/CyclicCounter.java | 25 +- .../balancer/strategy/RandomStrategy.java | 7 +- .../balancer/strategy/RoundRobinStrategy.java | 9 +- .../strategy/SmartRoundRobinStrategy.java | 11 +- .../balancer/strategy/WeightStrategy.java | 10 +- .../catalogs/AnonymousFileTransferData.java | 84 +- .../grid/storm/catalogs/AnonymousPtGData.java | 403 +- .../grid/storm/catalogs/AnonymousPtPData.java | 441 +- .../grid/storm/catalogs/BoLChunkCatalog.java | 1501 ++++--- .../it/grid/storm/catalogs/BoLChunkDAO.java | 3371 ++++++++-------- .../grid/storm/catalogs/BoLChunkDataTO.java | 333 +- .../java/it/grid/storm/catalogs/BoLData.java | 224 +- .../catalogs/BoLPersistentChunkData.java | 160 +- .../it/grid/storm/catalogs/ChunkDAOUtils.java | 10 +- .../it/grid/storm/catalogs/ChunkData.java | 11 +- .../grid/storm/catalogs/CopyChunkCatalog.java | 873 ++-- .../it/grid/storm/catalogs/CopyChunkDAO.java | 1531 +++---- .../grid/storm/catalogs/CopyChunkDataTO.java | 368 +- .../java/it/grid/storm/catalogs/CopyData.java | 229 +- .../catalogs/CopyGlobalFlagConverter.java | 134 +- .../catalogs/CopyPersistentChunkData.java | 144 +- .../catalogs/CopySpecificFlagConverter.java | 134 +- .../storm/catalogs/DirOptionConverter.java | 72 +- .../storm/catalogs/FileLifetimeConverter.java | 73 +- .../catalogs/FileStorageTypeConverter.java | 122 +- .../grid/storm/catalogs/FileTransferData.java | 30 +- .../grid/storm/catalogs/IdentityPtGData.java | 92 +- .../grid/storm/catalogs/IdentityPtPData.java | 103 +- ...nvalidBoLChunkDataAttributesException.java | 
118 +- .../InvalidBoLDataAttributesException.java | 158 +- ...ersistentChunkDataAttributesException.java | 113 +- ...validCopyChunkDataAttributesException.java | 118 +- .../InvalidCopyDataAttributesException.java | 173 +- ...ersistentChunkDataAttributesException.java | 220 +- ...idFileTransferDataAttributesException.java | 120 +- .../InvalidPtGDataAttributesException.java | 110 +- ...ersistentChunkDataAttributesException.java | 112 +- .../InvalidPtPDataAttributesException.java | 264 +- ...ersistentChunkDataAttributesException.java | 146 +- ...educedBoLChunkDataAttributesException.java | 63 +- ...ducedCopyChunkDataAttributesException.java | 64 +- ...educedPtGChunkDataAttributesException.java | 63 +- ...educedPtPChunkDataAttributesException.java | 86 +- ...RequestSummaryDataAttributesException.java | 75 +- .../InvalidRetrievedDataException.java | 92 +- .../InvalidSpaceDataAttributesException.java | 34 +- ...lidSurlRequestDataAttributesException.java | 92 +- .../java/it/grid/storm/catalogs/JiTData.java | 68 +- .../catalogs/MalformedGridUserException.java | 5 +- .../MultipleDataEntriesException.java | 31 +- .../storm/catalogs/NoDataFoundException.java | 31 +- .../catalogs/OverwriteModeConverter.java | 118 +- .../storm/catalogs/PersistentChunkData.java | 11 +- .../storm/catalogs/PinLifetimeConverter.java | 99 +- .../grid/storm/catalogs/PtGChunkCatalog.java | 1572 ++++---- .../it/grid/storm/catalogs/PtGChunkDAO.java | 3555 +++++++++-------- .../grid/storm/catalogs/PtGChunkDataTO.java | 395 +- .../java/it/grid/storm/catalogs/PtGData.java | 45 +- .../catalogs/PtGPersistentChunkData.java | 352 +- .../grid/storm/catalogs/PtPChunkCatalog.java | 1112 +++--- .../it/grid/storm/catalogs/PtPChunkDAO.java | 3169 ++++++++------- .../grid/storm/catalogs/PtPChunkDataTO.java | 432 +- .../java/it/grid/storm/catalogs/PtPData.java | 80 +- .../catalogs/PtPPersistentChunkData.java | 343 +- .../storm/catalogs/ReducedBoLChunkData.java | 212 +- .../storm/catalogs/ReducedBoLChunkDataTO.java | 145 +- .../grid/storm/catalogs/ReducedChunkData.java | 14 +- .../storm/catalogs/ReducedCopyChunkData.java | 218 +- .../catalogs/ReducedCopyChunkDataTO.java | 227 +- .../storm/catalogs/ReducedPtGChunkData.java | 210 +- .../storm/catalogs/ReducedPtGChunkDataTO.java | 145 +- .../storm/catalogs/ReducedPtPChunkData.java | 269 +- .../storm/catalogs/ReducedPtPChunkDataTO.java | 190 +- .../it/grid/storm/catalogs/RequestData.java | 9 +- .../storm/catalogs/RequestSummaryCatalog.java | 796 ++-- .../storm/catalogs/RequestSummaryDAO.java | 2697 ++++++------- .../storm/catalogs/RequestSummaryData.java | 829 ++-- .../storm/catalogs/RequestSummaryDataTO.java | 826 ++-- .../storm/catalogs/RequestTypeConverter.java | 17 +- .../storm/catalogs/ReservedSpaceCatalog.java | 1262 +++--- .../catalogs/SizeInBytesIntConverter.java | 65 +- .../catalogs/SpaceTokenStringConverter.java | 64 +- .../storm/catalogs/StatusCodeConverter.java | 23 +- .../grid/storm/catalogs/StoRMDataSource.java | 131 +- .../SurlMultyOperationRequestData.java | 47 +- .../grid/storm/catalogs/SurlRequestData.java | 431 +- .../SynchMultyOperationRequestData.java | 7 +- .../it/grid/storm/catalogs/TURLConverter.java | 62 +- .../TransferProtocolListConverter.java | 70 +- .../storm/catalogs/VolatileAndJiTCatalog.java | 1151 +++--- .../storm/catalogs/VolatileAndJiTDAO.java | 1643 ++++---- .../surl/DelegatingSURLStatusManager.java | 40 +- .../surl/InMemorySURLStatusManager.java | 148 +- .../storm/catalogs/surl/SURLStatusDAO.java | 303 +- .../catalogs/surl/SURLStatusManager.java | 230 +- 
.../surl/SURLStatusManagerFactory.java | 11 +- .../catalogs/surl/SURLStatusManagerImpl.java | 128 +- .../storm/catalogs/surl/package-info.java | 14 +- .../timertasks/ExpiredPutRequestsAgent.java | 112 +- .../timertasks/RequestsGarbageCollector.java | 244 +- .../catalogs/timertasks/TGarbageData.java | 37 +- src/main/java/it/grid/storm/check/Check.java | 63 +- .../it/grid/storm/check/CheckManager.java | 127 +- .../it/grid/storm/check/CheckResponse.java | 79 +- .../java/it/grid/storm/check/CheckStatus.java | 99 +- .../storm/check/GenericCheckException.java | 15 +- .../grid/storm/check/SimpleCheckManager.java | 159 +- .../check/SimpleClassLoaderCheckManager.java | 165 +- .../check/sanity/filesystem/FakeGridUser.java | 15 +- .../NamespaceFSAssociationCheck.java | 522 +-- .../NamespaceFSExtendedACLUsageCheck.java | 138 +- ...ceFSExtendedAttributeDeclarationCheck.java | 61 +- ...amespaceFSExtendedAttributeUsageCheck.java | 115 +- .../sanity/filesystem/SupportedFSType.java | 46 +- .../storm/checksum/ChecksumAlgorithm.java | 13 +- .../grid/storm/checksum/ChecksumManager.java | 24 +- .../checksum/ChecksumRuntimeException.java | 27 +- src/main/java/it/grid/storm/common/GUID.java | 355 +- .../java/it/grid/storm/common/HostLookup.java | 23 +- .../it/grid/storm/common/OperationType.java | 12 +- .../it/grid/storm/common/SRMConstants.java | 23 +- .../common/exception/StoRMException.java | 8 +- .../it/grid/storm/common/types/EndPoint.java | 230 +- .../InvalidEndPointAttributeException.java | 41 +- .../InvalidMachineAttributeException.java | 32 +- .../types/InvalidPFNAttributeException.java | 47 +- .../InvalidPFNRootAttributeException.java | 40 +- .../types/InvalidPortAttributeException.java | 27 +- .../types/InvalidSFNAttributesException.java | 154 +- .../types/InvalidStFNAttributeException.java | 54 +- .../InvalidStFNRootAttributeException.java | 40 +- .../types/InvalidTFNAttributesException.java | 68 +- .../it/grid/storm/common/types/Machine.java | 101 +- .../java/it/grid/storm/common/types/PFN.java | 147 +- .../it/grid/storm/common/types/PFNRoot.java | 61 +- .../types/ParsingSFNAttributesException.java | 41 +- .../java/it/grid/storm/common/types/Port.java | 160 +- .../java/it/grid/storm/common/types/SFN.java | 1137 +++--- .../grid/storm/common/types/SiteProtocol.java | 82 +- .../it/grid/storm/common/types/SizeUnit.java | 224 +- .../java/it/grid/storm/common/types/StFN.java | 430 +- .../it/grid/storm/common/types/StFNRoot.java | 60 +- .../java/it/grid/storm/common/types/TFN.java | 505 ++- .../grid/storm/common/types/TURLPrefix.java | 200 +- .../it/grid/storm/common/types/TimeUnit.java | 225 +- .../storm/common/types/TransferProtocol.java | 187 +- .../java/it/grid/storm/common/types/VO.java | 74 +- .../grid/storm/concurrency/NamedThread.java | 82 +- .../storm/concurrency/NamedThreadFactory.java | 18 +- .../storm/concurrency/TimingThreadPool.java | 96 +- .../it/grid/storm/config/ConfigReader.java | 17 +- .../it/grid/storm/config/Configuration.java | 137 +- .../it/grid/storm/config/DefaultValue.java | 180 +- .../it/grid/storm/ea/ExtendedAttributes.java | 106 +- .../storm/ea/ExtendedAttributesException.java | 28 +- .../storm/ea/ExtendedAttributesFactory.java | 9 +- .../storm/ea/ExtendedAttributesSwigImpl.java | 83 +- .../it/grid/storm/ea/MetricsEAAdapter.java | 7 +- src/main/java/it/grid/storm/ea/StormEA.java | 32 +- .../it/grid/storm/ea/remote/Constants.java | 18 +- .../ea/remote/resource/RequestParameters.java | 228 +- .../ea/remote/resource/StormEAResource.java | 123 +- 
.../it/grid/storm/filesystem/AclLockPool.java | 94 +- .../storm/filesystem/AclLockPoolElement.java | 85 +- .../grid/storm/filesystem/CannotGiveAway.java | 28 +- .../it/grid/storm/filesystem/FSException.java | 28 +- .../storm/filesystem/FileSystemChecker.java | 22 +- .../FileSystemCheckerException.java | 15 +- .../filesystem/FileSystemCheckerFactory.java | 198 +- .../filesystem/FileSystemCheckerFromFile.java | 318 +- .../filesystem/FileSystemCheckerMounts.java | 188 +- .../FileSystemCheckerMountsMonolithic.java | 305 +- .../filesystem/FileSystemCheckerMtab.java | 172 +- .../FileSystemCheckerMtabMonolithic.java | 303 +- .../it/grid/storm/filesystem/Filesystem.java | 380 +- .../grid/storm/filesystem/FilesystemIF.java | 531 +-- .../filesystem/FilesystemPermission.java | 789 ++-- .../storm/filesystem/GPFSSpaceSystem.java | 159 +- .../InvalidSpaceAttributesException.java | 123 +- .../it/grid/storm/filesystem/LocalFile.java | 1361 +++---- .../filesystem/MetricsFilesystemAdapter.java | 83 +- .../storm/filesystem/MockSpaceSystem.java | 14 +- .../it/grid/storm/filesystem/MtabRow.java | 323 +- .../it/grid/storm/filesystem/MtabUtil.java | 295 +- .../NullGPFSFilesystemException.java | 15 +- .../RandomWaitFilesystemAdapter.java | 43 +- .../filesystem/ReservationException.java | 30 +- .../java/it/grid/storm/filesystem/Space.java | 520 ++- .../it/grid/storm/filesystem/SpaceSystem.java | 83 +- .../filesystem/SpaceSystemException.java | 31 +- .../storm/filesystem/WrongFilesystemType.java | 29 +- .../it/grid/storm/filesystem/swig/test.java | 10 +- .../grid/storm/griduser/AbstractGridUser.java | 149 +- .../griduser/CannotMapUserException.java | 32 +- .../grid/storm/griduser/DNMatchingRule.java | 701 ++-- .../storm/griduser/DistinguishedName.java | 513 ++- .../java/it/grid/storm/griduser/DnMatch.java | 20 +- .../it/grid/storm/griduser/ExactDnMatch.java | 50 +- .../java/it/grid/storm/griduser/FQAN.java | 532 ++- .../java/it/grid/storm/griduser/GridUser.java | 90 +- .../storm/griduser/GridUserException.java | 32 +- .../grid/storm/griduser/GridUserFactory.java | 444 +- .../storm/griduser/GridUserInterface.java | 25 +- .../grid/storm/griduser/GridUserManager.java | 229 +- .../storm/griduser/InvalidFqanSyntax.java | 48 +- .../InvalidGridUserAttributesException.java | 7 +- .../griduser/InvalidSubjectDnSyntax.java | 49 +- .../grid/storm/griduser/LcmapsJNAMapper.java | 252 +- .../it/grid/storm/griduser/LocalUser.java | 122 +- .../grid/storm/griduser/MapperInterface.java | 9 +- .../grid/storm/griduser/SimpleUserMapper.java | 166 +- .../storm/griduser/StormLcmapsJNAMapper.java | 98 +- .../grid/storm/griduser/SubjectAttribute.java | 28 +- .../storm/griduser/VONameMatchingRule.java | 122 +- .../it/grid/storm/griduser/VomsGridUser.java | 328 +- .../storm/griduser/swig/lcmaps_interface.java | 8 +- .../griduser/swig/lcmaps_interfaceJNI.java | 70 +- .../storm/griduser/swig/localuser_info.java | 74 +- .../java/it/grid/storm/health/BookKeeper.java | 41 +- .../it/grid/storm/health/DetectiveGlance.java | 216 +- .../it/grid/storm/health/HealthDirector.java | 396 +- .../it/grid/storm/health/HealthMonitor.java | 111 +- .../java/it/grid/storm/health/Hearthbeat.java | 54 +- .../java/it/grid/storm/health/LogEvent.java | 294 +- .../it/grid/storm/health/OperationType.java | 268 +- .../storm/health/OperationTypeCategory.java | 233 +- .../storm/health/PerformanceBookKeeper.java | 217 +- .../grid/storm/health/PerformanceEvent.java | 104 +- .../grid/storm/health/PerformanceGlance.java | 49 +- .../grid/storm/health/PerformancePulse.java | 42 
+- .../grid/storm/health/PerformanceStatus.java | 118 +- .../it/grid/storm/health/PerformanceTask.java | 51 +- .../grid/storm/health/SimpleBookKeeper.java | 215 +- .../it/grid/storm/health/StoRMStatus.java | 345 +- .../health/external/FSMetadataStatus.java | 114 +- .../java/it/grid/storm/info/InfoService.java | 10 +- .../it/grid/storm/info/SAInfoException.java | 12 +- .../it/grid/storm/info/SpaceInfoManager.java | 29 +- .../storm/info/du/DiskUsageExecCommand.java | 10 +- .../grid/storm/info/du/DiskUsageService.java | 30 +- .../it/grid/storm/info/du/DiskUsageTask.java | 26 +- .../it/grid/storm/info/du/DiskUsageUtils.java | 14 +- .../java/it/grid/storm/info/model/SATree.java | 7 +- .../storm/info/model/SpaceStatusDetailed.java | 230 +- .../storm/info/model/SpaceStatusSummary.java | 554 ++- .../it/grid/storm/info/model/TreeNode.java | 806 ++-- .../info/model/TreeStructureSummary.java | 113 +- .../storm/info/model/VOInfoStatusData.java | 56 +- .../it/grid/storm/info/remote/Constants.java | 60 +- .../storm/info/remote/resources/Ping.java | 118 +- .../remote/resources/SpaceStatusResource.java | 20 +- src/main/java/it/grid/storm/jna/Errno.java | 24 +- .../jna/lcmaps/LcmapsAccountInterface.java | 46 +- .../storm/jna/lcmaps/LcmapsInterface.java | 66 +- .../jna/lcmaps/LcmapsPoolindexInterface.java | 78 +- .../storm/jna/lcmaps/StormLcmapsLibrary.java | 137 +- .../jna/lcmaps/lcmaps_account_info_t.java | 120 +- .../java/it/grid/storm/logging/Files.java | 201 +- .../it/grid/storm/logging/StoRMLoggers.java | 129 +- .../java/it/grid/storm/logging/Strings.java | 137 +- ...medInstrumentedSelectChannelConnector.java | 13 +- .../metrics/NamedInstrumentedThreadPool.java | 86 +- .../storm/metrics/StormMetricRegistry.java | 11 +- .../storm/metrics/StormMetricsReporter.java | 98 +- .../namespace/DefaultValuesInterface.java | 28 +- .../namespace/ExpiredSpaceTokenException.java | 27 +- ...validDescendantsEmptyRequestException.java | 26 +- ...nvalidDescendantsFileRequestException.java | 25 +- ...nvalidDescendantsPathRequestException.java | 26 +- ...DescendantsTDirOptionRequestException.java | 40 +- .../InvalidGetTURLProtocolException.java | 13 +- .../InvalidProtocolForTURLException.java | 32 +- .../storm/namespace/InvalidSURLException.java | 32 +- .../namespace/MalformedSURLException.java | 32 +- .../it/grid/storm/namespace/Namespace.java | 140 +- .../storm/namespace/NamespaceDirector.java | 132 +- .../storm/namespace/NamespaceException.java | 27 +- .../storm/namespace/NamespaceInterface.java | 60 +- .../storm/namespace/NamespaceValidator.java | 168 +- .../storm/namespace/PropertyInterface.java | 24 +- .../java/it/grid/storm/namespace/StoRI.java | 80 +- .../it/grid/storm/namespace/StoRIImpl.java | 1252 +++--- .../it/grid/storm/namespace/TURLBuilder.java | 92 +- .../namespace/TURLBuildingException.java | 25 +- .../namespace/UnapprochableSurlException.java | 12 +- ...validConfigurationFileFormatException.java | 23 +- .../namespace/config/NamespaceCheck.java | 328 +- .../namespace/config/NamespaceLoader.java | 28 +- .../namespace/config/NamespaceParser.java | 52 +- .../storm/namespace/config/xml/XMLConst.java | 491 ++- .../config/xml/XMLNamespaceLoader.java | 602 ++- .../config/xml/XMLNamespaceParser.java | 1573 ++++---- .../namespace/config/xml/XMLParserUtil.java | 2190 +++++----- .../config/xml/XMLReloadingStrategy.java | 160 +- .../grid/storm/namespace/model/ACLEntry.java | 175 +- .../grid/storm/namespace/model/ACLMode.java | 3 +- .../storm/namespace/model/AccessLatency.java | 70 +- 
.../namespace/model/ApproachableRule.java | 449 +-- .../grid/storm/namespace/model/Authority.java | 145 +- .../storm/namespace/model/Capability.java | 83 +- .../storm/namespace/model/DefaultACL.java | 76 +- .../storm/namespace/model/DefaultValues.java | 566 ++- .../storm/namespace/model/ExpirationMode.java | 93 +- .../namespace/model/FilePermissionType.java | 202 +- .../storm/namespace/model/MappingRule.java | 56 +- .../storm/namespace/model/PathCreator.java | 212 +- .../namespace/model/PermissionException.java | 27 +- .../storm/namespace/model/PoolMember.java | 54 +- .../grid/storm/namespace/model/Property.java | 381 +- .../grid/storm/namespace/model/Protocol.java | 301 +- .../storm/namespace/model/ProtocolPool.java | 79 +- .../it/grid/storm/namespace/model/Quota.java | 123 +- .../grid/storm/namespace/model/QuotaInfo.java | 120 +- .../grid/storm/namespace/model/QuotaType.java | 255 +- .../namespace/model/RetentionPolicy.java | 70 +- .../storm/namespace/model/SAAuthzType.java | 93 +- .../it/grid/storm/namespace/model/SAInfo.java | 24 +- .../grid/storm/namespace/model/SAInfoV13.java | 285 +- .../grid/storm/namespace/model/StoRIType.java | 218 +- .../namespace/model/StorageClassType.java | 93 +- .../storm/namespace/model/SubjectRules.java | 68 +- .../namespace/model/TransportProtocol.java | 147 +- .../grid/storm/namespace/model/VirtualFS.java | 550 ++- .../storm/namespace/naming/NameParser.java | 317 +- .../storm/namespace/naming/NamespaceUtil.java | 1095 +++-- .../storm/namespace/naming/NamingConst.java | 49 +- .../grid/storm/namespace/naming/SRMURL.java | 550 ++- .../it/grid/storm/namespace/naming/SURL.java | 500 ++- .../it/grid/storm/namespace/naming/TURL.java | 28 +- .../storm/namespace/remote/Constants.java | 52 +- .../remote/resource/VirtualFSResource.java | 28 +- .../resource/VirtualFSResourceCompat_1_0.java | 35 +- .../resource/VirtualFSResourceCompat_1_1.java | 37 +- .../resource/VirtualFSResourceCompat_1_2.java | 53 +- .../resource/VirtualFSResourceCompat_1_3.java | 28 +- .../namespace/util/userinfo/LocalGroups.java | 163 +- .../util/userinfo/UserInfoCommand.java | 472 ++- .../util/userinfo/UserInfoException.java | 27 +- .../util/userinfo/UserInfoExecutor.java | 75 +- .../util/userinfo/UserInfoParameters.java | 64 +- .../it/grid/storm/persistence/DAOFactory.java | 34 +- .../DataSourceConnectionFactory.java | 10 +- .../storm/persistence/MySqlDAOFactory.java | 226 +- .../persistence/PersistenceDirector.java | 75 +- .../storm/persistence/dao/AbstractDAO.java | 297 +- .../storm/persistence/dao/PtGChunkDAO.java | 15 +- .../storm/persistence/dao/PtPChunkDAO.java | 16 +- .../persistence/dao/RequestSummaryDAO.java | 12 +- .../storm/persistence/dao/StorageAreaDAO.java | 6 +- .../persistence/dao/StorageSpaceDAO.java | 76 +- .../storm/persistence/dao/TapeRecallDAO.java | 330 +- .../exceptions/DataAccessException.java | 16 +- .../exceptions/InfrastructureException.java | 30 +- .../exceptions/PersistenceException.java | 34 +- .../impl/mysql/StorageSpaceDAOMySql.java | 1169 +++--- .../impl/mysql/TapeRecallDAOMySql.java | 1413 ++++--- .../it/grid/storm/persistence/model/GUID.java | 268 +- ...nvalidPtGChunkDataAttributesException.java | 184 +- ...RequestSummaryDataAttributesException.java | 89 +- .../storm/persistence/model/PtGChunkTO.java | 926 +++-- .../storm/persistence/model/PtPChunkTO.java | 8 +- .../persistence/model/RecallTaskType.java | 4 +- .../persistence/model/RequestSummaryTO.java | 450 +-- .../persistence/model/ResourceRuleData.java | 6 +- .../persistence/model/StorageSpaceTO.java | 
755 ++-- .../storm/persistence/model/TapeRecallTO.java | 722 ++-- .../TransferObjectDecodingException.java | 20 +- .../persistence/util/db/DBConnection.java | 145 +- .../persistence/util/db/DBConnectionPool.java | 280 +- .../persistence/util/db/DataBaseStrategy.java | 158 +- .../storm/persistence/util/db/Databases.java | 12 +- .../persistence/util/db/InsertBuilder.java | 88 +- .../persistence/util/db/MySqlFormat.java | 52 +- .../storm/persistence/util/db/SQLBuilder.java | 18 +- .../storm/persistence/util/db/SQLFormat.java | 6 +- .../storm/persistence/util/db/SQLHelper.java | 124 +- .../util/helper/StorageSpaceSQLHelper.java | 1542 ++++--- .../util/helper/TapeRecallMySQLHelper.java | 1276 +++--- .../java/it/grid/storm/rest/JettyThread.java | 11 +- .../java/it/grid/storm/rest/RestServer.java | 81 +- .../grid/storm/rest/auth/RestTokenFilter.java | 74 +- .../it/grid/storm/rest/metadata/Metadata.java | 16 +- .../rest/metadata/model/FileAttributes.java | 39 +- .../rest/metadata/model/StoriMetadata.java | 39 +- .../metadata/model/VirtualFsMetadata.java | 3 +- .../service/ResourceNotFoundException.java | 12 +- .../metadata/service/ResourceService.java | 16 +- .../service/StoriMetadataService.java | 44 +- .../java/it/grid/storm/scheduler/Chooser.java | 29 +- .../grid/storm/scheduler/ChunkScheduler.java | 30 +- .../it/grid/storm/scheduler/ChunkTask.java | 437 +- .../it/grid/storm/scheduler/ChunkType.java | 95 +- .../it/grid/storm/scheduler/CruncherTask.java | 147 +- .../storm/scheduler/CrusherScheduler.java | 22 +- .../it/grid/storm/scheduler/Delegable.java | 33 +- .../scheduler/PersistentRequestChunk.java | 8 +- .../it/grid/storm/scheduler/Scheduler.java | 73 +- .../storm/scheduler/SchedulerException.java | 61 +- .../grid/storm/scheduler/SchedulerStatus.java | 214 +- .../java/it/grid/storm/scheduler/Streets.java | 33 +- .../java/it/grid/storm/scheduler/Task.java | 129 +- .../it/grid/storm/scheduler/WorkerPool.java | 37 +- .../java/it/grid/storm/space/DUResult.java | 52 +- .../java/it/grid/storm/space/ExitStatus.java | 11 +- .../storm/space/IllegalSRMSpaceParameter.java | 39 +- .../storm/space/NullSpaceUpdaterHelper.java | 30 +- .../storm/space/SimpleSpaceUpdaterHelper.java | 136 +- .../java/it/grid/storm/space/SpaceHelper.java | 728 ++-- .../space/SpaceUpdaterHelperFactory.java | 57 +- .../space/SpaceUpdaterHelperInterface.java | 8 +- .../it/grid/storm/space/StorageSpaceData.java | 248 +- .../StorageSpaceNotInitializedException.java | 20 +- .../space/gpfsquota/GPFSFilesetQuotaInfo.java | 23 +- .../storm/space/gpfsquota/GPFSQuotaInfo.java | 167 +- .../space/gpfsquota/GPFSQuotaManager.java | 100 +- .../GetGPFSFilesetQuotaInfoCommand.java | 64 +- .../grid/storm/space/init/UsedSpaceFile.java | 304 +- .../it/grid/storm/srm/types/ArrayOfSURLs.java | 13 +- .../storm/srm/types/ArrayOfTExtraInfo.java | 230 +- .../srm/types/ArrayOfTMetaDataPathDetail.java | 103 +- .../srm/types/ArrayOfTMetaDataSpace.java | 72 +- .../ArrayOfTSURLLifetimeReturnStatus.java | 212 +- .../srm/types/ArrayOfTSURLReturnStatus.java | 286 +- .../storm/srm/types/ArrayOfTSizeInBytes.java | 91 +- .../storm/srm/types/ArrayOfTSpaceToken.java | 140 +- ...InvalidArrayOfSURLsAttributeException.java | 20 +- ...idArrayOfTExtraInfoAttributeException.java | 25 +- ...TMetaDataPathDetailAttributeException.java | 29 +- ...rayOfTMetaDataSpaceAttributeException.java | 26 +- ...OfTSURLReturnStatusAttributeException.java | 23 +- ...dArrayOfTSpaceTokenAttributeException.java | 25 +- .../InvalidTDirOptionAttributesException.java | 33 +- 
.../InvalidTExtraInfoAttributeException.java | 20 +- ...validTMetaDataSpaceAttributeException.java | 29 +- ...validTRequestTokenAttributesException.java | 23 +- .../InvalidTSURLAttributesException.java | 67 +- .../InvalidTSURLInfoAttributeException.java | 22 +- ...ifetimeReturnStatusAttributeException.java | 27 +- ...idTSURLReturnStatusAttributeException.java | 22 +- .../InvalidTSizeAttributesException.java | 52 +- ...InvalidTSpaceTokenAttributesException.java | 19 +- .../InvalidTTURLAttributesException.java | 68 +- .../InvalidTUserIDAttributeException.java | 30 +- .../storm/srm/types/SRMCommandException.java | 6 +- .../grid/storm/srm/types/TAccessLatency.java | 113 +- .../grid/storm/srm/types/TAccessPattern.java | 113 +- .../grid/storm/srm/types/TCheckSumType.java | 39 +- .../grid/storm/srm/types/TCheckSumValue.java | 40 +- .../grid/storm/srm/types/TConnectionType.java | 78 +- .../it/grid/storm/srm/types/TDirOption.java | 165 +- .../it/grid/storm/srm/types/TExtraInfo.java | 161 +- .../grid/storm/srm/types/TFileLocality.java | 242 +- .../storm/srm/types/TFileStorageType.java | 196 +- .../it/grid/storm/srm/types/TFileType.java | 80 +- .../it/grid/storm/srm/types/TGroupID.java | 38 +- .../storm/srm/types/TGroupPermission.java | 75 +- .../storm/srm/types/TLifeTimeInSeconds.java | 457 +-- .../storm/srm/types/TMetaDataPathDetail.java | 742 ++-- .../grid/storm/srm/types/TMetaDataSpace.java | 548 ++- .../grid/storm/srm/types/TOverwriteMode.java | 73 +- .../grid/storm/srm/types/TPermissionMode.java | 229 +- .../grid/storm/srm/types/TRequestToken.java | 256 +- .../it/grid/storm/srm/types/TRequestType.java | 70 +- .../storm/srm/types/TRetentionPolicy.java | 156 +- .../storm/srm/types/TRetentionPolicyInfo.java | 231 +- .../grid/storm/srm/types/TReturnStatus.java | 290 +- .../java/it/grid/storm/srm/types/TSURL.java | 811 ++-- .../it/grid/storm/srm/types/TSURLInfo.java | 71 +- .../srm/types/TSURLLifetimeReturnStatus.java | 257 +- .../storm/srm/types/TSURLReturnStatus.java | 256 +- .../it/grid/storm/srm/types/TSizeInBytes.java | 317 +- .../it/grid/storm/srm/types/TSpaceToken.java | 219 +- .../it/grid/storm/srm/types/TSpaceType.java | 84 +- .../it/grid/storm/srm/types/TStatusCode.java | 200 +- .../storm/srm/types/TStorageSystemInfo.java | 47 +- .../java/it/grid/storm/srm/types/TTURL.java | 326 +- .../storm/srm/types/TTransferParameters.java | 272 +- .../java/it/grid/storm/srm/types/TUserID.java | 63 +- .../grid/storm/srm/types/TUserPermission.java | 80 +- .../java/it/grid/storm/startup/Bootstrap.java | 127 +- .../storm/startup/BootstrapException.java | 28 +- .../storm/synchcall/FileSystemUtility.java | 76 +- .../synchcall/SimpleSynchcallDispatcher.java | 28 +- .../storm/synchcall/SynchcallDispatcher.java | 39 +- .../synchcall/SynchcallDispatcherFactory.java | 21 +- .../synchcall/command/AbstractCommand.java | 22 +- .../grid/storm/synchcall/command/Command.java | 19 +- .../synchcall/command/CommandFactory.java | 108 +- .../synchcall/command/CommandHelper.java | 290 +- .../command/DataTransferCommand.java | 8 +- .../synchcall/command/DirectoryCommand.java | 8 +- .../synchcall/command/DiscoveryCommand.java | 8 +- .../storm/synchcall/command/SpaceCommand.java | 8 +- .../datatransfer/AbortExecutorInterface.java | 8 +- .../datatransfer/AbortFilesCommand.java | 193 +- .../datatransfer/AbortRequestCommand.java | 92 +- .../datatransfer/CommandException.java | 37 +- .../ExtendFileLifeTimeCommand.java | 1183 +++--- .../FileTransferRequestStatusCommand.java | 131 +- .../PrepareToGetRequestCommand.java | 85 +- 
.../PrepareToGetRequestStatusCommand.java | 137 +- .../PrepareToPutRequestCommand.java | 84 +- .../PrepareToPutRequestStatusCommand.java | 138 +- .../datatransfer/PtGAbortExecutor.java | 410 +- .../datatransfer/PtPAbortExecutor.java | 308 +- .../command/datatransfer/PutDoneCommand.java | 352 +- .../datatransfer/PutDoneCommandException.java | 44 +- .../datatransfer/ReleaseFilesCommand.java | 160 +- .../datatransfer/RequestUnknownException.java | 32 +- .../command/directory/LsCommand.java | 237 +- .../command/directory/MkdirCommand.java | 37 +- .../command/directory/MvCommand.java | 1125 +++--- .../command/directory/RmCommand.java | 48 +- .../command/directory/RmdirCommand.java | 540 ++- .../command/discovery/PingCommand.java | 101 +- .../space/GetSpaceMetaDataCommand.java | 374 +- .../command/space/GetSpaceTokensCommand.java | 104 +- .../command/space/ReleaseSpaceCommand.java | 115 +- .../command/space/ReserveSpaceCommand.java | 448 ++- .../common/HiddenFileT1D1Plugin.java | 63 +- .../synchcall/common/T1D1PluginInterface.java | 13 +- .../synchcall/data/AbstractInputData.java | 30 +- .../grid/storm/synchcall/data/DataHelper.java | 27 +- .../synchcall/data/IdentityInputData.java | 22 +- .../grid/storm/synchcall/data/InputData.java | 20 +- .../grid/storm/synchcall/data/OutputData.java | 8 +- .../datatransfer/AbortFilesInputData.java | 6 +- .../datatransfer/AbortFilesOutputData.java | 180 +- .../datatransfer/AbortGeneralOutputData.java | 117 +- .../data/datatransfer/AbortInputData.java | 15 +- .../datatransfer/AbortRequestOutputData.java | 84 +- .../AnonymousAbortFilesInputData.java | 67 +- .../AnonymousAbortGeneralInputData.java | 89 +- .../AnonymousAbortRequestInputData.java | 19 +- .../AnonymousExtendFileLifeTimeInputData.java | 134 +- .../AnonymousFileTransferInputData.java | 153 +- ...ymousManageFileTransferFilesInputData.java | 33 +- ...nageFileTransferRequestFilesInputData.java | 64 +- .../AnonymousPrepareToPutInputData.java | 182 +- .../AnonymousPutDoneInputData.java | 86 +- .../AnonymousReleaseRequestInputData.java | 31 +- .../ExtendFileLifeTimeInputData.java | 28 +- .../ExtendFileLifeTimeOutputData.java | 129 +- .../datatransfer/FileTransferInputData.java | 40 +- .../datatransfer/FileTransferOutputData.java | 281 +- .../IdentityAbortFilesInputData.java | 50 +- .../IdentityAbortRequestInputData.java | 49 +- .../IdentityExtendFileLifeTimeInputData.java | 78 +- .../IdentityFileTransferInputData.java | 79 +- ...ntityManageFileTransferFilesInputData.java | 44 +- ...nageFileTransferRequestFilesInputData.java | 46 +- .../IdentityPrepareToPutInputData.java | 48 +- .../IdentityPutDoneInputData.java | 49 +- .../IdentityReleaseRequestInputData.java | 44 +- .../ManageFileTransferFilesInputData.java | 10 +- .../ManageFileTransferOutputData.java | 155 +- ...nageFileTransferRequestFilesInputData.java | 17 +- .../ManageFileTransferRequestInputData.java | 9 +- .../datatransfer/PrepareToGetOutputData.java | 88 +- .../datatransfer/PrepareToPutInputData.java | 40 +- .../datatransfer/PrepareToPutOutputData.java | 24 +- .../data/directory/AnonymousLSInputData.java | 231 +- .../directory/AnonymousMkdirInputData.java | 31 +- .../data/directory/AnonymousMvInputData.java | 82 +- .../data/directory/AnonymousRmInputData.java | 41 +- .../directory/AnonymousRmdirInputData.java | 82 +- .../data/directory/IdentityLSInputData.java | 85 +- .../directory/IdentityMkdirInputData.java | 56 +- .../data/directory/IdentityMvInputData.java | 51 +- .../data/directory/IdentityRmInputData.java | 49 +- 
.../directory/IdentityRmdirInputData.java | 48 +- .../synchcall/data/directory/LSInputData.java | 53 +- .../data/directory/LSOutputData.java | 117 +- .../data/directory/MkdirInputData.java | 11 +- .../data/directory/MkdirOutputData.java | 63 +- .../synchcall/data/directory/MvInputData.java | 16 +- .../data/directory/MvOutputData.java | 65 +- .../synchcall/data/directory/RmInputData.java | 10 +- .../data/directory/RmOutputData.java | 84 +- .../data/directory/RmdirInputData.java | 16 +- .../data/directory/RmdirOutputData.java | 63 +- .../discovery/AnonymousPingInputData.java | 43 +- .../data/discovery/IdentityPingInputData.java | 70 +- .../data/discovery/PingInputData.java | 16 +- .../data/discovery/PingOutputData.java | 121 +- ...AbortFilesInputDataAttributeException.java | 23 +- ...bortFilesOutputDataAttributeException.java | 25 +- ...ortGeneralInputDataAttributeException.java | 23 +- ...rtGeneralOutputDataAttributeException.java | 25 +- ...ortRequestInputDataAttributeException.java | 23 +- ...rtRequestOutputDataAttributeException.java | 24 +- ...tSpaceMetaDataInputAttributeException.java | 31 +- ...SpaceMetaDataOutputAttributeException.java | 29 +- .../InvalidLSOutputAttributeException.java | 23 +- .../InvalidMkdirInputAttributeException.java | 23 +- .../InvalidMvInputAttributeException.java | 27 +- .../InvalidMvOutputAttributeException.java | 18 +- ...nvalidPutDoneOutputAttributeException.java | 25 +- ...idReleaseFilesInputAttributeException.java | 23 +- ...easeFilesOutputDataAttributeException.java | 19 +- ...nvalidReleaseSpaceAttributesException.java | 30 +- ...aseSpaceOutputDataAttributesException.java | 23 +- ...erveSpaceInputDataAttributesException.java | 40 +- ...rveSpaceOutputDataAttributesException.java | 53 +- .../InvalidRmInputAttributeException.java | 25 +- .../InvalidRmOutputAttributeException.java | 25 +- .../InvalidRmdirInputAttributeException.java | 23 +- .../AnonymousGetSpaceMetaDataInputData.java | 57 +- .../AnonymousGetSpaceTokensInputData.java | 55 +- .../space/AnonymousReleaseSpaceInputData.java | 90 +- .../space/AnonymousReserveSpaceInputData.java | 240 +- .../data/space/GetSpaceMetaDataInputData.java | 10 +- .../space/GetSpaceMetaDataOutputData.java | 112 +- .../data/space/GetSpaceTokensInputData.java | 10 +- .../data/space/GetSpaceTokensOutputData.java | 83 +- .../IdentityGetSpaceMetaDataInputData.java | 68 +- .../IdentityGetSpaceTokensInputData.java | 54 +- .../space/IdentityReleaseSpaceInputData.java | 58 +- .../space/IdentityReserveSpaceInputData.java | 83 +- .../data/space/ReleaseSpaceInputData.java | 16 +- .../data/space/ReleaseSpaceOutputData.java | 69 +- .../data/space/ReserveSpaceInputData.java | 56 +- .../data/space/ReserveSpaceOutputData.java | 177 +- .../synchcall/surl/ExpiredTokenException.java | 31 +- .../storm/synchcall/surl/SURLStatusStore.java | 185 +- .../synchcall/surl/SURLStatusStoreIF.java | 39 +- .../surl/TokenDuplicationException.java | 36 +- .../synchcall/surl/UnknownSurlException.java | 31 +- .../synchcall/surl/UnknownTokenException.java | 27 +- .../tape/recalltable/TapeRecallCatalog.java | 108 +- .../tape/recalltable/TapeRecallException.java | 21 +- .../model/PutTapeRecallStatusLogic.java | 175 +- .../model/PutTapeRecallStatusValidator.java | 175 +- .../recalltable/model/RequestValidator.java | 5 +- .../recalltable/model/TapeRecallStatus.java | 109 +- .../model/TaskInsertRequestValidator.java | 94 +- .../recalltable/persistence/PropertiesDB.java | 381 +- .../persistence/TapeRecallBuilder.java | 48 +- 
.../TapeRecallTOListMessageBodyWriter.java | 84 +- .../resources/TaskInsertRequest.java | 238 +- .../recalltable/resources/TaskResource.java | 118 +- .../resources/TasksCardinality.java | 141 +- .../recalltable/resources/TasksResource.java | 507 ++- .../it/grid/storm/util/GPFSSizeHelper.java | 26 +- .../it/grid/storm/util/SURLValidator.java | 24 +- .../it/grid/storm/util/TokenValidator.java | 24 +- .../it/grid/storm/util/VirtualFSHelper.java | 11 +- .../storm/xmlrpc/StoRMXmlRpcException.java | 37 +- .../it/grid/storm/xmlrpc/XMLRPCExecutor.java | 265 +- .../grid/storm/xmlrpc/XMLRPCHttpServer.java | 64 +- .../it/grid/storm/xmlrpc/XMLRPCMethods.java | 220 +- .../grid/storm/xmlrpc/XmlRpcTokenFilter.java | 150 +- .../storm/xmlrpc/converter/Converter.java | 36 +- .../xmlrpc/converter/ConveterFactory.java | 137 +- .../converter/ParameterDisplayHelper.java | 57 +- .../datatransfer/AbortFilesConverter.java | 152 +- .../datatransfer/AbortRequestConverter.java | 119 +- .../ExtendFileLifeTimeConverter.java | 188 +- .../FileTransferRequestInputConverter.java | 286 +- .../ManageFileTransferConverter.java | 61 +- .../ManageFileTransferRequestConverter.java | 132 +- .../PrepareToGetRequestConverter.java | 56 +- .../PrepareToPutRequestConverter.java | 143 +- .../datatransfer/PutDoneConverter.java | 123 +- .../converter/directory/LsConverter.java | 226 +- .../converter/directory/MkdirConverter.java | 97 +- .../converter/directory/MvConverter.java | 135 +- .../converter/directory/RmConverter.java | 136 +- .../converter/directory/RmdirConverter.java | 153 +- .../converter/discovery/PingConverter.java | 76 +- .../space/GetSpaceMetaDataConverter.java | 159 +- .../space/GetSpaceTokensConverter.java | 104 +- .../space/ReleaseSpaceConverter.java | 100 +- .../space/ReserveSpaceConverter.java | 298 +- src/main/resources/logback.xml | 3 +- .../xmlrpc/webserver/XmlRpcServlet.properties | 3 +- .../it/grid/storm/balancer/BalancerUtils.java | 20 +- .../cache/ResponsivenessCacheTest.java | 9 +- .../strategy/BalancingStrategiesTests.java | 14 +- .../balancer/strategy/CyclicCounterTest.java | 4 +- .../storm/info/du/DiskUsageUtilsTest.java | 14 +- .../storm/namespace/model/SAInfoV13Test.java | 17 +- .../storm/namespace/model/SAInfoV14Test.java | 17 +- .../storm/rest/auth/RestTokenFilterTest.java | 11 +- .../storm/rest/metadata/MetadataTests.java | 91 +- .../rest/metadata/ResourceServiceTest.java | 36 +- .../metadata/StoriMetadataServiceTest.java | 54 +- .../model/TapeRecallStatusTest.java | 3 +- .../model/TaskInsertRequestValidatorTest.java | 52 +- .../resources/TaskInsertRequestTest.java | 17 +- .../resources/TaskResourceTest.java | 139 +- .../storm/test/TestSURLValidator.java | 44 +- .../storm/test/TestTokenValidator.java | 26 +- 798 files changed, 61591 insertions(+), 65889 deletions(-) diff --git a/LICENSE b/LICENSE index 55cfebec..e9b33747 100644 --- a/LICENSE +++ b/LICENSE @@ -1,2 +1 @@ -Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 diff --git a/pom.xml b/pom.xml index de61913c..2ccf1bcf 100644 --- a/pom.xml +++ b/pom.xml @@ -49,6 +49,7 @@ 3.3.0 3.6.1 3.0.0 + 2.9 1.9.0 2.20 @@ -68,6 +69,29 @@ + + + com.coveo + fmt-maven-plugin + ${plugin.format.version} + + src/main/java + src/test/java + false + .*\.java + false + false + + + + + + check + + + + + org.apache.maven.plugins diff --git a/src/main/assemblies/assembly.xml b/src/main/assemblies/assembly.xml index 48771a72..8d6eef6c 100644 --- a/src/main/assemblies/assembly.xml +++ b/src/main/assemblies/assembly.xml @@ -1,8 +1,7 @@ getPlatformKernel() { Map map = Maps.newHashMap(); diff --git a/src/main/java/it/grid/storm/Main.java b/src/main/java/it/grid/storm/Main.java index 7e636ad5..0df935bf 100644 --- a/src/main/java/it/grid/storm/Main.java +++ b/src/main/java/it/grid/storm/Main.java @@ -1,16 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm; import static java.lang.System.exit; +import it.grid.storm.startup.BootstrapException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.startup.BootstrapException; - public class Main { private static final Logger log = LoggerFactory.getLogger(Main.class); @@ -44,5 +42,4 @@ public static void main(String[] args) { exit(1); } } - } diff --git a/src/main/java/it/grid/storm/ShutdownHook.java b/src/main/java/it/grid/storm/ShutdownHook.java index f3dc275e..53e5baf9 100644 --- a/src/main/java/it/grid/storm/ShutdownHook.java +++ b/src/main/java/it/grid/storm/ShutdownHook.java @@ -1,14 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm; +import it.grid.storm.space.gpfsquota.GPFSQuotaManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.space.gpfsquota.GPFSQuotaManager; - public class ShutdownHook extends Thread { private static final Logger log = LoggerFactory.getLogger(ShutdownHook.class); diff --git a/src/main/java/it/grid/storm/StoRM.java b/src/main/java/it/grid/storm/StoRM.java index 76d44116..bfff51ef 100644 --- a/src/main/java/it/grid/storm/StoRM.java +++ b/src/main/java/it/grid/storm/StoRM.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm; @@ -8,15 +7,6 @@ import static java.lang.String.valueOf; import static java.security.Security.setProperty; -import java.util.List; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.asynch.AdvancedPicker; import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.catalogs.StoRMDataSource; @@ -39,15 +29,21 @@ import it.grid.storm.synchcall.SimpleSynchcallDispatcher; import it.grid.storm.xmlrpc.StoRMXmlRpcException; import it.grid.storm.xmlrpc.XMLRPCHttpServer; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class represents a StoRM as a whole: it sets the configuration file which contains * properties necessary for other classes of StoRM, it sets up logging, as well as the advanced * picker. - * + * * @author EGRID - ICTP Trieste; INFN - CNAF Bologna @date March 28th, 2005 @version 7.0 */ - public class StoRM { private static final Logger log = LoggerFactory.getLogger(StoRM.class); @@ -86,7 +82,6 @@ public StoRM() { config = Configuration.getInstance(); picker = new AdvancedPicker(); spaceCatalog = new ReservedSpaceCatalog(); - } public void init() throws BootstrapException { @@ -116,14 +111,16 @@ public void init() throws BootstrapException { configureDiskUsageService(); performSanityChecks(); - } private void configureIPv6() { - log.debug("java.net.preferIPv6Addresses is {}", System.getProperty("java.net.preferIPv6Addresses")); - System.setProperty("java.net.preferIPv6Addresses", String.valueOf(config.getPreferIPv6Addresses())); - log.info("java.net.preferIPv6Addresses is {}", System.getProperty("java.net.preferIPv6Addresses")); + log.debug( + "java.net.preferIPv6Addresses is {}", System.getProperty("java.net.preferIPv6Addresses")); + System.setProperty( + "java.net.preferIPv6Addresses", String.valueOf(config.getPreferIPv6Addresses())); + log.info( + "java.net.preferIPv6Addresses is {}", System.getProperty("java.net.preferIPv6Addresses")); } private void configureLogging() { @@ -152,13 +149,11 @@ private void configureMetricsReporting() { StormMetricsReporter.forRegistry(METRIC_REGISTRY.getRegistry()).build(); metricsReporter.start(1, TimeUnit.MINUTES); - } private void loadNamespaceConfiguration() { NamespaceDirector.initializeDirector(); - } private void loadPathAuthzDBConfiguration() throws BootstrapException { @@ -172,14 +167,16 @@ private void configureXMLRPCService() throws BootstrapException { try { - xmlrpcServer = new XMLRPCHttpServer(config.getXmlRpcServerPort(), config.getXMLRPCMaxThread(), - config.getXMLRPCMaxQueueSize()); + xmlrpcServer = + new XMLRPCHttpServer( + config.getXmlRpcServerPort(), + config.getXMLRPCMaxThread(), + config.getXMLRPCMaxQueueSize()); } catch (StoRMXmlRpcException e) { throw new BootstrapException(e.getMessage(), e); } - } private void performSanityChecks() throws BootstrapException { @@ -211,7 +208,6 @@ private void performSanityChecks() throws BootstrapException { } else { log.warn("Sanity checks disabled. Unable to determine if the environment is sane"); } - } private void configureStoRMDataSource() { @@ -219,9 +215,7 @@ private void configureStoRMDataSource() { StoRMDataSource.init(); } - /** - * Method used to start the picker. 
- */ + /** Method used to start the picker. */ public synchronized void startPicker() { if (isPickerRunning) { @@ -232,9 +226,7 @@ public synchronized void startPicker() { isPickerRunning = true; } - /** - * Method used to stop the picker. - */ + /** Method used to stop the picker. */ public synchronized void stopPicker() { if (!isPickerRunning) { @@ -245,9 +237,7 @@ public synchronized void stopPicker() { isPickerRunning = false; } - /** - * @return - */ + /** @return */ public synchronized boolean pickerIsRunning() { return isPickerRunning; @@ -255,7 +245,7 @@ public synchronized boolean pickerIsRunning() { /** * Method used to start xmlrpcServer. - * + * * @throws Exception */ public synchronized void startXmlRpcServer() { @@ -268,9 +258,7 @@ public synchronized void startXmlRpcServer() { isXmlrpcServerRunning = true; } - /** - * Method used to stop xmlrpcServer. - */ + /** Method used to stop xmlrpcServer. */ public synchronized void stopXmlRpcServer() { if (!isXmlrpcServerRunning) { @@ -293,9 +281,7 @@ private void configureRestService() { restServer = new RestServer(restServicePort, maxThreads, maxQueueSize, isTokenEnabled, token); } - /** - * RESTFul Service Start-up - */ + /** RESTFul Service Start-up */ public synchronized void startRestServer() throws Exception { if (isRestServerRunning) { @@ -307,9 +293,7 @@ public synchronized void startRestServer() throws Exception { isRestServerRunning = true; } - /** - * @throws Exception - */ + /** @throws Exception */ public synchronized void stopRestServer() { if (isRestServerRunning) { @@ -324,14 +308,14 @@ public synchronized void stopRestServer() { } catch (Exception e) { - log.error("Unable to stop internal HTTP Server listening for RESTFul services: {}", - e.getMessage(), e); + log.error( + "Unable to stop internal HTTP Server listening for RESTFul services: {}", + e.getMessage(), + e); } } - /** - * Method use to start the space Garbage Collection Thread. - */ + /** Method use to start the space Garbage Collection Thread. 
*/ public synchronized void startSpaceGC() { if (isSpaceGCRunning) { @@ -348,22 +332,21 @@ public synchronized void startSpaceGC() { long period = config.getCleaningTimeInterval() * 1000; // Set to 1 hour - cleaningTask = new TimerTask() { + cleaningTask = + new TimerTask() { - @Override - public void run() { + @Override + public void run() { - spaceCatalog.purge(); - } - }; + spaceCatalog.purge(); + } + }; gc.scheduleAtFixedRate(cleaningTask, delay, period); isSpaceGCRunning = true; log.debug("Space Garbage Collector started."); } - /** - * - */ + /** */ public synchronized void stopSpaceGC() { if (!isSpaceGCRunning) { @@ -380,9 +363,7 @@ public synchronized void stopSpaceGC() { isSpaceGCRunning = false; } - /** - * @return - */ + /** @return */ public synchronized boolean spaceGCIsRunning() { return isSpaceGCRunning; @@ -443,10 +424,10 @@ private void configureDiskUsageService() { NamespaceInterface namespace = NamespaceDirector.getNamespace(); List quotaEnabledVfs = namespace.getVFSWithQuotaEnabled(); - List sas = namespace.getAllDefinedVFS() - .stream() - .filter(vfs -> !quotaEnabledVfs.contains(vfs)) - .collect(Collectors.toList()); + List sas = + namespace.getAllDefinedVFS().stream() + .filter(vfs -> !quotaEnabledVfs.contains(vfs)) + .collect(Collectors.toList()); if (config.getDiskUsageServiceTasksParallel()) { duService = DiskUsageService.getScheduledThreadPoolService(sas); @@ -465,7 +446,9 @@ public synchronized void startDiskUsageService() { if (isDiskUsageServiceEnabled) { - log.info("Starting DiskUsage Service (delay: {}s, period: {}s)", duService.getDelay(), + log.info( + "Starting DiskUsage Service (delay: {}s, period: {}s)", + duService.getDelay(), duService.getPeriod()); duService.start(); @@ -475,7 +458,6 @@ public synchronized void startDiskUsageService() { } else { log.info("DiskUsage Service is disabled."); - } } @@ -492,7 +474,6 @@ public synchronized void stopDiskUsageService() { } else { log.info("DiskUsage Service is not running."); - } } diff --git a/src/main/java/it/grid/storm/StoRMDefaultUncaughtExceptionHandler.java b/src/main/java/it/grid/storm/StoRMDefaultUncaughtExceptionHandler.java index 5cd19a74..d06535e0 100644 --- a/src/main/java/it/grid/storm/StoRMDefaultUncaughtExceptionHandler.java +++ b/src/main/java/it/grid/storm/StoRMDefaultUncaughtExceptionHandler.java @@ -1,15 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm; import java.lang.Thread.UncaughtExceptionHandler; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class StoRMDefaultUncaughtExceptionHandler implements UncaughtExceptionHandler { public static final Logger log = @@ -18,12 +15,15 @@ public class StoRMDefaultUncaughtExceptionHandler implements UncaughtExceptionHa @Override public void uncaughtException(Thread t, Throwable e) { - String errorMessage = String.format("Thread (%d - '%s') uncaught exception: %s at line %d (%s)", - t.getId(), t.getName(), e.toString(), e.getStackTrace()[0].getLineNumber(), - e.getStackTrace()[0].getFileName()); + String errorMessage = + String.format( + "Thread (%d - '%s') uncaught exception: %s at line %d (%s)", + t.getId(), + t.getName(), + e.toString(), + e.getStackTrace()[0].getLineNumber(), + e.getStackTrace()[0].getFileName()); log.error(errorMessage, e); - } - } diff --git a/src/main/java/it/grid/storm/acl/AclManagementInterface.java b/src/main/java/it/grid/storm/acl/AclManagementInterface.java index b72d4685..4fbef4f9 100644 --- a/src/main/java/it/grid/storm/acl/AclManagementInterface.java +++ b/src/main/java/it/grid/storm/acl/AclManagementInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.acl; @@ -8,36 +7,32 @@ import it.grid.storm.filesystem.LocalFile; import it.grid.storm.griduser.LocalUser; -/** - * @author Michele Dibenedetto - * - */ - +/** @author Michele Dibenedetto */ public interface AclManagementInterface { /** * Grants the provided permission on the provided file to the provided group - * + * * @param localFile * @param localUser a local user representing a group on the operating system * @param permission */ - void grantGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission); + void grantGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission); /** * Grants the provided permission on the provided file to the provided user - * + * * @param localFile * @param localUser a local user representing an user on the operating system * @param permission */ - void grantUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission); + void grantUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission); /** * Removes all the permission eventually assigned to the provided group on the provided file - * + * * @param localFile a local user representing a group on the operating system * @param localUser */ @@ -45,7 +40,7 @@ void grantUserPermission(LocalFile localFile, LocalUser localUser, /** * Removes all the permission eventually assigned to the provided user on the provided file - * + * * @param localFile * @param localUser a local user representing an user on the operating system */ @@ -53,37 +48,37 @@ void grantUserPermission(LocalFile localFile, LocalUser localUser, /** * Revokes the provided permission on the provided file to the provided group - * + * * @param localFile * @param localUser a local user representing a group on the operating system * @param permission */ - void revokeGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission); + void revokeGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission 
permission); /** * Revokes the provided permission on the provided file to the provided user - * + * * @param localFile * @param localUser a local user representing an user on the operating system * @param permission */ - void revokeUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission); + void revokeUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission); /** * Sets the provided permission on the provided file to the provided group - * + * * @param localFile * @param localUser a local user representing a group on the operating system * @param permission */ - void setGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission); + void setGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission); /** * Sets the provided permission on the provided file to the provided user - * + * * @param localFile * @param localUser a local user representing an user on the operating system * @param permission @@ -92,7 +87,7 @@ void setGroupPermission(LocalFile localFile, LocalUser localUser, /** * Removes all the permission from any user/group from the provided file - * + * * @param localFile */ void removeAllPermissions(LocalFile localFile); @@ -100,10 +95,9 @@ void setGroupPermission(LocalFile localFile, LocalUser localUser, /** * Moves all the permission from any user/group from the provided fromLocalFile to the new * toLocalFile (NOTE: can be assumed that toLocalFile has no ACL) - * + * * @param fromLocalFile * @param toLocalFile */ void moveAllPermissions(LocalFile fromLocalFile, LocalFile toLocalFile); - } diff --git a/src/main/java/it/grid/storm/acl/AclManager.java b/src/main/java/it/grid/storm/acl/AclManager.java index 42b5a831..990a93b7 100644 --- a/src/main/java/it/grid/storm/acl/AclManager.java +++ b/src/main/java/it/grid/storm/acl/AclManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.acl; @@ -8,11 +7,7 @@ import it.grid.storm.filesystem.LocalFile; import it.grid.storm.griduser.LocalUser; -/** - * @author Michele Dibenedetto - * - */ - +/** @author Michele Dibenedetto */ public interface AclManager { /** @@ -21,29 +16,31 @@ public interface AclManager { * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission grantGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission grantGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file * @param localUser * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission grantUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission grantUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file * @param localUser * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ FilesystemPermission removeGroupPermission(LocalFile localFile, LocalUser localUser) throws IllegalArgumentException; @@ -53,7 +50,7 @@ FilesystemPermission removeGroupPermission(LocalFile localFile, LocalUser localU * @param localUser * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ FilesystemPermission removeUserPermission(LocalFile localFile, LocalUser localUser) throws IllegalArgumentException; @@ -64,10 +61,11 @@ FilesystemPermission removeUserPermission(LocalFile localFile, LocalUser localUs * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission revokeGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission revokeGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file @@ -75,10 +73,11 @@ FilesystemPermission revokeGroupPermission(LocalFile localFile, LocalUser localU * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission revokeUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission revokeUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file @@ -86,10 +85,11 @@ FilesystemPermission revokeUserPermission(LocalFile localFile, 
LocalUser localUs * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission setGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission setGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file @@ -97,15 +97,16 @@ FilesystemPermission setGroupPermission(LocalFile localFile, LocalUser localUser * @param permission * @return * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - FilesystemPermission setUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + FilesystemPermission setUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ void removeHttpsPermissions(LocalFile localFile) throws IllegalArgumentException; @@ -114,17 +115,18 @@ FilesystemPermission setUserPermission(LocalFile localFile, LocalUser localUser, * @param localUser * @param permission * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - void grantHttpsUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + void grantHttpsUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file * @param localUser * @param permission * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ void grantHttpsServiceGroupPermission(LocalFile localFile, FilesystemPermission permission) throws IllegalArgumentException; @@ -134,17 +136,18 @@ void grantHttpsServiceGroupPermission(LocalFile localFile, FilesystemPermission * @param localUser * @param permission * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ - void grantHttpsGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException; + void grantHttpsGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException; /** * @param localFile an existent file * @param localUser * @param permission * @throws IllegalArgumentException if received null parameters or the LocalFile object refers to - * a not existent file + * a not existent file */ void grantHttpsServiceUserPermission(LocalFile localFile, FilesystemPermission permission) throws IllegalArgumentException; @@ -153,9 +156,8 @@ void grantHttpsServiceUserPermission(LocalFile localFile, FilesystemPermission p * @param oldLocalFile an existent source file * @param newLocalFile an existent destination file * @throws IllegalArgumentException if received null parameters or the LocalFile objects refers to - * 
not existent files + * not existent files */ void moveHttpsPermissions(LocalFile oldLocalFile, LocalFile newLocalFile) throws IllegalArgumentException; - } diff --git a/src/main/java/it/grid/storm/acl/AclManagerFS.java b/src/main/java/it/grid/storm/acl/AclManagerFS.java index e51fdd6c..f43c5e96 100644 --- a/src/main/java/it/grid/storm/acl/AclManagerFS.java +++ b/src/main/java/it/grid/storm/acl/AclManagerFS.java @@ -1,31 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.acl; import it.grid.storm.filesystem.FilesystemPermission; import it.grid.storm.filesystem.LocalFile; import it.grid.storm.griduser.LocalUser; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class AclManagerFS implements AclManager { private static AclManagerFS instance = new AclManagerFS(); - private AclManagerFS() { + private AclManagerFS() {} - } - - /** - * @return - */ + /** @return */ public static AclManager getInstance() { return instance; @@ -33,18 +23,23 @@ public static AclManager getInstance() { /* * (non-Javadoc) - * + * * @see it.grid.storm.acl.AclManager#grantGroupPermission(it.grid.storm.griduser .LocalUser, * it.grid.storm.filesystem.FilesystemPermission) */ @Override - public FilesystemPermission grantGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission grantGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -56,18 +51,23 @@ public FilesystemPermission grantGroupPermission(LocalFile localFile, LocalUser /* * (non-Javadoc) - * + * * @see it.grid.storm.acl.AclManager#grantUserPermission(it.grid.storm.filesystem .LocalFile, * it.grid.storm.griduser.LocalUser, it.grid.storm.filesystem.FilesystemPermission) */ @Override - public FilesystemPermission grantUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission grantUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. 
The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -83,8 +83,10 @@ public FilesystemPermission removeGroupPermission(LocalFile localFile, LocalUser if (localFile == null || localUser == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -100,8 +102,10 @@ public FilesystemPermission removeUserPermission(LocalFile localFile, LocalUser if (localFile == null || localUser == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -112,13 +116,18 @@ public FilesystemPermission removeUserPermission(LocalFile localFile, LocalUser } @Override - public FilesystemPermission revokeGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission revokeGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -129,13 +138,18 @@ public FilesystemPermission revokeGroupPermission(LocalFile localFile, LocalUser } @Override - public FilesystemPermission revokeUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission revokeUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. 
The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -146,13 +160,18 @@ public FilesystemPermission revokeUserPermission(LocalFile localFile, LocalUser } @Override - public FilesystemPermission setGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission setGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -163,13 +182,18 @@ public FilesystemPermission setGroupPermission(LocalFile localFile, LocalUser lo } @Override - public FilesystemPermission setUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public FilesystemPermission setUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } if (!localFile.exists()) { throw new IllegalArgumentException( @@ -189,13 +213,18 @@ public void removeHttpsPermissions(LocalFile localFile) throws IllegalArgumentEx } @Override - public void grantHttpsUserPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public void grantHttpsUserPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } } @@ -205,19 +234,26 @@ public void grantHttpsServiceUserPermission(LocalFile localFile, FilesystemPermi if (localFile == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " permission=" + permission); + "Unable to perform the operation. 
The received null parameters: localFile=" + + localFile + + " permission=" + + permission); } } @Override - public void grantHttpsGroupPermission(LocalFile localFile, LocalUser localUser, - FilesystemPermission permission) throws IllegalArgumentException { + public void grantHttpsGroupPermission( + LocalFile localFile, LocalUser localUser, FilesystemPermission permission) + throws IllegalArgumentException { if (localFile == null || localUser == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " localUser=" + localUser + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " localUser=" + + localUser + + " permission=" + + permission); } } @@ -227,8 +263,10 @@ public void grantHttpsServiceGroupPermission(LocalFile localFile, FilesystemPerm if (localFile == null || permission == null) { throw new IllegalArgumentException( - "Unable to perform the operation. The received null parameters: localFile=" + localFile - + " permission=" + permission); + "Unable to perform the operation. The received null parameters: localFile=" + + localFile + + " permission=" + + permission); } } @@ -239,8 +277,9 @@ public void moveHttpsPermissions(LocalFile fromLocalFile, LocalFile toLocalFile) if (fromLocalFile == null || toLocalFile == null) { throw new IllegalArgumentException( "Unable to perform the operation. The received null parameters: fromLocalFile=" - + fromLocalFile + " toLocalFile=" + toLocalFile); + + fromLocalFile + + " toLocalFile=" + + toLocalFile); } } - } diff --git a/src/main/java/it/grid/storm/asynch/AdvancedPicker.java b/src/main/java/it/grid/storm/asynch/AdvancedPicker.java index 121a32d3..b9a0d335 100644 --- a/src/main/java/it/grid/storm/asynch/AdvancedPicker.java +++ b/src/main/java/it/grid/storm/asynch/AdvancedPicker.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -13,17 +12,15 @@ import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TRequestType; import it.grid.storm.srm.types.TSURL; - import java.util.Collection; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is in charge of periodically polling the DB for newly added requests. - * + * * @author EGRID ICTP Trieste * @version 1.0 * @date October 2006 @@ -67,25 +64,23 @@ public void stopIt() { retrievingTask.cancel(); retriever.cancel(); } - } - /** - * Method used to command This AdvancedPicker to start periodic reading of data from the DB. - */ + /** Method used to command This AdvancedPicker to start periodic reading of data from the DB. 
*/ public void startIt() { log.debug("ADVANCED PICKER: started"); retriever = new Timer(); - retrievingTask = new TimerTask() { + retrievingTask = + new TimerTask() { - @Override - public void run() { - retrieve(); - } - }; // retrieving task + @Override + public void run() { + retrieve(); + } + }; // retrieving task retriever.scheduleAtFixedRate(retrievingTask, delay, period); } @@ -95,25 +90,24 @@ public void run() { * fetching the TRequestToken, VomsGridUser and TRequestType; the global status of each request * then changes to SRM_SUCCESS, appropriate Feeders get created and forwarded to the crusher * scheduler. - * - * There could be internal errors that get handled as follows: - * - * (1) If the request type is not supported, the request is dropped and the global status transits - * to SRM_NOT_SUPPORTED; however each chunk data status remains in SRM_REQUEST_QUEUED because it - * is impossible to know where in the DB tables to update the chunk status! - * - * (2) If the request type is supported, but the corresponding Feeder cannot be created, then the - * global status transits to SRM_FAILURE, as well as the status of each chunk. - * - * (3) If the Scheduler throws any exception, then the global status transits to SRM_FAILURE, as - * well as that of each chunk. Under anomalous circumstances it could be that it is not possible - * to update the status of each chunk, in which case the chunk status remains SRM_REQUEST_QUEUED. - * This last case is particularly pernicious, so a FATAL log is signalled: it means the code was - * not updated! + * + *
<p>
There could be internal errors that get handled as follows: + * + *
<p>
(1) If the request type is not supported, the request is dropped and the global status + * transits to SRM_NOT_SUPPORTED; however each chunk data status remains in SRM_REQUEST_QUEUED + * because it is impossible to know where in the DB tables to update the chunk status! + * + *
<p>
(2) If the request type is supported, but the corresponding Feeder cannot be created, then + * the global status transits to SRM_FAILURE, as well as the status of each chunk. + * + *
<p>
(3) If the Scheduler throws any exception, then the global status transits to SRM_FAILURE, + * as well as that of each chunk. Under anomalous circumstances it could be that it is not + * possible to update the status of each chunk, in which case the chunk status remains + * SRM_REQUEST_QUEUED. This last case is particularly pernicious, so a FATAL log is signalled: it + * means the code was not updated! */ public void retrieve() { - int crusherCapacity = -1; SchedulerStatus status = s.getStatus(); @@ -139,7 +133,6 @@ public void retrieve() { } else { log.info("ADVANCED PICKER: dispatching {} requests.", requests.size()); - } TRequestType rtype = null; @@ -159,7 +152,7 @@ public void retrieve() { } else { RequestSummaryCatalog.getInstance() - .abortChunksOfInProgressRequest(abortToken, abortSURLS); + .abortChunksOfInProgressRequest(abortToken, abortSURLS); } abortToken = null; @@ -186,49 +179,61 @@ public void retrieve() { } else { s.schedule(new UnsupportedOperationFeeder()); - } } catch (InvalidPtGFeederAttributesException e) { - log.error("ADVANCED PICKER ERROR! PtGFeeder could not be created " - + "because of invalid attributes: {}", e.getMessage(), e); + log.error( + "ADVANCED PICKER ERROR! PtGFeeder could not be created " + + "because of invalid attributes: {}", + e.getMessage(), + e); log.error("PtG Request is being dropped: {}", rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); + .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); } catch (InvalidPtPFeederAttributesException e) { - log.error("ADVANCED PICKER ERROR! PtPFeeder could not be created " - + "because of invalid attributes: {}", e.getMessage(), e); + log.error( + "ADVANCED PICKER ERROR! PtPFeeder could not be created " + + "because of invalid attributes: {}", + e.getMessage(), + e); log.error("PtP Request is being dropped: {}", rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); + .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); } catch (InvalidBoLFeederAttributesException e) { - log.error("ADVANCED PICKER ERROR! BoLFeeder could not be created " - + "because of invalid attributes: {}", e.getMessage(), e); + log.error( + "ADVANCED PICKER ERROR! BoLFeeder could not be created " + + "because of invalid attributes: {}", + e.getMessage(), + e); log.error("BoL Request is being dropped: {}", rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); + .failRequest(rsd, "Internal error does not allow request to be fed to scheduler."); } catch (SchedulerException | UnsupportedOperationException e) { - log.error("ADVANCED PICKER ERROR! The request could not be scheduled" - + "because of scheduler errors: {}", e.getMessage(), e); - log.error("ADVANCED PICKER ERROR! Request {} of type {} dropped.", rsd.requestToken(), + log.error( + "ADVANCED PICKER ERROR! The request could not be scheduled" + + "because of scheduler errors: {}", + e.getMessage(), + e); + log.error( + "ADVANCED PICKER ERROR! 
Request {} of type {} dropped.", + rsd.requestToken(), rsd.requestType()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "Internal scheduler has problems accepting request feed."); - + .failRequest(rsd, "Internal scheduler has problems accepting request feed."); } } } @@ -246,11 +251,11 @@ public void retrieve() { /** * Method used to remove the request identified by the supplied TRequestToken, from the internal * queue of Requests that must be scheduled. - * - * If a null TRequestToken is supplied, or some other abort request has been issued, then FALSE is - * returned; otherwise TRUE is returned. + * + *
<p>
If a null TRequestToken is supplied, or some other abort request has been issued, then FALSE + * is returned; otherwise TRUE is returned. */ - synchronized public boolean abortRequest(TRequestToken rt) { + public synchronized boolean abortRequest(TRequestToken rt) { if (abort) { @@ -272,11 +277,11 @@ synchronized public boolean abortRequest(TRequestToken rt) { * Method used to remove chunks of the request identified by the supplied TRequestToken, with * surls given by the collection c. Chunks in the DB get their status changed and so will not be * considered for processing. - * - * If a null TRequestToken or Collection is supplied, or some other abort request has been issued, - * then FALSE is returned; otherwise TRUE is returned. + * + *
<p>
If a null TRequestToken or Collection is supplied, or some other abort request has been + * issued, then FALSE is returned; otherwise TRUE is returned. */ - synchronized public boolean abortChunksOfRequest(TRequestToken rt, Collection c) { + public synchronized boolean abortChunksOfRequest(TRequestToken rt, Collection c) { if (abort) { @@ -294,5 +299,4 @@ synchronized public boolean abortChunksOfRequest(TRequestToken rt, CollectionIf the request contains nothing to process, an error message gets logged, the number of queued * requests is decreased, and the number of finished requests is increased. - * - * If the single part of the request has dirOption NOT set, then the number of queued requests is + * + *
<p>
If the single part of the request has dirOption NOT set, then the number of queued requests is * decreased, the number of progressing requests is increased, the status of that chunk is changed * to SRM_REQUEST_INPROGRESS; the chunk is given to the scheduler for handling. In case the * scheduler cannot accept the chunk for any reason, a message with the requestToken and the chunk s * data is logged, status of the chunk passes to SRM_ABORTED, and at the end the counters are such * that the queued-requests is decreased while the finished-requests is increased. - * - * If the single part of the request DOES have a dirOption set, then it is considered as an + * + *
<p>
If the single part of the request DOES have a dirOption set, then it is considered as an * expansion job and it gets handled now! So the number of queued requests is decreased and that for * progressing ones is increased, while the status is set to SRM_REQUEST_INPROGRESS. Each newly * expanded file gets handled as though it were part of the multifile request WITHOUT the dirOption @@ -53,26 +50,27 @@ * persistence system is created, and the total number of files in this request is updated. Finally * the status of this expansion request is set to SRM_DONE, the number of progressing requests is * decreased and the number of finished requests is increased. - * - * At the beginning of the expansion stage, some anomalous situations are considered and handled as - * follows: - * - * (0) In case of internal errors, they get logged and the expansion request gets failed: the status - * changes to SRM_FAILURE, number of progressing is decreased, number of finished is increased. - * - * (1) The expanded directory is empty: the request is set to SRM_SUCCESS with an explanatory String - * saying so. The number of progressing is decreased, and the number of finished is increased. - * - * (2) The directory does not exist: status set to SRM_INVALID_PATH; number of progressing is + * + *
<p>
At the beginning of the expansion stage, some anomalous situations are considered and handled + * as follows: + * + *
<p>
(0) In case of internal errors, they get logged and the expansion request gets failed: the + * status changes to SRM_FAILURE, number of progressing is decreased, number of finished is + * increased. + * + *
<p>
(1) The expanded directory is empty: the request is set to SRM_SUCCESS with an explanatory + * String saying so. The number of progressing is decreased, and the number of finished is + * increased. + * + *
<p>
(2) The directory does not exist: status set to SRM_INVALID_PATH; number of progressing is * decresed; number of finished is increased. - * - * (3) Attempting to expand a file: status set to SRM_INVALID_PATH; number of progressing is + * + *
<p>
(3) Attempting to expand a file: status set to SRM_INVALID_PATH; number of progressing is * decreased; number of finished is increased. - * - * (4) No rights to directory: status set to SRM_AUTHORIZATION_FAILURE; number of progressing is + * + *
<p>
(4) No rights to directory: status set to SRM_AUTHORIZATION_FAILURE; number of progressing is * decreased; number of finished is increased. - * - * + * * @author CNAF * @date Aug, 2009 * @version 1.0 @@ -109,7 +107,6 @@ public BoLFeeder(RequestSummaryData rsd) throws InvalidBoLFeederAttributesExcept log.error(e.getMessage(), e); throw new InvalidBoLFeederAttributesException(rsd, gu, null); - } } @@ -124,19 +121,18 @@ public void doIt() { Collection chunks = BoLChunkCatalog.getInstance().lookup(rsd.requestToken()); if (chunks.isEmpty()) { - log.warn("ATTENTION in BoLFeeder! This SRM BoL request contained nothing to process! " - + rsd.requestToken()); + log.warn( + "ATTENTION in BoLFeeder! This SRM BoL request contained nothing to process! " + + rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "This SRM Get request contained nothing to process!"); + .failRequest(rsd, "This SRM Get request contained nothing to process!"); } else { manageChunks(chunks); log.debug("BoLFeeder: finished pre-processing {}", rsd.requestToken()); } } - /** - * Private method that handles the Collection of chunks associated with the srm command! - */ + /** Private method that handles the Collection of chunks associated with the srm command! */ private void manageChunks(Collection chunks) { log.debug("BoLFeeder - number of chunks in request: {}", chunks.size()); @@ -161,7 +157,8 @@ private void manageChunks(Collection chunks) { log.warn( "BoLFeeder: srmBoL contract violation! fromSURL does not " + "correspond to this machine!\n Request: {}\n Chunk: {}", - rsd.requestToken(), chunkData); + rsd.requestToken(), + chunkData); chunkData.changeStatusSRM_FAILURE( "SRM protocol violation! " + "Cannot do an srmBoL of a SURL that is not local!"); @@ -177,9 +174,7 @@ private void manageChunks(Collection chunks) { gsm.finishedAdding(); } - /** - * Private method that handles the case of dirOption NOT set! - */ + /** Private method that handles the case of dirOption NOT set! */ private void manageNotDirectory(BoLPersistentChunkData auxChunkData) { log.debug("BoLFeeder - scheduling... "); @@ -188,15 +183,15 @@ private void manageNotDirectory(BoLPersistentChunkData auxChunkData) { try { /* hand it to scheduler! */ SchedulerFacade.getInstance() - .chunkScheduler() - .schedule(new BoLPersistentChunk(gu, rsd, auxChunkData, gsm)); + .chunkScheduler() + .schedule(new BoLPersistentChunk(gu, rsd, auxChunkData, gsm)); log.debug("BoLFeeder - chunk scheduled."); } catch (InvalidRequestAttributesException e) { /* * for some reason gu, rsd or auxChunkData may be null! This should not be so! */ - log.error("UNEXPECTED ERROR in BoLFeeder! Chunk could not be " + "created!\n{}", - e.getMessage(), e); + log.error( + "UNEXPECTED ERROR in BoLFeeder! Chunk could not be " + "created!\n{}", e.getMessage(), e); log.error("Request: {}" + rsd.requestToken()); log.error("Chunk: {}" + auxChunkData); @@ -207,8 +202,10 @@ private void manageNotDirectory(BoLPersistentChunkData auxChunkData) { gsm.failedChunk(auxChunkData); } catch (SchedulerException e) { /* Internal error of scheduler! */ - log.error("UNEXPECTED ERROR in ChunkScheduler! Chunk could not be " + "scheduled!\n{}", - e.getMessage(), e); + log.error( + "UNEXPECTED ERROR in ChunkScheduler! 
Chunk could not be " + "scheduled!\n{}", + e.getMessage(), + e); log.error("Request: {}", rsd.requestToken()); log.error("Chunk: {}", auxChunkData); @@ -220,9 +217,7 @@ private void manageNotDirectory(BoLPersistentChunkData auxChunkData) { } } - /** - * Private method that handles the case of a BoLChunkData having dirOption set! - */ + /** Private method that handles the case of a BoLChunkData having dirOption set! */ private void manageIsDirectory(BoLPersistentChunkData chunkData) { log.debug("BoLFeeder - pre-processing Directory chunk..."); @@ -238,20 +233,32 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { } catch (IllegalArgumentException e) { log.error( "Unable to build a stori for surl {} for user {}. " + "IllegalArgumentException: {}", - surl, user, e.getMessage(), e); + surl, + user, + e.getMessage(), + e); chunkData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (UnapprochableSurlException e) { log.info( "Unable to build a stori for surl {} for user {}. " + "UnapprochableSurlException: {}", - surl, user, e.getMessage()); + surl, + user, + e.getMessage()); chunkData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); } catch (NamespaceException e) { - log.error("Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", - surl, user, e.getMessage(), e); + log.error( + "Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", + surl, + user, + e.getMessage(), + e); chunkData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (InvalidSURLException e) { - log.info("Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", - surl, user, e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", + surl, + user, + e.getMessage()); chunkData.changeStatusSRM_INVALID_PATH(e.getMessage()); } finally { if (stori == null) { @@ -273,10 +280,17 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { for (StoRI storiChild : storiChildren) { try { - childData = new BoLPersistentChunkData(chunkData.getRequestToken(), storiChild.getSURL(), - chunkData.getLifeTime(), notDir, chunkData.getTransferProtocols(), - chunkData.getFileSize(), chunkData.getStatus(), chunkData.getTransferURL(), - chunkData.getDeferredStartTime()); + childData = + new BoLPersistentChunkData( + chunkData.getRequestToken(), + storiChild.getSURL(), + chunkData.getLifeTime(), + notDir, + chunkData.getTransferProtocols(), + chunkData.getFileSize(), + chunkData.getStatus(), + chunkData.getTransferURL(), + chunkData.getDeferredStartTime()); /* fill in new db row and set the PrimaryKey of ChildData! */ BoLChunkCatalog.getInstance().addChild(childData); @@ -291,8 +305,11 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { * For some reason it was not possible to create a BoLChunkData: it is a programme bug!!! * It should not occur!!! Log it and skip to the next one! */ - log.error("ERROR in BoLFeeder! While expanding recursive request, " - + "it was not possible to create a new BoLChunkData! {}", e.getMessage(), e); + log.error( + "ERROR in BoLFeeder! While expanding recursive request, " + + "it was not possible to create a new BoLChunkData! 
{}", + e.getMessage(), + e); } } log.debug("BoLFeeder - expansion completed."); @@ -310,16 +327,20 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { "srmBringOnLine with dirOption set:" + " expansion failure due to internal error!"); BoLChunkCatalog.getInstance().update(chunkData); - log.error("UNEXPECTED ERROR in BoLFeeder! Could not create TDirOption " - + "specifying non-expansion!\n{}", e.getMessage(), e); + log.error( + "UNEXPECTED ERROR in BoLFeeder! Could not create TDirOption " + + "specifying non-expansion!\n{}", + e.getMessage(), + e); log.error("Request: {}", rsd.requestToken()); log.error("Chunk: {}", chunkData); gsm.failedChunk(chunkData); } catch (InvalidDescendantsEmptyRequestException e) { - chunkData.changeStatusSRM_SUCCESS("BEWARE! srmBringOnLine with " - + "dirOption set: it referred to a directory that was empty!"); + chunkData.changeStatusSRM_SUCCESS( + "BEWARE! srmBringOnLine with " + + "dirOption set: it referred to a directory that was empty!"); BoLChunkCatalog.getInstance().update(chunkData); @@ -335,8 +356,9 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { BoLChunkCatalog.getInstance().update(chunkData); - log.debug("ATTENTION in BoLFeeder! BoLFeeder received request to expand " - + "non-existing directory."); + log.debug( + "ATTENTION in BoLFeeder! BoLFeeder received request to expand " + + "non-existing directory."); gsm.failedChunk(chunkData); } catch (InvalidDescendantsFileRequestException e) { @@ -349,7 +371,6 @@ private void manageIsDirectory(BoLPersistentChunkData chunkData) { log.debug("ATTENTION in BoLFeeder! BoLFeeder received request to expand " + "a file."); gsm.failedChunk(chunkData); - } } diff --git a/src/main/java/it/grid/storm/asynch/BoLPersistentChunk.java b/src/main/java/it/grid/storm/asynch/BoLPersistentChunk.java index 67a74cb7..652788a5 100644 --- a/src/main/java/it/grid/storm/asynch/BoLPersistentChunk.java +++ b/src/main/java/it/grid/storm/asynch/BoLPersistentChunk.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -11,7 +10,6 @@ import it.grid.storm.scheduler.PersistentRequestChunk; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,7 +58,7 @@ * to manipulate the ACLs, StoRM was not configured for the underlying FileSystem, or there was an * unexpected error; in the first case the status changes to SRM_INVALID_PATH, while in all other * ones it changes to SRM_FAILURE; corresponding messages get logged. - * + * * @author CNAF * @date Aug 2009 * @version 1.0 @@ -69,9 +67,7 @@ public class BoLPersistentChunk extends BoL implements PersistentRequestChunk { private static Logger log = LoggerFactory.getLogger(BoLPersistentChunk.class); - /** - * RequestSummaryData containing all the statistics for the originating srmBringOnLineRequest - */ + /** RequestSummaryData containing all the statistics for the originating srmBringOnLineRequest */ private RequestSummaryData rsd = null; /** manager for global status computation */ private GlobalStatusManager gsm = null; @@ -80,8 +76,11 @@ public class BoLPersistentChunk extends BoL implements PersistentRequestChunk { * Constructor requiring the GridUser, the RequestSummaryData and the BoLChunkData about this * chunk. 
If the supplied attributes are null, an InvalidBoLChunkAttributesException is thrown. */ - public BoLPersistentChunk(GridUserInterface gu, RequestSummaryData rsd, - BoLPersistentChunkData chunkData, GlobalStatusManager gsm) + public BoLPersistentChunk( + GridUserInterface gu, + RequestSummaryData rsd, + BoLPersistentChunkData chunkData, + GlobalStatusManager gsm) throws InvalidRequestAttributesException, InvalidPersistentRequestAttributesException { super(gu, chunkData); @@ -99,11 +98,15 @@ public Boolean completeRequest(TapeRecallStatus recallStatus) { persistStatus(); if (requestSuccessfull) { gsm.successfulChunk((BoLPersistentChunkData) requestData); - log.info("Completed BoL request ({}), file successfully recalled from tape: {}", - rsd.requestToken(), requestData.getSURL().toString()); + log.info( + "Completed BoL request ({}), file successfully recalled from tape: {}", + rsd.requestToken(), + requestData.getSURL().toString()); } else { gsm.failedChunk((BoLPersistentChunkData) requestData); - log.error("BoL request ({}), file not recalled from tape: {}", requestData.getRequestToken(), + log.error( + "BoL request ({}), file not recalled from tape: {}", + requestData.getRequestToken(), requestData.getSURL().toString()); } return requestSuccessfull; @@ -116,8 +119,8 @@ public Boolean completeRequest(TapeRecallStatus recallStatus) { @Override public String getName() { - return String.format("BoLChunk of request %s for SURL %s", rsd.requestToken(), - requestData.getSURL()); + return String.format( + "BoLChunk of request %s for SURL %s", rsd.requestToken(), requestData.getSURL()); } public String getRequestToken() { diff --git a/src/main/java/it/grid/storm/asynch/BuilderException.java b/src/main/java/it/grid/storm/asynch/BuilderException.java index 968c2b70..49005aa6 100644 --- a/src/main/java/it/grid/storm/asynch/BuilderException.java +++ b/src/main/java/it/grid/storm/asynch/BuilderException.java @@ -1,23 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class BuilderException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = -7167592101486133296L; - public BuilderException() { - - } + public BuilderException() {} public BuilderException(String message) { diff --git a/src/main/java/it/grid/storm/asynch/GlobalStatusManager.java b/src/main/java/it/grid/storm/asynch/GlobalStatusManager.java index bd41713b..a6913a37 100644 --- a/src/main/java/it/grid/storm/asynch/GlobalStatusManager.java +++ b/src/main/java/it/grid/storm/asynch/GlobalStatusManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -9,9 +8,7 @@ import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TStatusCode; - import java.util.LinkedList; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,7 +24,7 @@ * This is a special state: it is not possible to have a request with some chunks in this situation * while others are not. 
All chunks of the same request should invoke this method: it is an anomaly * if it does not occur; it gets signalled properly. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2006 @@ -36,12 +33,11 @@ public class GlobalStatusManager { private static Logger log = LoggerFactory.getLogger(GlobalStatusManager.class); private TRequestToken rt = null; - /** - * LinkedList containing identifiers of all chunks - */ + /** LinkedList containing identifiers of all chunks */ private LinkedList chunks = new LinkedList(); + private boolean finished = false; // boolean true if all chunks of the request - // have been added to the map + // have been added to the map private InternalState internal = InternalState.IN_PROGRESS; public GlobalStatusManager(TRequestToken rt) throws InvalidOverallRequestAttributeException { @@ -57,7 +53,7 @@ public GlobalStatusManager(TRequestToken rt) throws InvalidOverallRequestAttribu * computing the global state. If finishedAdding method has already been invoked, this method has * no effect. */ - synchronized public void addChunk(ChunkData c) { + public synchronized void addChunk(ChunkData c) { if (c == null) { log.warn("GlobalStatusManager: chunkData NOT added because it is null!"); @@ -67,14 +63,16 @@ synchronized public void addChunk(ChunkData c) { log.debug("GlobalStatusManager: asked to add chunkData {}", c.getIdentifier()); if (finished) { - log.warn("GlobalStatusManager: chunkData NOT added because finishedAdding " - + "has already been invoked!"); + log.warn( + "GlobalStatusManager: chunkData NOT added because finishedAdding " + + "has already been invoked!"); return; } if (chunks.contains(Long.valueOf(c.getIdentifier()))) { - log.warn("GlobalStatusManager: chunkData NOT added because of another " - + "chunkData already added with the same identifier"); + log.warn( + "GlobalStatusManager: chunkData NOT added because of another " + + "chunkData already added with the same identifier"); return; } @@ -86,7 +84,7 @@ synchronized public void addChunk(ChunkData c) { * Method used to indicate that no other ChunkDAta will be considered for the computation of the * global state. After invoking this method, all subsequent calls to addChunk will be ignored. */ - synchronized public void finishedAdding() { + public synchronized void finishedAdding() { log.debug("GlobalStatusManager: received finishedAdding signal."); this.finished = true; @@ -116,13 +114,14 @@ synchronized public void finishedAdding() { * not being tracked, an error message gets written to the logs and the global state transits to * ERROR. */ - synchronized public void successfulChunk(ChunkData c) { + public synchronized void successfulChunk(ChunkData c) { log.debug("GlobalStatusManager: received successfulChunk signal for {}", c); if (c == null) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a successful Chunk, but it is null!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a successful Chunk, but it is null!"); log.error("Request: {}", rt); log.error("Chunk: {}", c); internal = InternalState.ERROR; @@ -130,9 +129,10 @@ synchronized public void successfulChunk(ChunkData c) { } if (chunks.isEmpty()) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a successful Chunk, but there are actually no Chunks left to " - + "be considered!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! 
There was an attempt to " + + "signal a successful Chunk, but there are actually no Chunks left to " + + "be considered!"); log.error("Request: {}", rt); log.error("Chunk: {}", c); internal = InternalState.ERROR; @@ -142,9 +142,10 @@ synchronized public void successfulChunk(ChunkData c) { boolean removed = chunks.remove(Long.valueOf(c.getIdentifier())); if (!removed) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a successful Chunk, but it was not originally asked to be " - + "considered in the evaluation!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a successful Chunk, but it was not originally asked to be " + + "considered in the evaluation!"); log.error("Request: {}", rt); log.error("Chunk: {}", c); internal = InternalState.ERROR; @@ -167,7 +168,8 @@ synchronized public void successfulChunk(ChunkData c) { } else if (internal.equals(InternalState.ERROR)) { internal = InternalState.ERROR; // stays the same! } else { - log.error("ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", + log.error( + "ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", internal); return; } @@ -189,7 +191,8 @@ synchronized public void successfulChunk(ChunkData c) { } else if (internal.equals(InternalState.ERROR)) { internal = InternalState.ERROR; // stays the same! } else { - log.error("ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", + log.error( + "ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", internal); return; } @@ -199,9 +202,10 @@ synchronized public void successfulChunk(ChunkData c) { // more chunks may be added to request, and it is all the same if there // are or aren't any left to be processed! - log.debug("GlobalStatusManager: still not finished adding chunks for " - + "consideration, but it is the same whether there are more to be " - + "processed or not..."); + log.debug( + "GlobalStatusManager: still not finished adding chunks for " + + "consideration, but it is the same whether there are more to be " + + "processed or not..."); if (internal.equals(InternalState.IN_PROGRESS)) { internal = InternalState.INTERMEDIATE_SUCCESS; @@ -214,7 +218,8 @@ synchronized public void successfulChunk(ChunkData c) { } else if (internal.equals(InternalState.ERROR)) { internal = InternalState.ERROR; // stays the same! } else { - log.error("ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", + log.error( + "ERROR in GlobalStatusManager: programming bug! Unexpected InternalState: {}", internal); return; } @@ -224,12 +229,13 @@ synchronized public void successfulChunk(ChunkData c) { } else { // This cannot possibly occur by logic! But there could be // multithreading issues in case of bugs! - log.error("ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " - + "condition has materialised: it may be due to multithreading issues! " - + "Request is {}", rt); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " + + "condition has materialised: it may be due to multithreading issues! " + + "Request is {}", + rt); internal = InternalState.ERROR; } - } /** @@ -237,13 +243,14 @@ synchronized public void successfulChunk(ChunkData c) { * is null, there are no Chunks being kept track of, or that specific Chunk is not being tracked, * an error message gets written to the logs and the global state transits to ERROR. 
*/ - synchronized public void failedChunk(ChunkData c) { + public synchronized void failedChunk(ChunkData c) { log.debug("GlobalStatusManager: received failedChunk signal for {}", c); if (c == null) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a failed Chunk, but it is null!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a failed Chunk, but it is null!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -251,8 +258,10 @@ synchronized public void failedChunk(ChunkData c) { } if (chunks.isEmpty()) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a failed Chunk, but there are actually no Chunks left to be " + "considered!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a failed Chunk, but there are actually no Chunks left to be " + + "considered!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -262,9 +271,10 @@ synchronized public void failedChunk(ChunkData c) { boolean removed = chunks.remove(Long.valueOf(c.getIdentifier())); if (!removed) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a successful Chunk, but it was not originally asked to be " - + "considered in the evaluation!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a successful Chunk, but it was not originally asked to be " + + "considered in the evaluation!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -319,9 +329,10 @@ synchronized public void failedChunk(ChunkData c) { } else if (!finished) { // more chunks may be added to request, and it is all the same if there // are or arent any left to be processed! - log.debug("GlobalStatusManager: still not finished adding chunks for " - + "consideration, but it is the same whether there are more to be " - + "processed or not..."); + log.debug( + "GlobalStatusManager: still not finished adding chunks for " + + "consideration, but it is the same whether there are more to be " + + "processed or not..."); if (internal.equals(InternalState.IN_PROGRESS)) { internal = InternalState.INTERMEDIATE_FAIL; @@ -344,9 +355,11 @@ synchronized public void failedChunk(ChunkData c) { } else { // This cannot possibly occur by logic! But there could be // multithreading issues in case of bugs! - log.error("ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " - + "condition has materialised: it may be due to multithreading issues! " - + "Request is {}", rt); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " + + "condition has materialised: it may be due to multithreading issues! " + + "Request is {}", + rt); internal = InternalState.ERROR; } } @@ -357,13 +370,14 @@ synchronized public void failedChunk(ChunkData c) { * track of, or that specific Chunk is not being tracked, an error message gets written to the * logs and the global state transits to ERROR. */ - synchronized public void expiredSpaceLifetimeChunk(ChunkData c) { + public synchronized void expiredSpaceLifetimeChunk(ChunkData c) { log.debug("GlobalStatusManager: received expiredSpaceLifetimeChunk signal for {}", c); if (c == null) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a failed Chunk, but it is null!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! 
There was an attempt to " + + "signal a failed Chunk, but it is null!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -371,8 +385,10 @@ synchronized public void expiredSpaceLifetimeChunk(ChunkData c) { } if (chunks.isEmpty()) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a failed Chunk, but there are actually no Chunks left to be " + "considered!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a failed Chunk, but there are actually no Chunks left to be " + + "considered!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -382,9 +398,10 @@ synchronized public void expiredSpaceLifetimeChunk(ChunkData c) { boolean removed = chunks.remove(Long.valueOf(c.getIdentifier())); if (!removed) { - log.error("ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " - + "signal a successful Chunk, but it was not originally asked to be " - + "considered in the evaluation!"); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! There was an attempt to " + + "signal a successful Chunk, but it was not originally asked to be " + + "considered in the evaluation!"); log.error("Request: {}", rt); log.error("ChunkData: {}", c); internal = InternalState.ERROR; @@ -435,9 +452,10 @@ synchronized public void expiredSpaceLifetimeChunk(ChunkData c) { // more chunks may be added to request, and it is all the same if there // are or arent any left to be processed! - log.debug("GlobalStatusManager: still not finished adding chunks for " - + "consideration, but it is the same whether there are more to be " - + "processed or not..."); + log.debug( + "GlobalStatusManager: still not finished adding chunks for " + + "consideration, but it is the same whether there are more to be " + + "processed or not..."); if (internal.equals(InternalState.IN_PROGRESS)) { internal = InternalState.SPACEFAIL; @@ -458,16 +476,16 @@ synchronized public void expiredSpaceLifetimeChunk(ChunkData c) { // This cannot possibly occur by logic! But there could be // multithreading issues in case of bugs! - log.error("ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " - + "condition has materialised: it may be due to multithreading issues! " - + "Request is {}", rt); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! An impossible logic " + + "condition has materialised: it may be due to multithreading issues! " + + "Request is {}", + rt); internal = InternalState.ERROR; } } - /** - * Method used to update the state in persistence. - */ + /** Method used to update the state in persistence. 
*/ private void saveRequestState() { log.debug("GlobalStatusManager: invoked saveRequestState."); @@ -479,8 +497,10 @@ private void saveRequestState() { if (internal.equals(InternalState.ERROR)) { updatePinFileLifetime = true; - retstat = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Global status cannot be evaluated: single file status must be checked."); + retstat = + new TReturnStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, + "Global status cannot be evaluated: single file status must be checked."); } else if (internal.equals(InternalState.FAIL)) { @@ -495,24 +515,29 @@ private void saveRequestState() { } else if (internal.equals(InternalState.PARTIAL)) { updatePinFileLifetime = true; - retstat = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Some chunks were successful while others failed!"); + retstat = + new TReturnStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, "Some chunks were successful while others failed!"); } else if (internal.equals(InternalState.SPACEFAIL)) { updatePinFileLifetime = false; - retstat = new TReturnStatus(TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, - "Supplied SpaceToken has expired lifetime!"); + retstat = + new TReturnStatus( + TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, "Supplied SpaceToken has expired lifetime!"); } else { updatePinFileLifetime = true; - retstat = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Global status cannot be evaluated: single file status must be checked."); - log.error("ERROR IN GLOBAL STATUS EVALUATION! {} was attempted to be " - + "written into persistence, but it is not a final state!", internal); + retstat = + new TReturnStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, + "Global status cannot be evaluated: single file status must be checked."); + log.error( + "ERROR IN GLOBAL STATUS EVALUATION! {} was attempted to be " + + "written into persistence, but it is not a final state!", + internal); log.error("Request: {}", rt); - } log.debug("GlobalStatusManager: saving into persistence {}", retstat); @@ -528,88 +553,93 @@ private void saveRequestState() { /** * Auxiliary private class that keeps track of internal state of request, with respect to all the * chunks. 
- * + * * @author EGRID - ICTP Trieste * @version 2.0 * @date September, 2006 */ private static class InternalState { - public static InternalState IN_PROGRESS = new InternalState() { + public static InternalState IN_PROGRESS = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState IN_PROGRESS"; - } - }; + return "InternalState IN_PROGRESS"; + } + }; - public static InternalState INTERMEDIATE_SUCCESS = new InternalState() { + public static InternalState INTERMEDIATE_SUCCESS = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState INTERMEDIATE_SUCCESS"; - } - }; + return "InternalState INTERMEDIATE_SUCCESS"; + } + }; - public static InternalState INTERMEDIATE_FAIL = new InternalState() { + public static InternalState INTERMEDIATE_FAIL = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState INTERMEDIATE_FAIL"; - } - }; + return "InternalState INTERMEDIATE_FAIL"; + } + }; - public static InternalState ERROR = new InternalState() { + public static InternalState ERROR = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState ERROR"; - } - }; + return "InternalState ERROR"; + } + }; - public static InternalState FAIL = new InternalState() { + public static InternalState FAIL = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState FAIL"; - } - }; + return "InternalState FAIL"; + } + }; - public static InternalState SPACEFAIL = new InternalState() { + public static InternalState SPACEFAIL = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState SPACEFAIL"; - } - }; + return "InternalState SPACEFAIL"; + } + }; - public static InternalState SUCCESS = new InternalState() { + public static InternalState SUCCESS = + new InternalState() { - @Override - public String toString() { + @Override + public String toString() { - return "InternalState SUCCESS"; - } - }; - - public static InternalState PARTIAL = new InternalState() { + return "InternalState SUCCESS"; + } + }; - @Override - public String toString() { + public static InternalState PARTIAL = + new InternalState() { - return "InternalState PARTIAL"; - } - }; + @Override + public String toString() { - private InternalState() { + return "InternalState PARTIAL"; + } + }; - } + private InternalState() {} } - } diff --git a/src/main/java/it/grid/storm/asynch/InvalidBoLChunkAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidBoLChunkAttributesException.java index 2ffe5b88..662a1201 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidBoLChunkAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidBoLChunkAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -11,7 +10,7 @@ /** * This class represents an Exception thrown when a BoLChunk is created with any null attribute: * GridUser, RequestSummaryData, BoLChunkData or GlobalStatusManager. 
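As a minimal sketch (not part of this patch), the null-check these attribute exceptions support looks like the following; the four-argument constructor reformatted just below records which of the supplied references was null, and the variable names here are placeholders:

// Sketch only: a caller-side guard backed by this exception. Variable names
// (gu, rsd, chunkData, gsm) are placeholders for the caller's own fields.
if (gu == null || rsd == null || chunkData == null || gsm == null) {
  throw new InvalidBoLChunkAttributesException(gu, rsd, chunkData, gsm);
}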
- * + * * @author: CNAF * @version: 1.0 * @date: Aug 2009 @@ -29,8 +28,11 @@ public class InvalidBoLChunkAttributesException extends Exception { * Constructor that requires the GridUser, RequestSummaryData, BoLChunkData and * GlobalStatusManager that caused the exception to be thrown. */ - public InvalidBoLChunkAttributesException(GridUserInterface gu, RequestSummaryData rsd, - BoLPersistentChunkData chunkData, GlobalStatusManager gsm) { + public InvalidBoLChunkAttributesException( + GridUserInterface gu, + RequestSummaryData rsd, + BoLPersistentChunkData chunkData, + GlobalStatusManager gsm) { nullGu = (gu == null); nullRsd = (rsd == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidBoLFeederAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidBoLFeederAttributesException.java index 150c1274..3a43e6eb 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidBoLFeederAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidBoLFeederAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -10,7 +9,7 @@ /** * Class that represents an Exception thrown when a BoLFeeder could not be created because the * supplied RequestSummayData or GridUser or GlobalStatusManager were null. - * + * * @author CNAF * @version 1.0 * @date Aug, 2009 @@ -27,8 +26,8 @@ public class InvalidBoLFeederAttributesException extends Exception { * Public constructor that requires the RequestSummaryData and the GridUser that caused the * exception to be thrown. */ - public InvalidBoLFeederAttributesException(RequestSummaryData rsd, GridUserInterface gu, - GlobalStatusManager gsm) { + public InvalidBoLFeederAttributesException( + RequestSummaryData rsd, GridUserInterface gu, GlobalStatusManager gsm) { nullRequestSummaryData = (rsd == null); nullGridUser = (gu == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidOverallRequestAttributeException.java b/src/main/java/it/grid/storm/asynch/InvalidOverallRequestAttributeException.java index 75831cce..980e3c84 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidOverallRequestAttributeException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidOverallRequestAttributeException.java @@ -1,27 +1,24 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; /** - * Class that represents an Exception thrown when OverallRequest was attempted - * to be created with a null TRequestToken. - * + * Class that represents an Exception thrown when OverallRequest was attempted to be created with a + * null TRequestToken. 
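Here the "OverallRequest" corresponds to the GlobalStatusManager: the PtGFeeder constructor later in this patch catches this exception while creating one. A sketch of that guarded construction, with the GlobalStatusManager constructor signature inferred from that catch block rather than shown in this hunk:

// Sketch only; the GlobalStatusManager(TRequestToken) signature is inferred,
// not shown in this hunk. Mirrors the catch block in the PtGFeeder constructor.
try {
  gsm = new GlobalStatusManager(rsd.requestToken());
} catch (InvalidOverallRequestAttributeException e) {
  log.error("ATTENTION in PtGFeeder! Programming bug when creating GlobalStatusManager! {}",
      e.getMessage(), e);
  throw new InvalidPtGFeederAttributesException(rsd, null, null);
}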
+ * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2006 */ public class InvalidOverallRequestAttributeException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; @Override public String toString() { - return "Null TRequestToken supplied!"; - } + return "Null TRequestToken supplied!"; + } } diff --git a/src/main/java/it/grid/storm/asynch/InvalidPersistentRequestAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPersistentRequestAttributesException.java index c8a8241d..f1652c35 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPersistentRequestAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPersistentRequestAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -11,34 +10,31 @@ /** * This class represents an Exceptin thrown when a PtPChunk is created with any null attribute: * GridUser, RequestSummaryData, PtPChunkData or OverallRequest. - * + * * @author: EGRID - ICTP Trieste * @version: 2.0 * @date: June, 2005 */ public class InvalidPersistentRequestAttributesException extends InvalidRequestAttributesException { - /** - * - */ + /** */ private static final long serialVersionUID = 2359138715273364531L; - /** - * true if RequestSummaryData is null - */ + /** true if RequestSummaryData is null */ private final boolean nullRsd; - /** - * true if OverallRequest is null - */ + /** true if OverallRequest is null */ protected final boolean nullGsm; /** * Constructor that requires the GridUser, RequestSummaryData, PtPChunkData and OverallRequest, * that caused the exception to be thrown. */ - public InvalidPersistentRequestAttributesException(GridUserInterface gu, RequestSummaryData rsd, - PersistentChunkData chunkData, GlobalStatusManager gsm) { + public InvalidPersistentRequestAttributesException( + GridUserInterface gu, + RequestSummaryData rsd, + PersistentChunkData chunkData, + GlobalStatusManager gsm) { super(gu, chunkData); nullRsd = (rsd == null); @@ -50,7 +46,8 @@ public String toString() { return String.format( "Invalid attributes when creating Chunk: " - + "nullGridUser=%b, nullRequestSumamryData=%b, nullChunkData=%b, " + "nullGsm=%b", + + "nullGridUser=%b, nullRequestSumamryData=%b, nullChunkData=%b, " + + "nullGsm=%b", nullGu, nullRsd, nullChunkData, nullGsm); } } diff --git a/src/main/java/it/grid/storm/asynch/InvalidPtGAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPtGAttributesException.java index 4f2c87c6..80f1cb49 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPtGAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPtGAttributesException.java @@ -1,22 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; import it.grid.storm.catalogs.PtGData; import it.grid.storm.griduser.GridUserInterface; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class InvalidPtGAttributesException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 6957632945020144458L; + protected final boolean nullGu; // true if GridUser is null protected final boolean nullChunkData; // true if PtGChunkData is null @@ -34,7 +29,7 @@ public InvalidPtGAttributesException(GridUserInterface gu, PtGData chunkData) { public String toString() { return String.format( - "Invalid attributes when creating PtG: " + "null-GridUser=%b, null-PtGChunkData=%b", nullGu, - nullChunkData); + "Invalid attributes when creating PtG: " + "null-GridUser=%b, null-PtGChunkData=%b", + nullGu, nullChunkData); } } diff --git a/src/main/java/it/grid/storm/asynch/InvalidPtGChunkAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPtGChunkAttributesException.java index 97f805f0..5714660f 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPtGChunkAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPtGChunkAttributesException.java @@ -1,43 +1,36 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import it.grid.storm.catalogs.RequestSummaryData; import it.grid.storm.catalogs.PtGData; +import it.grid.storm.catalogs.RequestSummaryData; import it.grid.storm.griduser.GridUserInterface; /** * This class represents an Exceptin thrown when a PtGChunk is created with any null attribute: * GridUser, RequestSummaryData, PtGChunkData or GlobalStatusManager. - * + * * @author: EGRID - ICTP Trieste * @version: 2.0 * @date: May 16th, 2005 */ public class InvalidPtGChunkAttributesException extends InvalidPtGAttributesException { - /** - * - */ + /** */ private static final long serialVersionUID = 754275707315797289L; - /** - * true if RequestSummaryData is null - */ + /** true if RequestSummaryData is null */ private final boolean nullRsd; - /** - * true if gsm is null - */ + /** true if gsm is null */ private final boolean nullGlobalStatusManager; /** * Constructor that requires the GridUser, RequestSummaryData, PtGChunkData and * GlobalStatusManager that caused the exception to be thrown. */ - public InvalidPtGChunkAttributesException(GridUserInterface gu, RequestSummaryData rsd, - PtGData chunkData, GlobalStatusManager gsm) { + public InvalidPtGChunkAttributesException( + GridUserInterface gu, RequestSummaryData rsd, PtGData chunkData, GlobalStatusManager gsm) { super(gu, chunkData); nullRsd = (rsd == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidPtGFeederAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPtGFeederAttributesException.java index c9e1bb8e..35693fc8 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPtGFeederAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPtGFeederAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -10,16 +9,14 @@ /** * Class that represents an Exception thrown when a PtGFeeder could not be created because the * supplied RequestSummayData or GridUser or GlobalStatusManager were null. - * + * * @author EGRID ICTP * @version 3.0 * @date July, 2005 */ public class InvalidPtGFeederAttributesException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; private final boolean nullRequestSummaryData; @@ -30,8 +27,8 @@ public class InvalidPtGFeederAttributesException extends Exception { * Public constructor that requires the RequestSummaryData and the GridUser that caused the * exception to be thrown. */ - public InvalidPtGFeederAttributesException(RequestSummaryData rsd, GridUserInterface gu, - GlobalStatusManager gsm) { + public InvalidPtGFeederAttributesException( + RequestSummaryData rsd, GridUserInterface gu, GlobalStatusManager gsm) { nullRequestSummaryData = (rsd == null); nullGridUser = (gu == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidPtPFeederAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPtPFeederAttributesException.java index cc565bc2..b061f4e1 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPtPFeederAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPtPFeederAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -10,16 +9,14 @@ /** * Class that represents an Exception thrown when a PtPFeeder could not be created because the * supplied RequestSummayData or GridUser were null. - * + * * @author EGRID ICTP * @version 3.0 * @date June, 2005 */ public class InvalidPtPFeederAttributesException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; private final boolean nullRequestSummaryData; @@ -30,8 +27,8 @@ public class InvalidPtPFeederAttributesException extends Exception { * Public constructor that requires the RequestSummaryData, the GridUser and the * GlobalStatusManager that caused the exception to be thrown. */ - public InvalidPtPFeederAttributesException(RequestSummaryData rsd, GridUserInterface gu, - GlobalStatusManager gsm) { + public InvalidPtPFeederAttributesException( + RequestSummaryData rsd, GridUserInterface gu, GlobalStatusManager gsm) { nullRequestSummaryData = (rsd == null); nullGridUser = (gu == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidPutDoneReplyAttributeException.java b/src/main/java/it/grid/storm/asynch/InvalidPutDoneReplyAttributeException.java index 9c1411ab..d1d97c91 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPutDoneReplyAttributeException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPutDoneReplyAttributeException.java @@ -1,22 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; /** * Class that represents an exception thrown when an SRMPutDoneReply cannot be created because the * supplied TReturnStatus is null. 
- * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date August, 2006 */ public class InvalidPutDoneReplyAttributeException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; @Override diff --git a/src/main/java/it/grid/storm/asynch/InvalidPutReplyAttributeException.java b/src/main/java/it/grid/storm/asynch/InvalidPutReplyAttributeException.java index 3b71b19d..c9b22775 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPutReplyAttributeException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPutReplyAttributeException.java @@ -1,22 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; /** * Class that represents an exception thrown when an SRMPrepareToPutReply cannot be created because * the supplied TRequestToken is null. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2005 */ public class InvalidPutReplyAttributeException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; @Override diff --git a/src/main/java/it/grid/storm/asynch/InvalidPutStatusAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidPutStatusAttributesException.java index f0168edc..352f0719 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidPutStatusAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidPutStatusAttributesException.java @@ -1,25 +1,22 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import it.grid.storm.srm.types.TTURL; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TTURL; /** * Class that represents an exception thrown when an SRMStatusOfPutRequestReply cannot be created * because the supplied toTURL or returnStatus are null. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date October, 2005 */ public class InvalidPutStatusAttributesException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; // boolean indicating whether the supplied TURL is null or not @@ -28,9 +25,7 @@ public class InvalidPutStatusAttributesException extends Exception { // boolean indicating whether the supplied TReturnStatus is null or not private final boolean nullReturnStatus; - /** - * Constructor that requires the attributes that caused the exception to be thrown. - */ + /** Constructor that requires the attributes that caused the exception to be thrown. */ public InvalidPutStatusAttributesException(TTURL toTURL, TReturnStatus returnStatus) { nullToTURL = (toTURL == null); diff --git a/src/main/java/it/grid/storm/asynch/InvalidRequestAttributesException.java b/src/main/java/it/grid/storm/asynch/InvalidRequestAttributesException.java index 0616e637..a0c0e2cf 100644 --- a/src/main/java/it/grid/storm/asynch/InvalidRequestAttributesException.java +++ b/src/main/java/it/grid/storm/asynch/InvalidRequestAttributesException.java @@ -1,28 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; import it.grid.storm.catalogs.RequestData; import it.grid.storm.griduser.GridUserInterface; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class InvalidRequestAttributesException extends Exception { private static final long serialVersionUID = 2933131196386843154L; - /** - * true if GridUser is null - */ + /** true if GridUser is null */ protected final boolean nullGu; - /** - * true if PtPChunkData is null - */ + /** true if PtPChunkData is null */ protected final boolean nullChunkData; /** @@ -39,7 +31,7 @@ public InvalidRequestAttributesException(GridUserInterface gu, RequestData chunk public String toString() { return String.format( - "Invalid attributes when creating Request: " + "nullGridUser=%b, nullChunkData=%b", nullGu, - nullChunkData); + "Invalid attributes when creating Request: " + "nullGridUser=%b, nullChunkData=%b", + nullGu, nullChunkData); } } diff --git a/src/main/java/it/grid/storm/asynch/PtG.java b/src/main/java/it/grid/storm/asynch/PtG.java index 8396823c..91e99ad8 100644 --- a/src/main/java/it/grid/storm/asynch/PtG.java +++ b/src/main/java/it/grid/storm/asynch/PtG.java @@ -1,16 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import java.util.Arrays; -import java.util.Calendar; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.acl.AclManagerFS; import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; @@ -57,6 +49,11 @@ import it.grid.storm.synchcall.data.IdentityInputData; import it.grid.storm.tape.recalltable.TapeRecallCatalog; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; +import java.util.Arrays; +import java.util.Calendar; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class PtG implements Delegable, Chooser, Request, Suspendedable { @@ -64,19 +61,13 @@ public class PtG implements Delegable, Chooser, Request, Suspendedable { private static Logger log = LoggerFactory.getLogger(PtG.class); - /** - * PtGChunkData that holds the specific info for this chunk - */ + /** PtGChunkData that holds the specific info for this chunk */ protected PtGData requestData; - /** - * Time that wil be used in all jit and volatile tracking. - */ + /** Time that wil be used in all jit and volatile tracking. */ protected final Calendar start; - /** - * boolean that indicates the state of the shunk is failure - */ + /** boolean that indicates the state of the shunk is failure */ protected boolean failure = false; /** @@ -84,6 +75,7 @@ public class PtG implements Delegable, Chooser, Request, Suspendedable { * recalled from the tape */ private StoRI bupFileStori; + private LocalFile bupLocalFile; private LocalUser bupLocalUser; private TTURL bupTURL; @@ -109,12 +101,9 @@ public PtG(PtGData reqData) throws IllegalArgumentException { setupACLs = false; log.debug("Skipping ACL setup on PTG as requested by configuration."); } - } - /** - * Method that handles a chunk. It is invoked by the scheduler to carry out the task. - */ + /** Method that handles a chunk. It is invoked by the scheduler to carry out the task. 
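The doIt() method that follows resolves the request's SURL to a StoRI and maps each resolution failure onto an SRM status. A condensed sketch of that mapping, using only the calls visible in this hunk (illustrative, not part of the patch):

// Condensed sketch of the SURL resolution in doIt() below; each failure mode
// selects a different SRM status on the request data.
StoRI fileStoRI = null;
try {
  fileStoRI = NamespaceDirector.getNamespace().resolveStoRIbySURL(requestData.getSURL());
} catch (UnapprochableSurlException e) {
  requestData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); // anonymous path
} catch (NamespaceException e) {
  requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage());
} catch (InvalidSURLException e) {
  requestData.changeStatusSRM_INVALID_PATH(e.getMessage());
}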
*/ @Override public void doIt() { @@ -139,21 +128,32 @@ public void doIt() { try { if (!downgradedToAnonymous && requestData instanceof IdentityInputData) { try { - fileStoRI = NamespaceDirector.getNamespace() - .resolveStoRIbySURL(surl, ((IdentityInputData) requestData).getUser()); + fileStoRI = + NamespaceDirector.getNamespace() + .resolveStoRIbySURL(surl, ((IdentityInputData) requestData).getUser()); } catch (UnapprochableSurlException e) { unapprochableSurl = true; - log.info("Unable to build a stori for surl {} for user {}. " - + "UnapprochableSurlException: {}" + surl, user, e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. " + + "UnapprochableSurlException: {}" + + surl, + user, + e.getMessage()); } catch (NamespaceException e) { failure = true; - log.error("Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", - surl, user, e.getMessage()); + log.error( + "Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", + surl, + user, + e.getMessage()); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (InvalidSURLException e) { failure = true; - log.info("Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", - surl, user, e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", + surl, + user, + e.getMessage()); requestData.changeStatusSRM_INVALID_PATH(e.getMessage()); } } else { @@ -161,36 +161,45 @@ public void doIt() { fileStoRI = NamespaceDirector.getNamespace().resolveStoRIbySURL(requestData.getSURL()); } catch (UnapprochableSurlException e) { failure = true; - log.info("Unable to build a stori for surl {}. " + "UnapprochableSurlException: {}", surl, + log.info( + "Unable to build a stori for surl {}. " + "UnapprochableSurlException: {}", + surl, e.getMessage()); requestData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); } catch (NamespaceException e) { failure = true; - log.error("Unable to build a stori for surl {}. " + "NamespaceException: {}", surl, + log.error( + "Unable to build a stori for surl {}. " + "NamespaceException: {}", + surl, e.getMessage()); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (InvalidSURLException e) { failure = true; - log.info("Unable to build a stori for surl {}. " + "InvalidSURLException: {}", surl, + log.info( + "Unable to build a stori for surl {}. " + "InvalidSURLException: {}", + surl, e.getMessage()); requestData.changeStatusSRM_INVALID_PATH(e.getMessage()); } } } catch (IllegalArgumentException e) { failure = true; - log.error("Unable to get StoRI for surl {}. IllegalArgumentException: ", surl, - e.getMessage()); + log.error( + "Unable to get StoRI for surl {}. 
IllegalArgumentException: ", surl, e.getMessage()); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } if (!failure) { AuthzDecision ptgAuthz; if (!unapprochableSurl) { if (!downgradedToAnonymous && requestData instanceof IdentityInputData) { - ptgAuthz = AuthzDirector.getPathAuthz() - .authorize(((IdentityInputData) requestData).getUser(), SRMFileRequest.PTG, fileStoRI); + ptgAuthz = + AuthzDirector.getPathAuthz() + .authorize( + ((IdentityInputData) requestData).getUser(), SRMFileRequest.PTG, fileStoRI); } else { - ptgAuthz = AuthzDirector.getPathAuthz() - .authorizeAnonymous(SRMFileRequest.PTG, fileStoRI.getStFN()); + ptgAuthz = + AuthzDirector.getPathAuthz() + .authorizeAnonymous(SRMFileRequest.PTG, fileStoRI.getStFN()); } } else { if (requestData.getTransferProtocols().allows(Protocol.HTTP)) { @@ -198,18 +207,25 @@ public void doIt() { fileStoRI = NamespaceDirector.getNamespace().resolveStoRIbySURL(requestData.getSURL()); } catch (UnapprochableSurlException e) { failure = true; - log.info("Unable to build a stori for surl {}. " + "UnapprochableSurlException: {}", - surl, e.getMessage()); + log.info( + "Unable to build a stori for surl {}. " + "UnapprochableSurlException: {}", + surl, + e.getMessage()); requestData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); } catch (InvalidSURLException e) { failure = true; - log.info("Unable to build a stori for surl {}. " + "InvalidSURLException: {}", surl, + log.info( + "Unable to build a stori for surl {}. " + "InvalidSURLException: {}", + surl, e.getMessage()); requestData.changeStatusSRM_INVALID_PATH(e.getMessage()); } catch (Throwable e) { failure = true; - log.error("Unable to build a stori for surl {} {}: {}", surl, - e.getClass().getCanonicalName(), e.getMessage()); + log.error( + "Unable to build a stori for surl {} {}: {}", + surl, + e.getClass().getCanonicalName(), + e.getMessage()); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } if (!failure) { @@ -278,9 +294,10 @@ private void manageIsPermit(StoRI fileStoRI) { requestData.changeStatusSRM_INVALID_PATH( "The requested file either" + " does not exist, or it is a directory!"); failure = true; - log.debug("ANOMALY in PtGChunk! PolicyCollector confirms read rights on" - + " file, yet file does not exist physically! Or, an srmPrepareToGet" - + " was attempted on a directory!"); + log.debug( + "ANOMALY in PtGChunk! PolicyCollector confirms read rights on" + + " file, yet file does not exist physically! Or, an srmPrepareToGet" + + " was attempted on a directory!"); } else { /* File exists and it is not a directory */ /* Sets traverse permissions on file parent folders */ @@ -294,7 +311,9 @@ private void manageIsPermit(StoRI fileStoRI) { log.error( "ERROR in PtGChunk! Unable to find LocalUser for {}! " + "CannotMapUserException: {}", - DataHelper.getRequestor(requestData), e.getMessage(), e); + DataHelper.getRequestor(requestData), + e.getMessage(), + e); return; } if (canTraverse) { @@ -305,8 +324,11 @@ private void manageIsPermit(StoRI fileStoRI) { requestData.changeStatusSRM_FAILURE( "Unable to build the TURL for the provided transfer protocol"); failure = true; - log.error("ERROR in PtGChunk! There was a failure building the " - + "TURL. : TURLBuildingException {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! There was a failure building the " + + "TURL. 
: TURLBuildingException {}", + e.getMessage(), + e); return; } catch (IllegalArgumentException e) { /* @@ -314,16 +336,22 @@ private void manageIsPermit(StoRI fileStoRI) { */ requestData.changeStatusSRM_FAILURE("Unable to decide TURL!"); failure = true; - log.error("ERROR in PtGChunk! invalid TURLPrefix in PtGChunkData " - + "caused StoRI to be unable to establish TTURL! " - + "IllegalArgumentException: {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! invalid TURLPrefix in PtGChunkData " + + "caused StoRI to be unable to establish TTURL! " + + "IllegalArgumentException: {}", + e.getMessage(), + e); return; } catch (InvalidGetTURLProtocolException e) { requestData.changeStatusSRM_FAILURE("Unable to decide TURL!"); failure = true; - log.error("ERROR in PtGChunk! invalid TURL Protocol in PtGChunkData " - + "caused StoRI to be unable to establish TTURL! " - + "InvalidGetTURLProtocolException: {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! invalid TURL Protocol in PtGChunkData " + + "caused StoRI to be unable to establish TTURL! " + + "InvalidGetTURLProtocolException: {}", + e.getMessage(), + e); return; } if (fileStoRI.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { @@ -332,7 +360,6 @@ private void manageIsPermit(StoRI fileStoRI) { (System.currentTimeMillis() / 1000 + requestData.getPinLifeTime().value()); StormEA.setPinned(fileStoRI.getLocalFile().getAbsolutePath(), expDate); - try { TSizeInBytes fileSize = TSizeInBytes.make(fileStoRI.getLocalFile().length(), SizeUnit.BYTES); @@ -343,8 +370,11 @@ private void manageIsPermit(StoRI fileStoRI) { } catch (InvalidTSizeAttributesException e) { requestData.changeStatusSRM_FAILURE("Unable to determine file size"); failure = true; - log.error("ERROR in PtGChunk! error in file size computation! " - + "InvalidTSizeAttributesException: {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! error in file size computation! " + + "InvalidTSizeAttributesException: {}", + e.getMessage(), + e); return; } } @@ -354,8 +384,10 @@ private void manageIsPermit(StoRI fileStoRI) { } catch (FSException e) { requestData.changeStatusSRM_FAILURE("Unable to verify file disk status"); failure = true; - log.error("ERROR in PtGChunk! error in file on disk check! " + "FSException: {}", - e.getMessage(), e); + log.error( + "ERROR in PtGChunk! error in file on disk check! " + "FSException: {}", + e.getMessage(), + e); return; } if (!isOnDisk @@ -364,25 +396,33 @@ private void manageIsPermit(StoRI fileStoRI) { String voName = null; if (!downgradedToAnonymous && requestData instanceof IdentityInputData) { if (((IdentityInputData) requestData).getUser() instanceof AbstractGridUser) { - voName = ((AbstractGridUser) ((IdentityInputData) requestData).getUser()).getVO() - .getValue(); + voName = + ((AbstractGridUser) ((IdentityInputData) requestData).getUser()) + .getVO() + .getValue(); } } try { - new TapeRecallCatalog().insertTask(this, voName, - fileStoRI.getLocalFile().getAbsolutePath()); + new TapeRecallCatalog() + .insertTask(this, voName, fileStoRI.getLocalFile().getAbsolutePath()); } catch (DataAccessException e) { requestData.changeStatusSRM_FAILURE("Unable to request file recall from tape"); failure = true; - log.error("ERROR in PtGChunk! error in tape recall task " - + "insertion! DataAccessException: {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! error in tape recall task " + + "insertion! 
DataAccessException: {}", + e.getMessage(), + e); return; } /* Stores the parameters in this object */ if (!downgradedToAnonymous && requestData instanceof IdentityInputData) { try { - backupData(fileStoRI, fileStoRI.getLocalFile(), - ((IdentityInputData) requestData).getUser().getLocalUser(), turl); + backupData( + fileStoRI, + fileStoRI.getLocalFile(), + ((IdentityInputData) requestData).getUser().getLocalUser(), + turl); } catch (CannotMapUserException e) { requestData.changeStatusSRM_FAILURE( "Unable to find local user for " + DataHelper.getRequestor(requestData)); @@ -390,7 +430,9 @@ private void manageIsPermit(StoRI fileStoRI) { log.error( "ERROR in PtGChunk! Unable to find LocalUser " + "for {}! CannotMapUserException: {}", - DataHelper.getRequestor(requestData), e.getMessage(), e); + DataHelper.getRequestor(requestData), + e.getMessage(), + e); return; } } else { @@ -417,7 +459,9 @@ private void manageIsPermit(StoRI fileStoRI) { log.error( "ERROR in PtGChunk! Unable to find LocalUser for {}! " + "CannotMapUserException: {}", - DataHelper.getRequestor(requestData), e.getMessage(), e); + DataHelper.getRequestor(requestData), + e.getMessage(), + e); return; } if (canRead) { @@ -432,8 +476,11 @@ private void manageIsPermit(StoRI fileStoRI) { } catch (InvalidTSizeAttributesException e) { requestData.changeStatusSRM_FAILURE("Unable to determine file size"); failure = true; - log.error("ERROR in PtGChunk! error in file size computation! " - + "InvalidTSizeAttributesException: {}", e.getMessage(), e); + log.error( + "ERROR in PtGChunk! error in file size computation! " + + "InvalidTSizeAttributesException: {}", + e.getMessage(), + e); return; } @@ -457,13 +504,17 @@ private void manageIsPermit(StoRI fileStoRI) { */ requestData.changeStatusSRM_FAILURE("StoRM is not allowed to work on " + "requested file!"); failure = true; - log.error("ATTENTION in PtGChunk! PtGChunk received a SecurityException " - + "from Java SecurityManager; StoRM cannot check-existence or " - + "check-if-directory for: {}", fileStoRI.getLocalFile().toString(), e); + log.error( + "ATTENTION in PtGChunk! PtGChunk received a SecurityException " + + "from Java SecurityManager; StoRM cannot check-existence or " + + "check-if-directory for: {}", + fileStoRI.getLocalFile().toString(), + e); } } else { - String emsg = String.format("Read access to %s in Storage Area: %s " + "denied!", - requestData.getSURL(), token); + String emsg = + String.format( + "Read access to %s in Storage Area: %s " + "denied!", requestData.getSURL(), token); requestData.changeStatusSRM_AUTHORIZATION_FAILURE(emsg); failure = true; log.debug(emsg); @@ -475,19 +526,16 @@ private void manageIsPermit(StoRI fileStoRI) { * wrong! 
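Further below, setJiTAcl() and setAoTAcl() implement a grant-then-verify pattern for just-in-time and ahead-of-time ACLs. A condensed sketch of the JiT branch (illustrative, error handling omitted; getEffectiveUserPermission mirrors the getEffectiveGroupPermission call shown in the AoT branch and is assumed here):

// Condensed sketch of the JiT ACL branch (see setJiTAcl below): grant the ACE,
// read back the effective permission, then record it for later cleanup.
AclManagerFS.getInstance().grantUserPermission(fileStori.getLocalFile(), localUser, permission);
FilesystemPermission fp = fileStori.getLocalFile().getEffectiveUserPermission(localUser);
if (fp != null && fp.allows(permission)) {
  VolatileAndJiTCatalog.getInstance()
      .trackJiT(fileStori.getPFN(), localUser, permission, start, requestData.getPinLifeTime());
}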
* * @param fileStoRI - * * @param fileStoRI * @param localUser * @return * @throws CannotMapUserException */ - private boolean managePermitTraverseStep(StoRI fileStoRI) throws CannotMapUserException { if (!downgradedToAnonymous && requestData instanceof IdentityInputData) { - if (!setupACLs) - return verifyPath(fileStoRI); + if (!setupACLs) return verifyPath(fileStoRI); return verifyPath(fileStoRI) && setParentsAcl(fileStoRI, ((IdentityInputData) requestData).getUser().getLocalUser()); @@ -495,8 +543,7 @@ private boolean managePermitTraverseStep(StoRI fileStoRI) throws CannotMapUserEx if (verifyPath(fileStoRI)) { - if (setupACLs) - setHttpsServiceParentAcl(fileStoRI); + if (setupACLs) setHttpsServiceParentAcl(fileStoRI); return true; } @@ -516,11 +563,14 @@ private boolean verifyPath(StoRI fileStoRI) { failure = true; String errorString; if (!exists) { - errorString = String.format("The requested SURL is: %s, but its parent %s does not exist!", - fileStoRI.getSURL().toString(), parentStoRI.getSURL().toString()); + errorString = + String.format( + "The requested SURL is: %s, but its parent %s does not exist!", + fileStoRI.getSURL().toString(), parentStoRI.getSURL().toString()); } else { errorString = - String.format("The requested SURL is: %s, but its parent %s is not a directory!", + String.format( + "The requested SURL is: %s, but its parent %s is not a directory!", fileStoRI.getSURL().toString(), parentStoRI.getSURL().toString()); } requestData.changeStatusSRM_INVALID_PATH(errorString); @@ -537,8 +587,8 @@ private boolean setParentsAcl(StoRI fileStoRI, LocalUser localUser) { for (StoRI parentStoRI : fileStoRI.getParents()) { try { - if (!setAcl(parentStoRI, localUser, FilesystemPermission.Traverse, - fileStoRI.hasJustInTimeACLs())) { + if (!setAcl( + parentStoRI, localUser, FilesystemPermission.Traverse, fileStoRI.hasJustInTimeACLs())) { requestData.changeStatusSRM_FAILURE( "Local filesystem mask does " + "not allow setting up correct ACLs for PtG!"); failure = true; @@ -560,8 +610,11 @@ private boolean managePermitReadFileStep(StoRI fileStoRI, TTURL turl) if (setupACLs) { - if (managePermitReadFileStep(fileStoRI, fileStoRI.getLocalFile(), - ((IdentityInputData) requestData).getUser().getLocalUser(), turl)) { + if (managePermitReadFileStep( + fileStoRI, + fileStoRI.getLocalFile(), + ((IdentityInputData) requestData).getUser().getLocalUser(), + turl)) { setDefaultAcl(fileStoRI, fileStoRI.getLocalFile()); @@ -591,8 +644,8 @@ private boolean managePermitReadFileStep(StoRI fileStoRI, TTURL turl) * @param turl * @return */ - private boolean managePermitReadFileStep(StoRI fileStoRI, LocalFile localFile, - LocalUser localUser, TTURL turl) { + private boolean managePermitReadFileStep( + StoRI fileStoRI, LocalFile localFile, LocalUser localUser, TTURL turl) { try { if (!setAcl(fileStoRI, localUser, FilesystemPermission.Read, fileStoRI.hasJustInTimeACLs())) { @@ -602,8 +655,8 @@ private boolean managePermitReadFileStep(StoRI fileStoRI, LocalFile localFile, return false; } } catch (Exception e) { - requestData - .changeStatusSRM_INTERNAL_ERROR("Local filesystem has" + " problems manipulating ACE!"); + requestData.changeStatusSRM_INTERNAL_ERROR( + "Local filesystem has" + " problems manipulating ACE!"); failure = true; return false; } @@ -612,8 +665,12 @@ private boolean managePermitReadFileStep(StoRI fileStoRI, LocalFile localFile, return true; } - private boolean setAcl(StoRI parentStoRI, LocalUser localUser, FilesystemPermission permission, - boolean hasJustInTimeACLs) throws Exception { + 
private boolean setAcl( + StoRI parentStoRI, + LocalUser localUser, + FilesystemPermission permission, + boolean hasJustInTimeACLs) + throws Exception { if (hasJustInTimeACLs) { return setJiTAcl(parentStoRI, localUser, permission); @@ -624,15 +681,21 @@ private boolean setAcl(StoRI parentStoRI, LocalUser localUser, FilesystemPermiss private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermission permission) throws Exception { - log.debug("Adding JiT ACL {} to user {} for directory : '{}'", permission, localUser, + log.debug( + "Adding JiT ACL {} to user {} for directory : '{}'", + permission, + localUser, fileStori.getAbsolutePath()); try { AclManagerFS.getInstance() - .grantUserPermission(fileStori.getLocalFile(), localUser, permission); + .grantUserPermission(fileStori.getLocalFile(), localUser, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user traverse permission on parent file. " - + "IllegalArgumentException: {}", e.getMessage(), e); + log.error( + "Unable to grant user traverse permission on parent file. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); return false; } @@ -641,8 +704,11 @@ private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis if (fp == null) { log.error( "ERROR in PTGChunk! A {} User-ACL was set on {} for user {} " - + "but when subsequently verifying its effectivity, a null ACE was " + "found!", - permission, fileStori.getAbsolutePath(), localUser.toString()); + + "but when subsequently verifying its effectivity, a null ACE was " + + "found!", + permission, + fileStori.getAbsolutePath(), + localUser.toString()); throw new Exception("Unable to verify user ACL"); } @@ -650,36 +716,46 @@ private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis log.error( "ATTENTION in PtGChunk! The local filesystem has a mask that " + "does not allow {} User-ACL to be set up on!", - permission, fileStori.getLocalFile().toString()); + permission, + fileStori.getLocalFile().toString()); return false; } VolatileAndJiTCatalog.getInstance() - .trackJiT(fileStori.getPFN(), localUser, permission, start, requestData.getPinLifeTime()); + .trackJiT(fileStori.getPFN(), localUser, permission, start, requestData.getPinLifeTime()); return true; } private boolean setAoTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermission permission) throws Exception { - log.debug("Adding AoT ACL {} to user {} for directory : '{}'", permission, localUser, + log.debug( + "Adding AoT ACL {} to user {} for directory : '{}'", + permission, + localUser, fileStori.getAbsolutePath()); try { AclManagerFS.getInstance() - .grantGroupPermission(fileStori.getLocalFile(), localUser, permission); + .grantGroupPermission(fileStori.getLocalFile(), localUser, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user traverse permission on parent file. " - + "IllegalArgumentException: {}", e.getMessage(), e); + log.error( + "Unable to grant user traverse permission on parent file. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); return false; } FilesystemPermission fp = fileStori.getLocalFile().getEffectiveGroupPermission(localUser); if (fp == null) { - log.error("ERROR in PtGChunk! A Traverse Group-ACL was set on {} for " - + "user {} but when subsequently verifying its effectivity, a null ACE " + "was found!", - fileStori.getAbsolutePath(), localUser.toString()); + log.error( + "ERROR in PtGChunk! 
A Traverse Group-ACL was set on {} for " + + "user {} but when subsequently verifying its effectivity, a null ACE " + + "was found!", + fileStori.getAbsolutePath(), + localUser.toString()); return false; } if (!fp.allows(permission)) { @@ -708,10 +784,14 @@ private void setHttpsServiceAcl(LocalFile file, FilesystemPermission permission) try { AclManagerFS.getInstance().grantHttpsServiceGroupPermission(file, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user permission on the created folder. " - + "IllegalArgumentException: {}", e.getMessage(), e); - requestData.getStatus() - .extendExplaination("Unable to grant group permission on the created folder"); + log.error( + "Unable to grant user permission on the created folder. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + requestData + .getStatus() + .extendExplaination("Unable to grant group permission on the created folder"); } } @@ -734,7 +814,9 @@ private void setDefaultAcl(StoRI fileStoRI, LocalFile localFile) { log.debug("ACE {} is not valid!", ace.toString()); continue; } - log.debug("Adding DefaultACL for the gid: {} with permission: {}", ace.getGroupID(), + log.debug( + "Adding DefaultACL for the gid: {} with permission: {}", + ace.getGroupID(), ace.getFilePermissionString()); LocalUser u = new LocalUser(ace.getGroupID(), ace.getGroupID()); if (ace.getFilesystemPermission() == null) { @@ -743,14 +825,14 @@ private void setDefaultAcl(StoRI fileStoRI, LocalFile localFile) { } try { AclManagerFS.getInstance() - .grantGroupPermission(localFile, u, ace.getFilesystemPermission()); + .grantGroupPermission(localFile, u, ace.getFilesystemPermission()); } catch (IllegalArgumentException e) { log.error( "Unable to grant group permissions on the file. " + "IllegalArgumentException: {}", - e.getMessage(), e); + e.getMessage(), + e); } } - } /** @@ -816,15 +898,20 @@ public Boolean completeRequest(TapeRecallStatus recallStatus) { try { isOnDisk = bupLocalFile.isOnDisk(); } catch (FSException e) { - log.error("Unable to determine if file {} is on disk. FSException: ", - bupLocalFile.getAbsolutePath(), e.getMessage(), e); - requestData - .changeStatusSRM_FAILURE("Internal error: unable to determine " + "if the file is on disk"); + log.error( + "Unable to determine if file {} is on disk. FSException: ", + bupLocalFile.getAbsolutePath(), + e.getMessage(), + e); + requestData.changeStatusSRM_FAILURE( + "Internal error: unable to determine " + "if the file is on disk"); return false; } if (!isOnDisk) { - log.error("File {} not found on the disk, but it was reported to be " - + "successfully recalled from tape", bupLocalFile.getAbsolutePath()); + log.error( + "File {} not found on the disk, but it was reported to be " + + "successfully recalled from tape", + bupLocalFile.getAbsolutePath()); requestData.changeStatusSRM_FAILURE("Error recalling file from tape"); return false; } @@ -868,9 +955,7 @@ public String getUserDN() { return DataHelper.getRequestor(requestData); } - /** - * @return - */ + /** @return */ @Override public boolean isResultSuccess() { @@ -879,9 +964,7 @@ public boolean isResultSuccess() { || requestData.getStatus().isSRM_SUCCESS()); } - /** - * Manager of the IsDeny state: it indicates that Permission is not granted. - */ + /** Manager of the IsDeny state: it indicates that Permission is not granted. 
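The methods that follow (manageIsDeny, manageIsIndeterminate, manageIsNotApplicabale, plus manageIsPermit earlier) cover the possible outcomes of the path authorization decision. The dispatch itself is outside this hunk; a plausible sketch, assuming AuthzDecision exposes PERMIT, DENY, INDETERMINATE and NOT_APPLICABLE style constants (an assumption, not shown here):

// Plausible sketch only: dispatch on the AuthzDecision returned by
// AuthzDirector.getPathAuthz().authorize(...). Constant names are assumed.
if (AuthzDecision.PERMIT.equals(ptgAuthz)) {
  manageIsPermit(fileStoRI);
} else if (AuthzDecision.DENY.equals(ptgAuthz)) {
  manageIsDeny();
} else if (AuthzDecision.INDETERMINATE.equals(ptgAuthz)) {
  manageIsIndeterminate(ptgAuthz);
} else {
  manageIsNotApplicabale(ptgAuthz);
}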
*/ private void manageIsDeny() { String emsg = String.format("Read access to %s denied!", requestData.getSURL()); @@ -896,8 +979,9 @@ private void manageIsDeny() { */ private void manageIsIndeterminate(AuthzDecision ad) { - requestData.changeStatusSRM_FAILURE("Failure in PolicySource prevented" - + " PolicyCollector from establishing access rights! Processing failed!"); + requestData.changeStatusSRM_FAILURE( + "Failure in PolicySource prevented" + + " PolicyCollector from establishing access rights! Processing failed!"); failure = true; log.error("ERROR in PtGChunk! PolicyCollector received an error from " + "PolicySource!"); log.error("Received state: {}", ad); @@ -910,8 +994,10 @@ private void manageIsIndeterminate(AuthzDecision ad) { */ private void manageIsNotApplicabale(AuthzDecision ad) { - requestData.changeStatusSRM_FAILURE("No policies found for the requested " - + "SURL! Therefore access rights cannot be established! Processing cannot " + "continue!"); + requestData.changeStatusSRM_FAILURE( + "No policies found for the requested " + + "SURL! Therefore access rights cannot be established! Processing cannot " + + "continue!"); failure = true; log.warn("PtGChunk: PolicyCollector found no policy for the supplied SURL!"); log.warn("Received state: {}", ad); @@ -923,24 +1009,35 @@ protected void printRequestOutcome(PtGData inputData) { if (inputData != null) { if (inputData.getSURL() != null) { if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - inputData.getRequestToken(), Arrays.asList(inputData.getSURL().toString())); + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, + inputData.getRequestToken(), + Arrays.asList(inputData.getSURL().toString())); } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, Arrays.asList(inputData.getSURL().toString())); } } else { if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, inputData.getStatus(), inputData, inputData.getRequestToken()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData); } } } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "No input available")); } } diff --git a/src/main/java/it/grid/storm/asynch/PtGBuilder.java b/src/main/java/it/grid/storm/asynch/PtGBuilder.java index bf89cac0..96a4f140 100644 --- a/src/main/java/it/grid/storm/asynch/PtGBuilder.java +++ b/src/main/java/it/grid/storm/asynch/PtGBuilder.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -23,10 +22,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PtGBuilder { private static Logger log = LoggerFactory.getLogger(PtGBuilder.class); @@ -47,25 +43,39 @@ public static PtG build(FileTransferInputData inputData) throws BuilderException PtGData data; try { if (inputData instanceof IdentityInputData) { - data = new IdentityPtGData(((IdentityInputData) inputData).getUser(), toSURL, pinLifetime, - dirOption, transferProtocols, fileSize, status, transferURL); + data = + new IdentityPtGData( + ((IdentityInputData) inputData).getUser(), + toSURL, + pinLifetime, + dirOption, + transferProtocols, + fileSize, + status, + transferURL); } else { - data = new AnonymousPtGData(toSURL, pinLifetime, dirOption, transferProtocols, fileSize, - status, transferURL); + data = + new AnonymousPtGData( + toSURL, pinLifetime, dirOption, transferProtocols, fileSize, status, transferURL); } data.store(); } catch (InvalidPtGDataAttributesException e) { - log.error("Unable to build PtGChunkData. " + "InvalidPtGChunkDataAttributesException: {}", - e.getMessage(), e); + log.error( + "Unable to build PtGChunkData. " + "InvalidPtGChunkDataAttributesException: {}", + e.getMessage(), + e); throw new BuilderException("Error building PtG PtGChunkData. Building failed"); } catch (InvalidFileTransferDataAttributesException e) { log.error( "Unable to build PtGChunkData. " + "InvalidFileTransferChunkDataAttributesException: {}", - e.getMessage(), e); + e.getMessage(), + e); throw new BuilderException("Error building PtG PtGChunkData. Building failed"); } catch (InvalidSurlRequestDataAttributesException e) { - log.error("Unable to build PtGChunkData. " + "InvalidSurlRequestDataAttributesException: {}", - e.getMessage(), e); + log.error( + "Unable to build PtGChunkData. " + "InvalidSurlRequestDataAttributesException: {}", + e.getMessage(), + e); throw new BuilderException("Error building PtG PtGChunkData. Building failed"); } return new PtG(data); diff --git a/src/main/java/it/grid/storm/asynch/PtGFeeder.java b/src/main/java/it/grid/storm/asynch/PtGFeeder.java index 5f39e964..459634ff 100644 --- a/src/main/java/it/grid/storm/asynch/PtGFeeder.java +++ b/src/main/java/it/grid/storm/asynch/PtGFeeder.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -23,9 +22,7 @@ import it.grid.storm.srm.types.TDirOption; import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.DataHelper; - import java.util.Collection; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,18 +30,18 @@ * This class represents a PrepareToGet Feeder: the Feeder that will handle the srmPrepareToGet * statements. It chops a multifile request, and for each part it checks whether the dir option is * set and expands the directory as necessary. - * - * If the request contains nothing to process, an error message gets logged, the number of queued + * + *

<p>If the request contains nothing to process, an error message gets logged, the number of queued * requests is decreased, and the number of finished requests is increased. - * - * If the single part of the request has dirOption NOT set, then the number of queued requests is + * + *
<p>
If the single part of the request has dirOption NOT set, then the number of queued requests is * decreased, the number of progressing requests is increased, the status of that chunk is changed * to SRM_REQUEST_INPROGRESS; the chunk is given to the scheduler for handling. In case the * scheduler cannot accept the chunk for any reason, a message with the requestToken and the chunk's * data is logged, status of the chunk passes to SRM_ABORTED, and at the end the counters are such * that the queued-requests is decreased while the finished-requests is increased. - * - * If the single part of the request DOES have a dirOption set, then it is considered as an + * + *
<p>
If the single part of the request DOES have a dirOption set, then it is considered as an * expansion job and it gets handled now! So the number of queued requests is decreased and that for * progressing ones is increased, while the status is set to SRM_REQUEST_INPROGRESS. Each newly * expanded file gets handled as though it were part of the multifile request WITHOUT the dirOption @@ -52,26 +49,27 @@ * persistence system is created, and the total number of files in this request is updated. Finally * the status of this expansion request is set to SRM_DONE, the number of progressing requests is * decreased and the number of finished requests is increased. - * - * At the beginning of the expansion stage, some anomalous situations are considered and handled as - * follows: - * - * (0) In case of internal errors, they get logged and the expansion request gets failed: the status - * changes to SRM_FAILURE, number of progressing is decreased, number of finished is increased. - * - * (1) The expanded directory is empty: the request is set to SRM_SUCCESS with an explanatory String - * saying so. The number of progressing is decreased, and the number of finished is increased. - * - * (2) The directory does not exist: status set to SRM_INVALID_PATH; number of progressing is + * + *

<p>At the beginning of the expansion stage, some anomalous situations are considered and handled + * as follows: + *
<p>
(0) In case of internal errors, they get logged and the expansion request gets failed: the + * status changes to SRM_FAILURE, number of progressing is decreased, number of finished is + * increased. + *
<p>
(1) The expanded directory is empty: the request is set to SRM_SUCCESS with an explanatory + * String saying so. The number of progressing is decreased, and the number of finished is + * increased. + *
<p>
(2) The directory does not exist: status set to SRM_INVALID_PATH; number of progressing is * decreased; number of finished is increased. - * - * (3) Attempting to expand a file: status set to SRM_INVALID_PATH; number of progressing is + * + *
<p>
(3) Attempting to expand a file: status set to SRM_INVALID_PATH; number of progressing is * decreased; number of finished is increased. - * - * (4) No rights to directory: status set to SRM_AUTHORIZATION_FAILURE; number of progressing is + * + *
<p>
(4) No rights to directory: status set to SRM_AUTHORIZATION_FAILURE; number of progressing is * decreased; number of finished is increased. - * - * + * * @author EGRID - ICTP Trieste * @date March 21st, 2005 * @version 4.0 @@ -87,7 +85,7 @@ public final class PtGFeeder implements Delegable { /** * Public constructor requiring the RequestSummaryData to which this PtGFeeder refers, as well as * the GridUser. If null objects are supplied, an InvalidPtGFeederAttributesException is thrown. - * + * * @param rsd * @throws InvalidPtGFeederAttributesException */ @@ -105,7 +103,8 @@ public PtGFeeder(RequestSummaryData rsd) throws InvalidPtGFeederAttributesExcept } catch (InvalidOverallRequestAttributeException e) { log.error( "ATTENTION in PtGFeeder! Programming bug when creating " + "GlobalStatusManager! {}", - e.getMessage(), e); + e.getMessage(), + e); throw new InvalidPtGFeederAttributesException(rsd, null, null); } } @@ -121,19 +120,18 @@ public void doIt() { Collection chunks = PtGChunkCatalog.getInstance().lookup(rsd.requestToken()); if (chunks.isEmpty()) { - log.warn("ATTENTION in PtGFeeder! This SRM PtG request contained nothing " + "to process! {}", + log.warn( + "ATTENTION in PtGFeeder! This SRM PtG request contained nothing " + "to process! {}", rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "This SRM Get request contained nothing to process!"); + .failRequest(rsd, "This SRM Get request contained nothing to process!"); } else { manageChunks(chunks); log.debug("PtGFeeder: finished pre-processing {}", rsd.requestToken()); } } - /** - * Private method that handles the Collection of chunks associated with the srm command! - */ + /** Private method that handles the Collection of chunks associated with the srm command! */ private void manageChunks(Collection chunks) { log.debug("PtGFeeder - number of chunks in request: {}", chunks.size()); @@ -155,8 +153,9 @@ private void manageChunks(Collection chunks) { /* * fromSURL does _not_ correspond to this installation of StoRM: fail chunk! */ - log.warn("PtGFeeder: srmPtG contract violation! fromSURL does not" - + "correspond to this machine!"); + log.warn( + "PtGFeeder: srmPtG contract violation! fromSURL does not" + + "correspond to this machine!"); log.warn("Request: {}", rsd.requestToken()); log.warn("Chunk: {}", chunkData); @@ -177,21 +176,21 @@ private void manageChunks(Collection chunks) { /** * Private method that handles the case of dirOption NOT set! - * + * * @param auxChunkData */ private void manageNotDirectory(PtGPersistentChunkData auxChunkData) { log.debug("PtGFeeder - scheduling... "); /* change status of this chunk to being processed! */ - auxChunkData - .changeStatusSRM_REQUEST_INPROGRESS("srmPrepareToGet " + "chunk is being processed!"); + auxChunkData.changeStatusSRM_REQUEST_INPROGRESS( + "srmPrepareToGet " + "chunk is being processed!"); PtGChunkCatalog.getInstance().update(auxChunkData); try { /* hand it to scheduler! */ SchedulerFacade.getInstance() - .chunkScheduler() - .schedule(new PtGPersistentChunk(rsd, auxChunkData, gsm)); + .chunkScheduler() + .schedule(new PtGPersistentChunk(rsd, auxChunkData, gsm)); log.debug("PtGFeeder - chunk scheduled."); } catch (InvalidPersistentRequestAttributesException e) { log.error("UNEXPECTED ERROR in PtGFeeder! Chunk could not be created!"); @@ -232,7 +231,7 @@ private void manageNotDirectory(PtGPersistentChunkData auxChunkData) { /** * Private method that handles the case of a PtGChunkData having dirOption set! 
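manageIsDirectory(), whose javadoc opens here, expands the directory and re-enqueues every child as an ordinary non-directory chunk. A condensed sketch of the child-creation loop, built around the PtGPersistentChunkData constructor call visible further below (the notDir option, the catch type and the follow-up bookkeeping are elided or assumed):

// Condensed sketch of the expansion loop in manageIsDirectory() below.
// notDir is a TDirOption built to forbid further expansion; its construction
// and the surrounding error handling are elided from this sketch.
for (StoRI storiChild : storiChildren) {
  try {
    PtGPersistentChunkData childData =
        new PtGPersistentChunkData(
            chunkData.getUser(), chunkData.getRequestToken(), storiChild.getSURL(),
            chunkData.getPinLifeTime(), notDir, chunkData.getTransferProtocols(),
            chunkData.getFileSize(), chunkData.getStatus(), chunkData.getTransferURL());
    PtGChunkCatalog.getInstance().addChild(childData); // fills in the new DB row for the child
    // each child is then handled like a chunk without dirOption: tracked by the
    // GlobalStatusManager and scheduled (that part is elided from this hunk)
  } catch (Exception e) {
    // mirrors the hunk: a child that cannot be created is logged and skipped
    log.error("ERROR in PtGFeeder! While expanding recursive request, it was not"
        + " possible to create a new PtGPersistentChunkData! {}", e.getMessage(), e);
  }
}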
- * + * * @param chunkData */ private void manageIsDirectory(PtGPersistentChunkData chunkData) { @@ -254,20 +253,32 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { } catch (IllegalArgumentException e) { log.error( "Unable to build a stori for surl {} for user {}. " + "IllegalArgumentException: {}", - surl, user, e.getMessage(), e); + surl, + user, + e.getMessage(), + e); chunkData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (UnapprochableSurlException e) { log.info( "Unable to build a stori for surl {} for user {}. " + "UnapprochableSurlException: {}", - surl, user, e.getMessage()); + surl, + user, + e.getMessage()); chunkData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); } catch (NamespaceException e) { - log.error("Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", - surl, user, e.getMessage(), e); + log.error( + "Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", + surl, + user, + e.getMessage(), + e); chunkData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (InvalidSURLException e) { - log.info("Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", - surl, user, e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", + surl, + user, + e.getMessage()); chunkData.changeStatusSRM_INVALID_PATH(e.getMessage()); } finally { if (stori == null) { @@ -292,16 +303,18 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { * The expanded directory was empty, anyway a request on a Directory is considered done * whether there is somethig to expand or not! */ - chunkData.changeStatusSRM_FILE_PINNED("BEWARE! srmPrepareToGet with " - + "dirOption set: it referred to a directory that was empty!"); + chunkData.changeStatusSRM_FILE_PINNED( + "BEWARE! srmPrepareToGet with " + + "dirOption set: it referred to a directory that was empty!"); PtGChunkCatalog.getInstance().update(chunkData); gsm.successfulChunk(chunkData); return; } catch (InvalidDescendantsPathRequestException e) { - log.debug("ATTENTION in PtGFeeder! PtGFeeder received request" - + " to expand non-existing directory."); + log.debug( + "ATTENTION in PtGFeeder! PtGFeeder received request" + + " to expand non-existing directory."); // Attempting to expand non existent directory! chunkData.changeStatusSRM_INVALID_PATH( "srmPrepareToGet with dirOption " + "set: it referred to a non-existent directory!"); @@ -318,7 +331,6 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { PtGChunkCatalog.getInstance().update(chunkData); gsm.failedChunk(chunkData); return; - } log.debug("PtGFeeder - Number of children in parent: {}", storiChildren.size()); @@ -334,7 +346,10 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { log.error( "UNEXPECTED ERROR in PtGFeeder! 
Could not create TDirOption " + "specifying non-expansion!\n{}\nRequest: {}\nChunk: {}", - e.getMessage(), rsd.requestToken(), chunkData, e); + e.getMessage(), + rsd.requestToken(), + chunkData, + e); chunkData.changeStatusSRM_FAILURE( "srmPrepareToGet with dirOption set:" + " expansion failure due to internal error!"); @@ -346,10 +361,17 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { PtGPersistentChunkData childData; for (StoRI storiChild : storiChildren) { try { - childData = new PtGPersistentChunkData(chunkData.getUser(), chunkData.getRequestToken(), - storiChild.getSURL(), chunkData.getPinLifeTime(), notDir, - chunkData.getTransferProtocols(), chunkData.getFileSize(), chunkData.getStatus(), - chunkData.getTransferURL()); + childData = + new PtGPersistentChunkData( + chunkData.getUser(), + chunkData.getRequestToken(), + storiChild.getSURL(), + chunkData.getPinLifeTime(), + notDir, + chunkData.getTransferProtocols(), + chunkData.getFileSize(), + chunkData.getStatus(), + chunkData.getTransferURL()); /* fill in new db row and set the PrimaryKey of ChildData! */ PtGChunkCatalog.getInstance().addChild(childData); log.debug("PtGFeeder - added child data: {}", childData); @@ -362,7 +384,8 @@ private void manageIsDirectory(PtGPersistentChunkData chunkData) { log.error( "ERROR in PtGFeeder! While expanding recursive request," + " it was not possible to create a new PtGPersistentChunkData! {}", - e.getMessage(), e); + e.getMessage(), + e); } } log.debug("PtGFeeder - expansion completed."); diff --git a/src/main/java/it/grid/storm/asynch/PtGPersistentChunk.java b/src/main/java/it/grid/storm/asynch/PtGPersistentChunk.java index 8d01b7cb..bc8c22a3 100644 --- a/src/main/java/it/grid/storm/asynch/PtGPersistentChunk.java +++ b/src/main/java/it/grid/storm/asynch/PtGPersistentChunk.java @@ -1,10 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import java.util.Arrays; import it.grid.storm.catalogs.PtGChunkCatalog; import it.grid.storm.catalogs.PtGData; import it.grid.storm.catalogs.PtGPersistentChunkData; @@ -13,7 +11,7 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.synchcall.command.CommandHelper; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - +import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,7 +60,7 @@ * to manipulate the ACLs, StoRM was not configured for the underlying FileSystem, or there was an * unexpected error; in the first case the status changes to SRM_INVALID_PATH, while in all other * ones it changes to SRM_FAILURE; corresponding messagges get logged. - * + * * @author EGRID - ICTP Trieste * @date May 3rd, 2005 * @version 4.0 @@ -71,9 +69,7 @@ public class PtGPersistentChunk extends PtG implements PersistentRequestChunk { private static Logger log = LoggerFactory.getLogger(PtGPersistentChunk.class); - /** - * RequestSummaryData containing all the statistics for the originating srmPrepareToGetRequest - */ + /** RequestSummaryData containing all the statistics for the originating srmPrepareToGetRequest */ private final RequestSummaryData rsd; /** @@ -86,16 +82,18 @@ public class PtGPersistentChunk extends PtG implements PersistentRequestChunk { * Constructor requiring the GridUser, the RequestSummaryData and the PtGChunkData about this * chunk. 
If the supplied attributes are null, an InvalidPtGChunkAttributesException is thrown. */ - public PtGPersistentChunk(RequestSummaryData summaryData, PtGPersistentChunkData chunkData, - GlobalStatusManager gsm) + public PtGPersistentChunk( + RequestSummaryData summaryData, PtGPersistentChunkData chunkData, GlobalStatusManager gsm) throws InvalidRequestAttributesException, InvalidPersistentRequestAttributesException { super(chunkData); if (summaryData == null || gsm == null) { throw new IllegalArgumentException( - "Unable to instantiate the object, illegal arguments: summaryData=" + summaryData - + " chunkData=" + chunkData); + "Unable to instantiate the object, illegal arguments: summaryData=" + + summaryData + + " chunkData=" + + chunkData); } this.rsd = summaryData; this.gsm = gsm; @@ -110,13 +108,17 @@ public Boolean completeRequest(TapeRecallStatus recallStatus) { if (success) { gsm.successfulChunk((PtGPersistentChunkData) requestData); - log.info("Completed PtG request ({}), file successfully recalled from " + "tape: {}", - rsd.requestToken(), requestData.getSURL().toString()); + log.info( + "Completed PtG request ({}), file successfully recalled from " + "tape: {}", + rsd.requestToken(), + requestData.getSURL().toString()); } else { gsm.failedChunk((PtGPersistentChunkData) requestData); - log.error("BoL request ({}), file not recalled from tape: {}", rsd.requestToken(), + log.error( + "BoL request ({}), file not recalled from tape: {}", + rsd.requestToken(), requestData.getSURL().toString()); } return success; @@ -178,24 +180,35 @@ protected void printRequestOutcome(PtGData inputData) { if (inputData != null) { if (inputData.getSURL() != null) { if (rsd.requestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - rsd.requestToken(), Arrays.asList(inputData.getSURL().toString())); + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, + rsd.requestToken(), + Arrays.asList(inputData.getSURL().toString())); } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, Arrays.asList(inputData.getSURL().toString())); } } else { if (rsd.requestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - rsd.requestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, inputData.getStatus(), inputData, rsd.requestToken()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData); } } } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "No input available")); } } diff --git a/src/main/java/it/grid/storm/asynch/PtP.java b/src/main/java/it/grid/storm/asynch/PtP.java index 111505c6..8f128a8d 100644 --- a/src/main/java/it/grid/storm/asynch/PtP.java +++ b/src/main/java/it/grid/storm/asynch/PtP.java @@ -1,17 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import java.io.IOException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.acl.AclManagerFS; import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; @@ -59,6 +50,12 @@ import it.grid.storm.synchcall.command.CommandHelper; import it.grid.storm.synchcall.data.DataHelper; import it.grid.storm.synchcall.data.IdentityInputData; +import java.io.IOException; +import java.util.Arrays; +import java.util.Calendar; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class that represents a chunk of an srmPrepareToPut request: it handles a single file of a @@ -100,24 +97,16 @@ public class PtP implements Delegable, Chooser, Request { private static Logger log = LoggerFactory.getLogger(PtP.class); - /** - * PtPChunkData that holds the specific info for this chunk - */ + /** PtPChunkData that holds the specific info for this chunk */ protected final PtPData requestData; - /** - * Time that wil be used in all jit and volatile tracking. - */ + /** Time that wil be used in all jit and volatile tracking. */ protected final Calendar start; - /** - * boolean that indicates the state of the shunk is failure - */ + /** boolean that indicates the state of the shunk is failure */ protected boolean failure = false; - /** - * boolean that indicates a failed chunk because of an expired space token - */ + /** boolean that indicates a failed chunk because of an expired space token */ protected boolean spacefailure = false; /** @@ -135,9 +124,7 @@ public PtP(PtPData chunkData) throws InvalidRequestAttributesException { start = Calendar.getInstance(); } - /** - * Method that handles a chunk. It is invoked by the scheduler to carry out the task. - */ + /** Method that handles a chunk. It is invoked by the scheduler to carry out the task. */ @Override public void doIt() { @@ -176,8 +163,9 @@ public void doIt() { try { if (requestData instanceof IdentityInputData) { - fileStoRI = NamespaceDirector.getNamespace() - .resolveStoRIbySURL(surl, ((IdentityInputData) requestData).getUser()); + fileStoRI = + NamespaceDirector.getNamespace() + .resolveStoRIbySURL(surl, ((IdentityInputData) requestData).getUser()); } else { fileStoRI = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); } @@ -185,20 +173,32 @@ public void doIt() { } catch (UnapprochableSurlException e) { log.info( "Unable to build a stori for surl {} for user {}. " + "UnapprochableSurlExceptions: {}", - surl, user, e.getMessage()); + surl, + user, + e.getMessage()); requestData.changeStatusSRM_AUTHORIZATION_FAILURE(e.getMessage()); } catch (IllegalArgumentException e) { log.error( "Unable to build a stori for surl {} for user {}. " + "IllegalArgumentException: {}", - surl, user, e.getMessage(), e); + surl, + user, + e.getMessage(), + e); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (NamespaceException e) { - log.error("Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", - surl, user, e.getMessage(), e); + log.error( + "Unable to build a stori for surl {} for user {}. " + "NamespaceException: {}", + surl, + user, + e.getMessage(), + e); requestData.changeStatusSRM_INTERNAL_ERROR(e.getMessage()); } catch (InvalidSURLException e) { - log.info("Unable to build a stori for surl {} for user {}. 
" + "InvalidSURLException: {}", - surl, user, e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. " + "InvalidSURLException: {}", + surl, + user, + e.getMessage()); requestData.changeStatusSRM_INVALID_PATH(e.getMessage()); } finally { if (fileStoRI == null) { @@ -215,9 +215,13 @@ public void doIt() { exists = fileStoRI.getLocalFile().exists(); } catch (SecurityException e) { - log.error("ATTENTION in PtPChunk! PtPChunk received a SecurityException " - + "from Java SecurityManager: StoRM cannot check for the existence of " - + "file: {}; exception: {}", fileStoRI.getLocalFile().toString(), e.getMessage(), e); + log.error( + "ATTENTION in PtPChunk! PtPChunk received a SecurityException " + + "from Java SecurityManager: StoRM cannot check for the existence of " + + "file: {}; exception: {}", + fileStoRI.getLocalFile().toString(), + e.getMessage(), + e); failure = true; requestData.changeStatusSRM_FAILURE("StoRM is not allowed to work on " + "requested file!"); printRequestOutcome(requestData); @@ -238,8 +242,8 @@ public void doIt() { } else if (mode.equals(TOverwriteMode.NEVER)) { - requestData - .changeStatusSRM_DUPLICATION_ERROR("Cannot srmPut file " + "because it already exists!"); + requestData.changeStatusSRM_DUPLICATION_ERROR( + "Cannot srmPut file " + "because it already exists!"); failure = true; } else { @@ -249,7 +253,6 @@ public void doIt() { mode); requestData.changeStatusSRM_FAILURE("Unexpected overwrite option! " + "Processing failed!"); failure = true; - } } printRequestOutcome(requestData); @@ -263,8 +266,10 @@ private void manageNotExistentFile(StoRI fileStoRI) { AuthzDecision decision; if (requestData instanceof IdentityInputData) { - decision = AuthzDirector.getPathAuthz() - .authorize(((IdentityInputData) requestData).getUser(), SRMFileRequest.PTP, fileStoRI); + decision = + AuthzDirector.getPathAuthz() + .authorize( + ((IdentityInputData) requestData).getUser(), SRMFileRequest.PTP, fileStoRI); } else { decision = AuthzDirector.getPathAuthz().authorizeAnonymous(SRMFileRequest.PTP, fileStoRI.getStFN()); @@ -299,8 +304,9 @@ private void manageOverwriteExistingFile(StoRI fileStoRI) { AuthzDecision decision; if (requestData instanceof IdentityInputData) { - decision = AuthzDirector.getPathAuthz() - .authorize(((IdentityInputData) requestData).getUser(), SRMFileRequest.RM, fileStoRI); + decision = + AuthzDirector.getPathAuthz() + .authorize(((IdentityInputData) requestData).getUser(), SRMFileRequest.RM, fileStoRI); } else { decision = AuthzDirector.getPathAuthz().authorizeAnonymous(SRMFileRequest.RM, fileStoRI.getStFN()); @@ -312,7 +318,9 @@ private void manageOverwriteExistingFile(StoRI fileStoRI) { requestData.changeStatusSRM_AUTHORIZATION_FAILURE( "Write access to " + requestData.getSURL() + " denied!"); failure = true; - log.debug("Write access to {} for user {} denied!", requestData.getSURL(), + log.debug( + "Write access to {} for user {} denied!", + requestData.getSURL(), DataHelper.getRequestor(requestData)); } else { manageAnomaly(decision); @@ -320,9 +328,7 @@ private void manageOverwriteExistingFile(StoRI fileStoRI) { } } - /** - * Private method that handles the case of Permit on Create and Write rights! - */ + /** Private method that handles the case of Permit on Create and Write rights! 
*/ private void managePermit(StoRI fileStoRI) { TSpaceToken token = new SpaceHelper().getTokenFromStoRI(PtP.log, fileStoRI); @@ -336,11 +342,15 @@ private void managePermit(StoRI fileStoRI) { isSpaceAuthorized = spaceAuth.authorizeAnonymous(SRMSpaceRequest.PTP); } if (!isSpaceAuthorized) { - requestData.changeStatusSRM_AUTHORIZATION_FAILURE("Create/Write access for " - + requestData.getSURL() + " in Storage Area: " + token + " denied!"); + requestData.changeStatusSRM_AUTHORIZATION_FAILURE( + "Create/Write access for " + + requestData.getSURL() + + " in Storage Area: " + + token + + " denied!"); failure = true; - log.debug("Create/Write access for {} in Storage Area: {} denied!", requestData.getSURL(), - token); + log.debug( + "Create/Write access for {} in Storage Area: {} denied!", requestData.getSURL(), token); return; } TTURL auxTURL; @@ -352,7 +362,8 @@ private void managePermit(StoRI fileStoRI) { log.error( "ERROR in PtPChunk! Null TURLPrefix in PtPChunkData caused " + "StoRI to be unable to establish TTURL! IllegalArgumentException: {}", - e.getMessage(), e); + e.getMessage(), + e); return; } catch (InvalidGetTURLProtocolException e) { requestData.changeStatusSRM_NOT_SUPPORTED( @@ -364,8 +375,11 @@ private void managePermit(StoRI fileStoRI) { requestData.changeStatusSRM_FAILURE( "Unable to build the TURL for the " + "provided transfer protocol"); failure = true; - log.error("ERROR in PtPChunk! There was a failure building the TURL. " - + "TURLBuildingException: {} ", e.getMessage(), e); + log.error( + "ERROR in PtPChunk! There was a failure building the TURL. " + + "TURLBuildingException: {} ", + e.getMessage(), + e); return; } boolean canTraverse; @@ -378,7 +392,9 @@ private void managePermit(StoRI fileStoRI) { failure = true; log.error( "ERROR in PtGChunk! Unable to find LocalUser for {}! " + "CannotMapUserException: {}", - DataHelper.getRequestor(requestData), e.getMessage(), e); + DataHelper.getRequestor(requestData), + e.getMessage(), + e); return; } if (canTraverse) { @@ -394,7 +410,9 @@ private void managePermit(StoRI fileStoRI) { failure = true; log.error( "ERROR in PtGChunk! Unable to find LocalUser for {}! 
" + "CannotMapUserException: {}", - DataHelper.getRequestor(requestData), e.getMessage(), e); + DataHelper.getRequestor(requestData), + e.getMessage(), + e); return; } if (!canWrite) { @@ -409,8 +427,8 @@ private void managePermit(StoRI fileStoRI) { failure = false; if (requestData.fileStorageType().equals(TFileStorageType.VOLATILE)) { VolatileAndJiTCatalog.getInstance() - .trackVolatile(fileStoRI.getPFN(), Calendar.getInstance(), - requestData.fileLifetime()); + .trackVolatile( + fileStoRI.getPFN(), Calendar.getInstance(), requestData.fileLifetime()); } } } else { @@ -445,7 +463,6 @@ private boolean managePermitTraverseStep(StoRI fileStoRI) throws CannotMapUserEx /** * @param fileStoRI * @return - * */ private boolean preparePath(StoRI fileStoRI) { @@ -465,8 +482,8 @@ private boolean prepareDirectory(LocalFile dir) { if (dir.exists()) { if (!dir.isDirectory()) { - requestData - .changeStatusSRM_INVALID_PATH(dir.getAbsolutePath() + " exists but is not a directory!"); + requestData.changeStatusSRM_INVALID_PATH( + dir.getAbsolutePath() + " exists but is not a directory!"); failure = true; return false; } @@ -474,10 +491,11 @@ private boolean prepareDirectory(LocalFile dir) { } if (!automaticDirectoryCreation) { - log.debug("srmPtP: {} doesn't exist and automatic directory creation is " + "disabled", + log.debug( + "srmPtP: {} doesn't exist and automatic directory creation is " + "disabled", dir.getAbsolutePath()); - requestData - .changeStatusSRM_INVALID_PATH("Parent " + dir.getAbsolutePath() + " doesn't exist!"); + requestData.changeStatusSRM_INVALID_PATH( + "Parent " + dir.getAbsolutePath() + " doesn't exist!"); failure = true; return false; } @@ -513,24 +531,26 @@ private void updateUsedSpace(LocalFile dir) { private boolean setParentAcl(StoRI fileStoRI, LocalUser localUser) { - log.debug("PtPChunk: setting parent traverse ACL for {} to user {}", - fileStoRI.getAbsolutePath(), localUser); + log.debug( + "PtPChunk: setting parent traverse ACL for {} to user {}", + fileStoRI.getAbsolutePath(), + localUser); for (StoRI parentStoRI : fileStoRI.getParents()) { LocalFile parentFile = parentStoRI.getLocalFile(); log.debug("PtPChunk TraverseStep - processing parent {}", parentFile.toString()); try { - if (!setAcl(parentStoRI, localUser, FilesystemPermission.Traverse, - fileStoRI.hasJustInTimeACLs())) { + if (!setAcl( + parentStoRI, localUser, FilesystemPermission.Traverse, fileStoRI.hasJustInTimeACLs())) { requestData.changeStatusSRM_FAILURE( "Local filesystem mask does " + "not allow setting up correct ACLs for PtG!"); failure = true; return false; } } catch (Exception e) { - requestData - .changeStatusSRM_INTERNAL_ERROR("Local filesystem has" + " problems manipulating ACE!"); + requestData.changeStatusSRM_INTERNAL_ERROR( + "Local filesystem has" + " problems manipulating ACE!"); failure = true; return false; } @@ -541,8 +561,8 @@ private boolean setParentAcl(StoRI fileStoRI, LocalUser localUser) { private boolean managePermitSetFileStep(StoRI fileStoRI) throws CannotMapUserException { if (requestData instanceof IdentityInputData) { - if (managePermitSetFileStep(fileStoRI, - ((IdentityInputData) requestData).getUser().getLocalUser())) { + if (managePermitSetFileStep( + fileStoRI, ((IdentityInputData) requestData).getUser().getLocalUser())) { setDefaultAcl(fileStoRI); setTapeManagementAcl(fileStoRI); return true; @@ -563,28 +583,32 @@ private boolean managePermitSetFileStep(StoRI fileStoRI, LocalUser localUser) { // BEWARE THAT READ PERMISSION IS NEEDED BECAUSE GRID_FTP SERVER _ALSO_ // REQUIRES 
READ RIGHTS ELSE IT WON T BE ABLE TO WRITE THE FILE!!! - log.debug("PtPChunk: setting RW ACL for {} for user {}", fileStoRI.getAbsolutePath(), - localUser); + log.debug( + "PtPChunk: setting RW ACL for {} for user {}", fileStoRI.getAbsolutePath(), localUser); try { - if (!setAcl(fileStoRI, localUser, FilesystemPermission.ReadWrite, - fileStoRI.hasJustInTimeACLs())) { + if (!setAcl( + fileStoRI, localUser, FilesystemPermission.ReadWrite, fileStoRI.hasJustInTimeACLs())) { requestData.changeStatusSRM_FAILURE( "Local filesystem mask does " + "not allow setting up correct ACLs for PtP!"); failure = true; return false; } } catch (Exception e) { - requestData - .changeStatusSRM_INTERNAL_ERROR("Local filesystem has" + " problems manipulating ACE!"); + requestData.changeStatusSRM_INTERNAL_ERROR( + "Local filesystem has" + " problems manipulating ACE!"); failure = true; return false; } return true; } - private boolean setAcl(StoRI parentStoRI, LocalUser localUser, FilesystemPermission permission, - boolean hasJustInTimeACLs) throws Exception { + private boolean setAcl( + StoRI parentStoRI, + LocalUser localUser, + FilesystemPermission permission, + boolean hasJustInTimeACLs) + throws Exception { if (hasJustInTimeACLs) { return setJiTAcl(parentStoRI, localUser, permission); @@ -595,15 +619,21 @@ private boolean setAcl(StoRI parentStoRI, LocalUser localUser, FilesystemPermiss private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermission permission) throws Exception { - log.debug("SrmMkdir: Adding JiT ACL {} to user {} for directory: '{}'", permission, localUser, + log.debug( + "SrmMkdir: Adding JiT ACL {} to user {} for directory: '{}'", + permission, + localUser, fileStori.getAbsolutePath()); try { AclManagerFS.getInstance() - .grantUserPermission(fileStori.getLocalFile(), localUser, permission); + .grantUserPermission(fileStori.getLocalFile(), localUser, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user traverse permission on parent file. " - + "IllegalArgumentException: {}", e.getMessage(), e); + log.error( + "Unable to grant user traverse permission on parent file. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); return false; } @@ -612,20 +642,23 @@ private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis if (fp != null) { if (fp.allows(permission)) { VolatileAndJiTCatalog.getInstance() - .trackJiT(fileStori.getPFN(), localUser, permission, start, requestData.pinLifetime()); + .trackJiT(fileStori.getPFN(), localUser, permission, start, requestData.pinLifetime()); response = true; } else { log.error( "ATTENTION in PtPChunk! The local filesystem has" + " a mask that does not allow {} User-ACL to be set up on!", - permission, fileStori.getLocalFile().toString()); + permission, + fileStori.getLocalFile().toString()); response = false; } } else { log.error( "ERROR in PtPChunk! 
A {} User-ACL was set on {} for user {} but " + "when subsequently verifying its effectivity, a null ACE was found!", - permission, fileStori.getAbsolutePath(), localUser.toString()); + permission, + fileStori.getAbsolutePath(), + localUser.toString()); throw new Exception("Unable to verify user ACL"); } return response; @@ -634,15 +667,21 @@ private boolean setJiTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis private boolean setAoTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermission permission) throws Exception { - log.debug("SrmMkdir: Adding AoT ACL {} to user {} for directory: '{}'", permission, localUser, + log.debug( + "SrmMkdir: Adding AoT ACL {} to user {} for directory: '{}'", + permission, + localUser, fileStori.getAbsolutePath()); try { AclManagerFS.getInstance() - .grantGroupPermission(fileStori.getLocalFile(), localUser, permission); + .grantGroupPermission(fileStori.getLocalFile(), localUser, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user traverse permission on parent file. " - + "IllegalArgumentException: {}", e.getMessage(), e); + log.error( + "Unable to grant user traverse permission on parent file. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); return false; } @@ -659,9 +698,12 @@ private boolean setAoTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis response = false; } } else { - log.error("ERROR in PtPChunk! A Traverse Group-ACL was set on {} for " - + "user {} but when subsequently verifying its effectivity, a null ACE " + "was found!", - fileStori.getAbsolutePath(), localUser.toString()); + log.error( + "ERROR in PtPChunk! A Traverse Group-ACL was set on {} for " + + "user {} but when subsequently verifying its effectivity, a null ACE " + + "was found!", + fileStori.getAbsolutePath(), + localUser.toString()); response = false; } return response; @@ -669,7 +711,8 @@ private boolean setAoTAcl(StoRI fileStori, LocalUser localUser, FilesystemPermis private void setHttpsServiceParentAcl(StoRI fileStoRI) { - log.debug("SrmMkdir: Adding parent https ACL for directory: '{}' parents", + log.debug( + "SrmMkdir: Adding parent https ACL for directory: '{}' parents", fileStoRI.getAbsolutePath()); for (StoRI parentStoRI : fileStoRI.getParents()) { setHttpsServiceAcl(parentStoRI.getLocalFile(), FilesystemPermission.Traverse); @@ -683,16 +726,18 @@ private void setHttpsServiceAcl(LocalFile file, FilesystemPermission permission) try { AclManagerFS.getInstance().grantHttpsServiceGroupPermission(file, permission); } catch (IllegalArgumentException e) { - log.error("Unable to grant user permission on the created folder. " - + "IllegalArgumentException: {}", e.getMessage(), e); - requestData.getStatus() - .extendExplaination("Unable to grant group permission on the created folder"); + log.error( + "Unable to grant user permission on the created folder. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + requestData + .getStatus() + .extendExplaination("Unable to grant group permission on the created folder"); } } - /** - * Private method used to manage ReserveSpace. Returns false if something went wrong! - */ + /** Private method used to manage ReserveSpace. Returns false if something went wrong! 
*/ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { log.debug("PtPChunk: entered ReserveSpaceStep for {}", fileStoRI.getAbsolutePath()); @@ -718,9 +763,11 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { */ TSpaceToken SASpaceToken = sp.getTokenFromStoRI(PtP.log, fileStoRI); if (SASpaceToken == null || SASpaceToken.isEmpty()) { - log.error("PtPChunk - ReserveSpaceStep: Unable to get a valid " - + "TSpaceToken for stori {} . Unable to verify storage area space " - + "initialization", fileStoRI); + log.error( + "PtPChunk - ReserveSpaceStep: Unable to get a valid " + + "TSpaceToken for stori {} . Unable to verify storage area space " + + "initialization", + fileStoRI); requestData.changeStatusSRM_FAILURE("No valid space token for the Storage Area"); failure = true; return false; @@ -728,9 +775,10 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { if (!sp.isSAInitialized(PtP.log, fileStoRI) && Configuration.getInstance().getDiskUsageServiceEnabled()) { /* Trust we got space, let the request pass */ - log.debug("PtPChunk: ReserveSpaceStep: the storage area space " - + "initialization is in progress, optimistic approach, considering " - + "we got enough space"); + log.debug( + "PtPChunk: ReserveSpaceStep: the storage area space " + + "initialization is in progress, optimistic approach, considering " + + "we got enough space"); } else { log.debug("PtPChunk - ReserveSpaceStep: no free space on Storage Area!"); requestData.changeStatusSRM_FAILURE("No free space on Storage Area"); @@ -763,25 +811,25 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { } if (spaceToken.isEmpty() && (!size.isEmpty())) { - log.debug("PtPChunk: no SpaceToken available but " - + "there is a FileSize specified; implicit space reservation " + "taking place..."); + log.debug( + "PtPChunk: no SpaceToken available but " + + "there is a FileSize specified; implicit space reservation " + + "taking place..."); fileStoRI.allotSpaceForFile(size); - } if (!spaceToken.isEmpty()) { if (!isExistingSpaceToken(spaceToken)) { - requestData - .changeStatusSRM_INVALID_REQUEST("The provided Space Token " + "does not exists"); + requestData.changeStatusSRM_INVALID_REQUEST( + "The provided Space Token " + "does not exists"); log.info( "PtPChunk execution failed. The space token {} provided by " + "user does not exists", spaceToken); failure = true; return false; - } if (size.isEmpty()) { @@ -806,9 +854,12 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { // the PolicyCollector! requestData.changeStatusSRM_FAILURE("Space Management step in " + "srmPrepareToPut failed!"); failure = true; - log.error("ERROR in PtPChunk! During space reservation step in PtP, " - + "could not create file: {}; Java s SecurityManager does not allow " - + "writing the file! ", localFile.toString(), e); + log.error( + "ERROR in PtPChunk! During space reservation step in PtP, " + + "could not create file: {}; Java s SecurityManager does not allow " + + "writing the file! ", + localFile.toString(), + e); return false; } catch (IOException e) { // file.createNewFile could not create file because of a local IO @@ -818,23 +869,31 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { log.error( "ERROR in PtPChunk! 
During space reservation step in PtP, " + "an error occured while trying to create the file: {}; error: {}", - localFile.toString(), e.getMessage(), e); + localFile.toString(), + e.getMessage(), + e); return false; } catch (it.grid.storm.filesystem.InvalidPermissionOnFileException e) { // I haven t got the right to create a file as StoRM user! // This is thrown when executing createNewFile method! requestData.changeStatusSRM_FAILURE("Space Management step in " + "srmPrepareToPut failed!"); failure = true; - log.error("ERROR in PtPChunk! During space reservation step in PtP, an " - + "attempt to create file {} failed because StoRM lacks the privileges " - + "to do so! Exception follows: {}", localFile.toString(), e.getMessage(), e); + log.error( + "ERROR in PtPChunk! During space reservation step in PtP, an " + + "attempt to create file {} failed because StoRM lacks the privileges " + + "to do so! Exception follows: {}", + localFile.toString(), + e.getMessage(), + e); return false; } catch (ReservationException e) { // Something went wrong while using space reservation component! requestData.changeStatusSRM_FAILURE("Space Management step in " + "srmPrepareToPut failed!"); failure = true; - log.error("ERROR in PtPChunk! Space component failed! Exception " + "follows: {}", - e.getMessage(), e); + log.error( + "ERROR in PtPChunk! Space component failed! Exception " + "follows: {}", + e.getMessage(), + e); return false; } catch (ExpiredSpaceTokenException e) { // The supplied space token is expired @@ -852,7 +911,9 @@ private boolean managePermitReserveSpaceStep(StoRI fileStoRI) { log.error( "ERROR in PtPChunk - space Step! Unexpected error in reserve " + "space step of PtP for file {}! Exception follows: {}", - localFile.toString(), e.getMessage(), e); + localFile.toString(), + e.getMessage(), + e); return false; } } @@ -863,14 +924,20 @@ private boolean isExistingSpaceToken(TSpaceToken spaceToken) throws Exception { try { spaceData = new ReservedSpaceCatalog().getStorageSpace(spaceToken); } catch (TransferObjectDecodingException e) { - log.error("Unable to build StorageSpaceData from StorageSpaceTO." - + " TransferObjectDecodingException: {}", e.getMessage()); - throw new Exception("Error retrieving Storage Area information from Token." - + " TransferObjectDecodingException: " + e.getMessage()); + log.error( + "Unable to build StorageSpaceData from StorageSpaceTO." + + " TransferObjectDecodingException: {}", + e.getMessage()); + throw new Exception( + "Error retrieving Storage Area information from Token." + + " TransferObjectDecodingException: " + + e.getMessage()); } catch (DataAccessException e) { log.error("Unable to build get StorageSpaceTO. DataAccessException: {}", e.getMessage()); - throw new Exception("Error retrieving Storage Area information from Token." - + " DataAccessException: " + e.getMessage()); + throw new Exception( + "Error retrieving Storage Area information from Token." 
+ + " DataAccessException: " + + e.getMessage()); } return spaceData != null; } @@ -881,7 +948,9 @@ private void setDefaultAcl(StoRI fileStoRI) { if (dacl != null && !dacl.isEmpty()) { for (ACLEntry ace : dacl.getACL()) { if (ace.isValid()) { - log.debug("Adding DefaultACL for the gid: {} with permission: {}", ace.getGroupID(), + log.debug( + "Adding DefaultACL for the gid: {} with permission: {}", + ace.getGroupID(), ace.getFilePermissionString()); LocalUser u = new LocalUser(ace.getGroupID(), ace.getGroupID()); if (ace.getFilesystemPermission() == null) { @@ -889,11 +958,12 @@ private void setDefaultAcl(StoRI fileStoRI) { } else { try { AclManagerFS.getInstance() - .grantGroupPermission(fileStoRI.getLocalFile(), u, ace.getFilesystemPermission()); + .grantGroupPermission(fileStoRI.getLocalFile(), u, ace.getFilesystemPermission()); } catch (IllegalArgumentException e) { log.error( "Unable to grant group permission on the file. " + "IllegalArgumentException: {}", - e.getMessage(), e); + e.getMessage(), + e); } } } @@ -910,7 +980,6 @@ private void setTapeManagementAcl(StoRI fileStoRI) { // set extended attribute that indicates the file pinned StormEA.setPinned(fileStoRI.getLocalFile().getAbsolutePath(), expDate); - } } @@ -923,7 +992,9 @@ private void manageDeny() { requestData.changeStatusSRM_AUTHORIZATION_FAILURE( "Create/Write access to " + requestData.getSURL() + " denied!"); failure = true; - log.debug("Create/Write access to {}, for user {} denied!", requestData.getSURL(), + log.debug( + "Create/Write access to {}, for user {} denied!", + requestData.getSURL(), DataHelper.getRequestor(requestData)); } @@ -951,8 +1022,8 @@ private void manageAnomaly(AuthzDecision decision) { break; case INDETERMINATE: // PolicyCollector error - requestData - .changeStatusSRM_FAILURE("PolicyCollector error! Access rights cannot be established!"); + requestData.changeStatusSRM_FAILURE( + "PolicyCollector error! 
Access rights cannot be established!"); failure = true; log.error("PtPChunk: PolicyCollector encountered internal problems!"); log.error("Requested SURL: {}", requestData.getSURL()); @@ -1005,9 +1076,7 @@ public boolean isResultSuccess() { return result; } - /** - * @return the requestData - */ + /** @return the requestData */ public PtPData getRequestData() { return requestData; @@ -1024,24 +1093,35 @@ protected void printRequestOutcome(PtPData inputData) { if (inputData != null) { if (inputData.getSURL() != null) { if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - inputData.getRequestToken(), Arrays.asList(inputData.getSURL().toString())); + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, + inputData.getRequestToken(), + Arrays.asList(inputData.getSURL().toString())); } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, Arrays.asList(inputData.getSURL().toString())); } } else { if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, inputData.getStatus(), inputData, inputData.getRequestToken()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData); } } } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "No input available")); } } diff --git a/src/main/java/it/grid/storm/asynch/PtPBuilder.java b/src/main/java/it/grid/storm/asynch/PtPBuilder.java index f61c57f5..93a7fe42 100644 --- a/src/main/java/it/grid/storm/asynch/PtPBuilder.java +++ b/src/main/java/it/grid/storm/asynch/PtPBuilder.java @@ -1,11 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.catalogs.AnonymousPtPData; import it.grid.storm.catalogs.IdentityPtPData; import it.grid.storm.catalogs.InvalidFileTransferDataAttributesException; @@ -25,11 +22,10 @@ import it.grid.storm.srm.types.TTURL; import it.grid.storm.synchcall.data.IdentityInputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PtPBuilder { private static Logger log = LoggerFactory.getLogger(PtPBuilder.class); @@ -41,8 +37,9 @@ public static PtP build(PrepareToPutInputData inputData) throws BuilderException TSURL toSURL = inputData.getSurl(); TLifeTimeInSeconds pinLifetime = inputData.getDesiredPinLifetime(); TLifeTimeInSeconds fileLifetime = inputData.getDesiredFileLifetime(); - TFileStorageType fileStorageType = TFileStorageType - .getTFileStorageType(Configuration.getInstance().getDefaultFileStorageType()); + TFileStorageType fileStorageType = + TFileStorageType.getTFileStorageType( + Configuration.getInstance().getDefaultFileStorageType()); TSpaceToken spaceToken = inputData.getTargetSpaceToken(); TSizeInBytes expectedFileSize = inputData.getFileSize(); TURLPrefix transferProtocols = inputData.getTransferProtocols(); @@ -53,16 +50,37 @@ public static PtP build(PrepareToPutInputData inputData) throws BuilderException PtPData data; try { if (inputData instanceof IdentityInputData) { - data = new IdentityPtPData(((IdentityInputData) inputData).getUser(), toSURL, pinLifetime, - fileLifetime, fileStorageType, spaceToken, expectedFileSize, transferProtocols, - overwriteOption, status, transferURL); + data = + new IdentityPtPData( + ((IdentityInputData) inputData).getUser(), + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); } else { - data = new AnonymousPtPData(toSURL, pinLifetime, fileLifetime, fileStorageType, spaceToken, - expectedFileSize, transferProtocols, overwriteOption, status, transferURL); + data = + new AnonymousPtPData( + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); } data.store(); } catch (InvalidPtPDataAttributesException e) { - log.error("Unable to build PtPChunkData. " + "InvalidPtPChunkDataAttributesException: {}", + log.error( + "Unable to build PtPChunkData. " + "InvalidPtPChunkDataAttributesException: {}", e.getMessage()); throw new BuilderException("Error building PtP PtPChunkData. Building failed"); } catch (InvalidFileTransferDataAttributesException e) { @@ -71,7 +89,8 @@ public static PtP build(PrepareToPutInputData inputData) throws BuilderException e.getMessage()); throw new BuilderException("Error building PtP PtPChunkData. Building failed"); } catch (InvalidSurlRequestDataAttributesException e) { - log.error("Unable to build PtPChunkData. " + "InvalidSurlRequestDataAttributesException: ", + log.error( + "Unable to build PtPChunkData. " + "InvalidSurlRequestDataAttributesException: ", e.getMessage()); throw new BuilderException("Error building PtP PtPChunkData. 
Building failed"); } diff --git a/src/main/java/it/grid/storm/asynch/PtPFeeder.java b/src/main/java/it/grid/storm/asynch/PtPFeeder.java index 40f1d0d6..abbbdf64 100644 --- a/src/main/java/it/grid/storm/asynch/PtPFeeder.java +++ b/src/main/java/it/grid/storm/asynch/PtPFeeder.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -12,26 +11,24 @@ import it.grid.storm.scheduler.Delegable; import it.grid.storm.scheduler.SchedulerException; import it.grid.storm.srm.types.TSURL; - import java.util.Collection; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class represents a PrepareToPut Feeder: the Feeder that will handle the srmPrepareToPut * statements. It chops a multifile request into its constituent parts. - * - * If the request contains nothing to process, an error message gets logged, the number of queued + * + *

If the request contains nothing to process, an error message gets logged, the number of queued * requests is decreased, and the number of finished requests is increased. - * - * Each single part of the request is handled as follows: the number of queued requests is + * + *

Each single part of the request is handled as follows: the number of queued requests is * decreased, the number of progressing requests is increased, the status of that chunk is changed * to SRM_REQUEST_INPROGRESS; the chunk is given to the scheduler for handling. In case the * scheduler cannot accept the chunk for any reason, a messagge with the requestToken and the chunk * s data is logged, status of the chunk passes to SRM_ABORTED, and at the end the counters are such * that the queued-requests is decreased while the finished-requests is increased. - * + * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 2.0 @@ -82,10 +79,11 @@ public void doIt() { Collection chunks = PtPChunkCatalog.getInstance().lookup(rsd.requestToken()); if (chunks.isEmpty()) { - log.warn("ATTENTION in PtPFeeder! This SRM put request contained nothing " + "to process! {}", + log.warn( + "ATTENTION in PtPFeeder! This SRM put request contained nothing " + "to process! {}", rsd.requestToken()); RequestSummaryCatalog.getInstance() - .failRequest(rsd, "This SRM put request contained nothing to process!"); + .failRequest(rsd, "This SRM put request contained nothing to process!"); } else { manageChunks(chunks); log.debug("PtPFeeder: finished pre-processing {}", rsd.requestToken()); @@ -94,7 +92,7 @@ public void doIt() { /** * Private method that handles the Collection of chunks associated with the srm command! - * + * * @param chunksData */ private void manageChunks(Collection chunksData) { @@ -131,7 +129,7 @@ private void manageChunks(Collection chunksData) { /** * Private method that handles the chunk! - * + * * @param auxChunkData */ private void manage(PtPPersistentChunkData auxChunkData) { @@ -139,15 +137,15 @@ private void manage(PtPPersistentChunkData auxChunkData) { log.debug("PtPFeeder - scheduling... "); try { /* change status of this chunk to being processed! */ - auxChunkData - .changeStatusSRM_REQUEST_INPROGRESS("srmPrepareToPut " + "chunk is being processed!"); + auxChunkData.changeStatusSRM_REQUEST_INPROGRESS( + "srmPrepareToPut " + "chunk is being processed!"); PtPChunkCatalog.getInstance().update(auxChunkData); /* hand it to scheduler! */ SchedulerFacade.getInstance() - .chunkScheduler() - .schedule(new PtPPersistentChunk(rsd, auxChunkData, gsm)); + .chunkScheduler() + .schedule(new PtPPersistentChunk(rsd, auxChunkData, gsm)); log.debug("PtPFeeder - chunk scheduled."); } catch (IllegalArgumentException e) { log.error("Unable to schedule the chunk. IllegalArgumentException: {}", e.getMessage(), e); @@ -158,8 +156,8 @@ private void manage(PtPPersistentChunkData auxChunkData) { PtPChunkCatalog.getInstance().update(auxChunkData); gsm.failedChunk(auxChunkData); } catch (InvalidRequestAttributesException e) { - log.error("UNEXPECTED ERROR in PtPFeeder! Chunk could not be " + "created!\n{}", - e.getMessage(), e); + log.error( + "UNEXPECTED ERROR in PtPFeeder! Chunk could not be " + "created!\n{}", e.getMessage(), e); auxChunkData.changeStatusSRM_FAILURE( "StoRM internal error does not " + "allow this chunk to be processed!"); @@ -168,8 +166,10 @@ private void manage(PtPPersistentChunkData auxChunkData) { gsm.failedChunk(auxChunkData); } catch (SchedulerException e) { /* Internal error of scheduler! */ - log.error("UNEXPECTED ERROR in ChunkScheduler! Chunk could not be " + "scheduled!\n{}", - e.getMessage(), e); + log.error( + "UNEXPECTED ERROR in ChunkScheduler! 
Chunk could not be " + "scheduled!\n{}", + e.getMessage(), + e); auxChunkData.changeStatusSRM_FAILURE( "StoRM internal scheduler " + "error prevented this chunk from being processed!"); @@ -179,9 +179,7 @@ private void manage(PtPPersistentChunkData auxChunkData) { } } - /** - * Method used by chunk scheduler for internal logging; it returns the request token! - */ + /** Method used by chunk scheduler for internal logging; it returns the request token! */ public String getName() { return "PtPFeeder of request: " + rsd.requestToken(); diff --git a/src/main/java/it/grid/storm/asynch/PtPPersistentChunk.java b/src/main/java/it/grid/storm/asynch/PtPPersistentChunk.java index 21cfcf5e..c202002a 100644 --- a/src/main/java/it/grid/storm/asynch/PtPPersistentChunk.java +++ b/src/main/java/it/grid/storm/asynch/PtPPersistentChunk.java @@ -1,10 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import java.util.Arrays; import it.grid.storm.catalogs.PtPChunkCatalog; import it.grid.storm.catalogs.PtPData; import it.grid.storm.catalogs.PtPPersistentChunkData; @@ -12,20 +10,16 @@ import it.grid.storm.scheduler.PersistentRequestChunk; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.synchcall.command.CommandHelper; +import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PtPPersistentChunk extends PtP implements PersistentRequestChunk { private static Logger log = LoggerFactory.getLogger(PtPPersistentChunk.class); - /** - * RequestSummaryData containing all the statistics for the originating srmPrepareToPutRequest - */ + /** RequestSummaryData containing all the statistics for the originating srmPrepareToPutRequest */ private final RequestSummaryData rsd; /** @@ -38,18 +32,21 @@ public class PtPPersistentChunk extends PtP implements PersistentRequestChunk { * Constructor requiring the VomsGridUser, the RequestSummaryData, the PtPChunkData about this * chunk, and the GlobalStatusManager. If the supplied attributes are null, an * InvalidPtPChunkAttributesException is thrown. 
- * + * * @throws InvalidPtPAttributesException * @throws InvalidPtPChunkAttributesException */ - public PtPPersistentChunk(RequestSummaryData summaryData, PtPPersistentChunkData chunkData, - GlobalStatusManager gsm) throws InvalidRequestAttributesException, IllegalArgumentException { + public PtPPersistentChunk( + RequestSummaryData summaryData, PtPPersistentChunkData chunkData, GlobalStatusManager gsm) + throws InvalidRequestAttributesException, IllegalArgumentException { super(chunkData); if (summaryData == null || gsm == null) { throw new IllegalArgumentException( - "Unable to instantiate the object, illegal arguments: summaryData=" + summaryData - + " chunkData=" + chunkData); + "Unable to instantiate the object, illegal arguments: summaryData=" + + summaryData + + " chunkData=" + + chunkData); } this.rsd = summaryData; this.gsm = gsm; @@ -67,7 +64,7 @@ public String getName() { /* * (non-Javadoc) - * + * * @see it.grid.storm.asynch.RequestChunk#getRequestToken() */ @Override @@ -79,16 +76,18 @@ public String getRequestToken() { @Override public void persistStatus() { - PtPPersistentChunk.log.debug("Persisting status of request: {} on SURL {}", rsd.requestToken(), - requestData.getSURL()); + PtPPersistentChunk.log.debug( + "Persisting status of request: {} on SURL {}", rsd.requestToken(), requestData.getSURL()); PtPChunkCatalog.getInstance().update((PtPPersistentChunkData) requestData); } @Override public void updateGlobalStatus() { - PtPPersistentChunk.log.debug("Updating global status for request: {} on " + "SURL ", - rsd.requestToken(), requestData.getSURL()); + PtPPersistentChunk.log.debug( + "Updating global status for request: {} on " + "SURL ", + rsd.requestToken(), + requestData.getSURL()); if (failure) { gsm.failedChunk((PtPPersistentChunkData) requestData); } else { @@ -106,24 +105,35 @@ protected void printRequestOutcome(PtPData inputData) { if (inputData != null) { if (inputData.getSURL() != null) { if (rsd.requestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - rsd.requestToken(), Arrays.asList(inputData.getSURL().toString())); + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, + rsd.requestToken(), + Arrays.asList(inputData.getSURL().toString())); } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + inputData.getStatus(), + inputData, Arrays.asList(inputData.getSURL().toString())); } } else { if (rsd.requestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData, - rsd.requestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, inputData.getStatus(), inputData, rsd.requestToken()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, inputData.getStatus(), inputData); } } } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "No input available")); } } diff --git a/src/main/java/it/grid/storm/asynch/Request.java b/src/main/java/it/grid/storm/asynch/Request.java index 41767242..93fdac76 100644 --- a/src/main/java/it/grid/storm/asynch/Request.java +++ b/src/main/java/it/grid/storm/asynch/Request.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -10,9 +9,6 @@ public interface Request { public String getSURL(); - /** - * @return boolean - */ + /** @return boolean */ public boolean isResultSuccess(); - } diff --git a/src/main/java/it/grid/storm/asynch/RequestChunk.java b/src/main/java/it/grid/storm/asynch/RequestChunk.java index 6a7f96d1..8001e4d5 100644 --- a/src/main/java/it/grid/storm/asynch/RequestChunk.java +++ b/src/main/java/it/grid/storm/asynch/RequestChunk.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; diff --git a/src/main/java/it/grid/storm/asynch/SRMPrepareToPutReply.java b/src/main/java/it/grid/storm/asynch/SRMPrepareToPutReply.java index 76871c30..3f35f73b 100644 --- a/src/main/java/it/grid/storm/asynch/SRMPrepareToPutReply.java +++ b/src/main/java/it/grid/storm/asynch/SRMPrepareToPutReply.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -9,7 +8,7 @@ /** * Class that represents a reply to an issued SRMPrepareToPut command. It provides a method to * recover the assigned request token. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2005 @@ -25,14 +24,11 @@ public class SRMPrepareToPutReply { */ public SRMPrepareToPutReply(TRequestToken requestToken) throws InvalidPutReplyAttributeException { - if (requestToken == null) - throw new InvalidPutReplyAttributeException(); + if (requestToken == null) throw new InvalidPutReplyAttributeException(); this.requestToken = requestToken; } - /** - * Method that returns the assigned request token. - */ + /** Method that returns the assigned request token. */ public TRequestToken requestToken() { return requestToken; diff --git a/src/main/java/it/grid/storm/asynch/SRMPutDoneReply.java b/src/main/java/it/grid/storm/asynch/SRMPutDoneReply.java index ff54f274..6366b937 100644 --- a/src/main/java/it/grid/storm/asynch/SRMPutDoneReply.java +++ b/src/main/java/it/grid/storm/asynch/SRMPutDoneReply.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -8,7 +7,7 @@ /** * Class that represents the reply received from issuing an srmPutDone command. - * + * * @author EGRID ICTP Trieste * @version 1.0 * @date August 2006 @@ -17,20 +16,15 @@ public class SRMPutDoneReply { private TReturnStatus overallRetStat = null; // overall request return status - /** - * Constructor that requires the overall TReturnStatus of the reply. - */ + /** Constructor that requires the overall TReturnStatus of the reply. */ public SRMPutDoneReply(TReturnStatus overallRetStat) throws InvalidPutDoneReplyAttributeException { - if (overallRetStat == null) - throw new InvalidPutDoneReplyAttributeException(); + if (overallRetStat == null) throw new InvalidPutDoneReplyAttributeException(); this.overallRetStat = overallRetStat; } - /** - * Method that returns the overll status of the request. 
- */ + /** Method that returns the overll status of the request. */ public TReturnStatus overallRetStat() { return overallRetStat; diff --git a/src/main/java/it/grid/storm/asynch/SRMStatusOfPutRequestReply.java b/src/main/java/it/grid/storm/asynch/SRMStatusOfPutRequestReply.java index 74c11f04..f3ce113d 100644 --- a/src/main/java/it/grid/storm/asynch/SRMStatusOfPutRequestReply.java +++ b/src/main/java/it/grid/storm/asynch/SRMStatusOfPutRequestReply.java @@ -1,16 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; -import it.grid.storm.srm.types.TTURL; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TTURL; /** * Class that represents the reply returned from an invocation of SRMStatusOfPutRequest. It supplies * methods for quering the toTURL assigned, and the returnStatus of the request. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September 2005 @@ -18,10 +17,10 @@ public class SRMStatusOfPutRequestReply { private TTURL toTURL = null; // TTURL as supplied by the invoked server in the - // SRMStatusOfPutRequest + // SRMStatusOfPutRequest private TReturnStatus returnStatus = null; // returnStatus as supplied by the - // invoked server in the - // SRMStatusOfPutRequest + // invoked server in the + // SRMStatusOfPutRequest public SRMStatusOfPutRequestReply(TTURL toTURL, TReturnStatus returnStatus) throws InvalidPutStatusAttributesException { @@ -32,17 +31,13 @@ public SRMStatusOfPutRequestReply(TTURL toTURL, TReturnStatus returnStatus) this.returnStatus = returnStatus; } - /** - * Method that returns the toTURL that the invoked server assigned to the put request. - */ + /** Method that returns the toTURL that the invoked server assigned to the put request. */ public TTURL toTURL() { return toTURL; } - /** - * Method that returns the TReturnStatus that the invoked server assigned to the put request. - */ + /** Method that returns the TReturnStatus that the invoked server assigned to the put request. */ public TReturnStatus returnStatus() { return returnStatus; diff --git a/src/main/java/it/grid/storm/asynch/SchedulerFacade.java b/src/main/java/it/grid/storm/asynch/SchedulerFacade.java index e690f2fe..5ac12cae 100644 --- a/src/main/java/it/grid/storm/asynch/SchedulerFacade.java +++ b/src/main/java/it/grid/storm/asynch/SchedulerFacade.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -9,7 +8,7 @@ /** * This is a Facade to the Schedulers. - * + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date April 25th, 2005 @@ -24,29 +23,21 @@ public class SchedulerFacade { // Scheduler that manages Chunck tasks private ChunkScheduler chunkSched = SchedulerFactory.chunkSched(); - private SchedulerFacade() { + private SchedulerFacade() {} - } - - /** - * Method that returns the only instance of SchedulerFacade. - */ + /** Method that returns the only instance of SchedulerFacade. */ public static SchedulerFacade getInstance() { return sf; } - /** - * Method that returns the Scheduler in charge of handling Chunk. - */ + /** Method that returns the Scheduler in charge of handling Chunk. 
*/ public ChunkScheduler chunkScheduler() { return chunkSched; } - /** - * Method that returns the Scheduler in charge of handling Feeder - */ + /** Method that returns the Scheduler in charge of handling Feeder */ public CrusherScheduler crusherScheduler() { return crusherSched; diff --git a/src/main/java/it/grid/storm/asynch/SchedulerFactory.java b/src/main/java/it/grid/storm/asynch/SchedulerFactory.java index d20240ed..be365e3e 100644 --- a/src/main/java/it/grid/storm/asynch/SchedulerFactory.java +++ b/src/main/java/it/grid/storm/asynch/SchedulerFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -9,7 +8,7 @@ /** * This class is a factory for making the schedulers of StoRM - * + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date April 25th, 2005 @@ -17,20 +16,15 @@ */ public class SchedulerFactory { - private SchedulerFactory() {} - /** - * Method that returns the Scheduler in charge of handling Feeder tasks. - */ + /** Method that returns the Scheduler in charge of handling Feeder tasks. */ public static CrusherScheduler crusherSched() { return CrusherScheduler.getInstance(); } - /** - * Method that returns the Scheduler in charge of Chunk tasks. - */ + /** Method that returns the Scheduler in charge of Chunk tasks. */ public static ChunkScheduler chunkSched() { return ChunkScheduler.getInstance(); diff --git a/src/main/java/it/grid/storm/asynch/Suspendedable.java b/src/main/java/it/grid/storm/asynch/Suspendedable.java index 80564d10..05668ba2 100644 --- a/src/main/java/it/grid/storm/asynch/Suspendedable.java +++ b/src/main/java/it/grid/storm/asynch/Suspendedable.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -12,5 +11,4 @@ public interface Suspendedable { public Boolean completeRequest(TapeRecallStatus recallStatus); public RequestData getRequestData(); - } diff --git a/src/main/java/it/grid/storm/asynch/UnsupportedOperationFeeder.java b/src/main/java/it/grid/storm/asynch/UnsupportedOperationFeeder.java index e6d0de92..1ceea2bb 100644 --- a/src/main/java/it/grid/storm/asynch/UnsupportedOperationFeeder.java +++ b/src/main/java/it/grid/storm/asynch/UnsupportedOperationFeeder.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.asynch; @@ -12,7 +11,6 @@ public class UnsupportedOperationFeeder implements Delegable { public void doIt() { throw new UnsupportedOperationException(); - } @Override @@ -20,5 +18,4 @@ public String getName() { return "Unsupported Operation Feeder"; } - } diff --git a/src/main/java/it/grid/storm/authz/AuthzDecision.java b/src/main/java/it/grid/storm/authz/AuthzDecision.java index f0e4c275..10b4534a 100644 --- a/src/main/java/it/grid/storm/authz/AuthzDecision.java +++ b/src/main/java/it/grid/storm/authz/AuthzDecision.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz; -/** - * @author ritz - */ +/** @author ritz */ public enum AuthzDecision { - PERMIT, DENY, INDETERMINATE, NOT_APPLICABLE + PERMIT, + DENY, + INDETERMINATE, + NOT_APPLICABLE } diff --git a/src/main/java/it/grid/storm/authz/AuthzDirector.java b/src/main/java/it/grid/storm/authz/AuthzDirector.java index c5340adc..bc49825b 100644 --- a/src/main/java/it/grid/storm/authz/AuthzDirector.java +++ b/src/main/java/it/grid/storm/authz/AuthzDirector.java @@ -1,17 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz; -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.authz.path.PathAuthz; import it.grid.storm.authz.path.conf.PathAuthzDBReader; import it.grid.storm.authz.sa.AuthzDBReaderException; @@ -23,141 +14,138 @@ import it.grid.storm.namespace.model.SAAuthzType; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.srm.types.TSpaceToken; +import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class AuthzDirector { - private static final Logger log = LoggerFactory - .getLogger(AuthzDirector.class); - private static String configurationPATH; - - // Map between 'SpaceToken' and the related 'SpaceAuthz' - private static Map spaceAuthzs = null; - - // PathAuthz is only one, shared by all SAs - private static PathAuthzInterface pathAuthz = null; - - /** - * Scan the Namespace.xml to retrieve the list of file AuthZDB to digest - */ - private static Map buildSpaceAuthzsMAP() { - - HashMap spaceAuthzMap = new HashMap(); - - // Retrieve the list of VFS from Namespace - NamespaceInterface ns = NamespaceDirector.getNamespace(); - ArrayList vfss; - try { - vfss = new ArrayList(ns.getAllDefinedVFS()); - for (VirtualFS vfs : vfss) { - String vfsName = vfs.getAliasName(); - SAAuthzType authzTp = vfs.getStorageAreaAuthzType(); - String authzName = ""; - if (authzTp.equals(SAAuthzType.AUTHZDB)) { - // The Space Authz is based on Authz DB - authzName = vfs.getStorageAreaAuthzDB(); - log.debug("Loading AuthzDB '{}'", authzName); - if (existsAuthzDBFile(authzName)) { - // Digest the Space AuthzDB File - TSpaceToken spaceToken = vfs.getSpaceToken(); - SpaceAuthzInterface spaceAuthz = new SpaceDBAuthz(authzName); - spaceAuthzMap.put(spaceToken, spaceAuthz); - } else { - log.error("File AuthzDB '{}' related to '{}' does not exists.", - authzName, vfsName); - } - } else { - authzName = vfs.getStorageAreaAuthzFixed(); - } - log.debug("VFS ['{}'] = {} : {}", vfsName, authzTp, authzName); - } - } catch (NamespaceException e) { - log.error("Unable to initialize AUTHZ DB! 
Error: {}", e.getMessage(), e); - } - - return spaceAuthzMap; - } - - /** - * Utility method - * - * @param dbFileName - * @return - * @throws AuthzDBReaderException - */ - private static boolean existsAuthzDBFile(String dbFileName) { - - String fileName = configurationPATH + File.separator + dbFileName; - boolean exists = (new File(fileName)).exists(); - if (!exists) { - log.warn("The AuthzDB File '{}' does not exists", dbFileName); - } - return exists; - } - - // **************************************** - // PUBLIC METHODS - // **************************************** - - /****************************** - * SPACE AUTHORIZATION ENGINE - ******************************/ - public static void initializeSpaceAuthz() { - - // Build Space Authzs MAP - spaceAuthzs = buildSpaceAuthzsMAP(); - } - - /** - * Retrieve the Space Authorization module related to the Space Token - * - * @param token - * @return - */ - public static SpaceAuthzInterface getSpaceAuthz(TSpaceToken token) { - - SpaceAuthzInterface spaceAuthz = new MockSpaceAuthz(); - // Retrieve the SpaceAuthz related to the Space Token - if ((spaceAuthzs != null) && (spaceAuthzs.containsKey(token))) { - spaceAuthz = spaceAuthzs.get(token); - log.debug("Space Authz related to S.Token ='{}' is '{}'", token, - spaceAuthz.getSpaceAuthzID()); - } else { - log.debug("Space Authz related to S.Token ='{}' does not exists. " - + "Use the MOCK one.", token); - } - return spaceAuthz; - } - - /****************************** - * PATH AUTHORIZATION ENGINE - ******************************/ - - /** - * Initializating the Path Authorization engine - * - * @param pathAuthz2 - */ - public static void initializePathAuthz(String pathAuthzDBFileName) - throws DirectorException { - - PathAuthzDBReader authzDBReader; - try { - authzDBReader = new PathAuthzDBReader(pathAuthzDBFileName); - } catch (Exception e) { - log.error("Unable to build a PathAuthzDBReader: {}", e.getMessage(), e); - throw new DirectorException("Unable to build a PathAuthzDBReader"); - } - AuthzDirector.pathAuthz = new PathAuthz(authzDBReader.getPathAuthzDB()); - } - - /** - * Retrieve the Path Authorization module - * - * @todo: To implement this. 
- */ - public static PathAuthzInterface getPathAuthz() { - - return AuthzDirector.pathAuthz; - } - + private static final Logger log = LoggerFactory.getLogger(AuthzDirector.class); + private static String configurationPATH; + + // Map between 'SpaceToken' and the related 'SpaceAuthz' + private static Map spaceAuthzs = null; + + // PathAuthz is only one, shared by all SAs + private static PathAuthzInterface pathAuthz = null; + + /** Scan the Namespace.xml to retrieve the list of file AuthZDB to digest */ + private static Map buildSpaceAuthzsMAP() { + + HashMap spaceAuthzMap = + new HashMap(); + + // Retrieve the list of VFS from Namespace + NamespaceInterface ns = NamespaceDirector.getNamespace(); + ArrayList vfss; + try { + vfss = new ArrayList(ns.getAllDefinedVFS()); + for (VirtualFS vfs : vfss) { + String vfsName = vfs.getAliasName(); + SAAuthzType authzTp = vfs.getStorageAreaAuthzType(); + String authzName = ""; + if (authzTp.equals(SAAuthzType.AUTHZDB)) { + // The Space Authz is based on Authz DB + authzName = vfs.getStorageAreaAuthzDB(); + log.debug("Loading AuthzDB '{}'", authzName); + if (existsAuthzDBFile(authzName)) { + // Digest the Space AuthzDB File + TSpaceToken spaceToken = vfs.getSpaceToken(); + SpaceAuthzInterface spaceAuthz = new SpaceDBAuthz(authzName); + spaceAuthzMap.put(spaceToken, spaceAuthz); + } else { + log.error("File AuthzDB '{}' related to '{}' does not exists.", authzName, vfsName); + } + } else { + authzName = vfs.getStorageAreaAuthzFixed(); + } + log.debug("VFS ['{}'] = {} : {}", vfsName, authzTp, authzName); + } + } catch (NamespaceException e) { + log.error("Unable to initialize AUTHZ DB! Error: {}", e.getMessage(), e); + } + + return spaceAuthzMap; + } + + /** + * Utility method + * + * @param dbFileName + * @return + * @throws AuthzDBReaderException + */ + private static boolean existsAuthzDBFile(String dbFileName) { + + String fileName = configurationPATH + File.separator + dbFileName; + boolean exists = (new File(fileName)).exists(); + if (!exists) { + log.warn("The AuthzDB File '{}' does not exists", dbFileName); + } + return exists; + } + + // **************************************** + // PUBLIC METHODS + // **************************************** + + /** **************************** SPACE AUTHORIZATION ENGINE **************************** */ + public static void initializeSpaceAuthz() { + + // Build Space Authzs MAP + spaceAuthzs = buildSpaceAuthzsMAP(); + } + + /** + * Retrieve the Space Authorization module related to the Space Token + * + * @param token + * @return + */ + public static SpaceAuthzInterface getSpaceAuthz(TSpaceToken token) { + + SpaceAuthzInterface spaceAuthz = new MockSpaceAuthz(); + // Retrieve the SpaceAuthz related to the Space Token + if ((spaceAuthzs != null) && (spaceAuthzs.containsKey(token))) { + spaceAuthz = spaceAuthzs.get(token); + log.debug( + "Space Authz related to S.Token ='{}' is '{}'", token, spaceAuthz.getSpaceAuthzID()); + } else { + log.debug( + "Space Authz related to S.Token ='{}' does not exists. 
" + "Use the MOCK one.", token); + } + return spaceAuthz; + } + + /** **************************** PATH AUTHORIZATION ENGINE **************************** */ + + /** + * Initializating the Path Authorization engine + * + * @param pathAuthz2 + */ + public static void initializePathAuthz(String pathAuthzDBFileName) throws DirectorException { + + PathAuthzDBReader authzDBReader; + try { + authzDBReader = new PathAuthzDBReader(pathAuthzDBFileName); + } catch (Exception e) { + log.error("Unable to build a PathAuthzDBReader: {}", e.getMessage(), e); + throw new DirectorException("Unable to build a PathAuthzDBReader"); + } + AuthzDirector.pathAuthz = new PathAuthz(authzDBReader.getPathAuthzDB()); + } + + /** + * Retrieve the Path Authorization module + * + * @todo: To implement this. + */ + public static PathAuthzInterface getPathAuthz() { + + return AuthzDirector.pathAuthz; + } } diff --git a/src/main/java/it/grid/storm/authz/AuthzException.java b/src/main/java/it/grid/storm/authz/AuthzException.java index 4468b4b3..901964c2 100644 --- a/src/main/java/it/grid/storm/authz/AuthzException.java +++ b/src/main/java/it/grid/storm/authz/AuthzException.java @@ -1,40 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz; -/** - * @author zappi - * - */ +/** @author zappi */ public class AuthzException extends RuntimeException { - /** - * - */ - private static final long serialVersionUID = 1L; + /** */ + private static final long serialVersionUID = 1L; - public AuthzException() { + public AuthzException() { - super(); - } + super(); + } - public AuthzException(String message) { + public AuthzException(String message) { - super(message); - } + super(message); + } - public AuthzException(String message, Throwable cause) { + public AuthzException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public AuthzException(Throwable cause) { + public AuthzException(Throwable cause) { - super(cause); - } + super(cause); + } } diff --git a/src/main/java/it/grid/storm/authz/DirectorException.java b/src/main/java/it/grid/storm/authz/DirectorException.java index eb96a730..d16452ad 100644 --- a/src/main/java/it/grid/storm/authz/DirectorException.java +++ b/src/main/java/it/grid/storm/authz/DirectorException.java @@ -1,33 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz; public class DirectorException extends Exception { - /** - * - */ - private static final long serialVersionUID = 8391356294029256927L; + /** */ + private static final long serialVersionUID = 8391356294029256927L; - public DirectorException() { + public DirectorException() {} - } + public DirectorException(String message) { - public DirectorException(String message) { + super(message); + } - super(message); - } + public DirectorException(Throwable cause) { - public DirectorException(Throwable cause) { + super(cause); + } - super(cause); - } - - public DirectorException(String message, Throwable cause) { - - super(message, cause); - } + public DirectorException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/authz/PathAuthzInterface.java b/src/main/java/it/grid/storm/authz/PathAuthzInterface.java index 0389abbf..6b68687a 100644 --- a/src/main/java/it/grid/storm/authz/PathAuthzInterface.java +++ b/src/main/java/it/grid/storm/authz/PathAuthzInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz; @@ -12,25 +11,19 @@ public interface PathAuthzInterface { - public AuthzDecision authorize(GridUserInterface guser, - PathOperation pathOperation, StFN fileStFN); + public AuthzDecision authorize( + GridUserInterface guser, PathOperation pathOperation, StFN fileStFN); - public AuthzDecision authorize(GridUserInterface guser, - SRMFileRequest srmPathOp, StoRI stori); + public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest srmPathOp, StoRI stori); - public AuthzDecision authorize(GridUserInterface guser, - SRMFileRequest srmPathOp, StFN fileStFN); + public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest srmPathOp, StFN fileStFN); - public AuthzDecision authorize(GridUserInterface guser, - SRMFileRequest srmPathOp, StoRI storiSource, StoRI storiDest); + public AuthzDecision authorize( + GridUserInterface guser, SRMFileRequest srmPathOp, StoRI storiSource, StoRI storiDest); - public AuthzDecision authorizeAnonymous(PathOperation pathOperation, - StFN fileStFN); + public AuthzDecision authorizeAnonymous(PathOperation pathOperation, StFN fileStFN); - public AuthzDecision authorizeAnonymous(SRMFileRequest srmPathOp, - StFN fileStFN); - - public AuthzDecision authorizeAnonymous(SRMFileRequest mvSource, - StoRI fromStori, StoRI toStori); + public AuthzDecision authorizeAnonymous(SRMFileRequest srmPathOp, StFN fileStFN); + public AuthzDecision authorizeAnonymous(SRMFileRequest mvSource, StoRI fromStori, StoRI toStori); } diff --git a/src/main/java/it/grid/storm/authz/SpaceAuthzInterface.java b/src/main/java/it/grid/storm/authz/SpaceAuthzInterface.java index 86cab478..bccef912 100644 --- a/src/main/java/it/grid/storm/authz/SpaceAuthzInterface.java +++ b/src/main/java/it/grid/storm/authz/SpaceAuthzInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz; @@ -10,14 +9,13 @@ public interface SpaceAuthzInterface { - public boolean authorize(GridUserInterface guser, SRMSpaceRequest srmSpaceOp); + public boolean authorize(GridUserInterface guser, SRMSpaceRequest srmSpaceOp); - public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp); + public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp); - void setAuthzDB(AuthzDBInterface authzDB); + void setAuthzDB(AuthzDBInterface authzDB); - void refresh(); - - public String getSpaceAuthzID(); + void refresh(); + public String getSpaceAuthzID(); } diff --git a/src/main/java/it/grid/storm/authz/path/PathAuthz.java b/src/main/java/it/grid/storm/authz/path/PathAuthz.java index beac6896..f36f1f4c 100644 --- a/src/main/java/it/grid/storm/authz/path/PathAuthz.java +++ b/src/main/java/it/grid/storm/authz/path/PathAuthz.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path; import it.grid.storm.authz.AuthzDecision; @@ -17,13 +14,10 @@ import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.namespace.StoRI; import it.grid.storm.namespace.util.userinfo.LocalGroups; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class PathAuthz implements PathAuthzInterface { private static final Logger log = LoggerFactory.getLogger(PathAuthz.class); @@ -34,14 +28,14 @@ public PathAuthz(PathAuthzDB pathAuthzDB) { this.pathAuthzDB = pathAuthzDB; } - public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest pathOperation, - StoRI stori) { + public AuthzDecision authorize( + GridUserInterface guser, SRMFileRequest pathOperation, StoRI stori) { return authorize(guser, pathOperation, stori.getStFN()); } - public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest pathOperation, - StFN fileStFN) { + public AuthzDecision authorize( + GridUserInterface guser, SRMFileRequest pathOperation, StFN fileStFN) { String groupName = null; try { @@ -50,13 +44,17 @@ public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest pathOpera log.error("Unable to retrieve the local group for '{}'", guser, e); return AuthzDecision.INDETERMINATE; } - log.debug(" Compute authorization for groupName:'{}', " - + "filename:'{}', pathOperation:'{}'", groupName, fileStFN, pathOperation); + log.debug( + " Compute authorization for groupName:'{}', " + + "filename:'{}', pathOperation:'{}'", + groupName, + fileStFN, + pathOperation); return pathAuthzDB.evaluate(groupName, fileStFN, pathOperation); } - public AuthzDecision authorize(GridUserInterface guser, PathOperation pathOperation, - StFN fileStFN) { + public AuthzDecision authorize( + GridUserInterface guser, PathOperation pathOperation, StFN fileStFN) { String groupName = null; try { @@ -65,30 +63,40 @@ public AuthzDecision authorize(GridUserInterface guser, PathOperation pathOperat log.error("Unable to retrieve the local group for '{}'", guser, e); return AuthzDecision.INDETERMINATE; } - log.debug(" Compute authorization for groupName:'{}', " - + "filename:'{}', pathOperation:'{}'", groupName, fileStFN, pathOperation); + log.debug( + " Compute authorization for groupName:'{}', " + + "filename:'{}', pathOperation:'{}'", + groupName, + fileStFN, + pathOperation); return 
pathAuthzDB.evaluate(groupName, fileStFN, pathOperation); } @Override public AuthzDecision authorizeAnonymous(PathOperation pathOperation, StFN fileStFN) { - log.debug(" Compute authorization for anonymous user on " - + "filename:'{}', pathOperation:'{}'", fileStFN, pathOperation); + log.debug( + " Compute authorization for anonymous user on " + + "filename:'{}', pathOperation:'{}'", + fileStFN, + pathOperation); return pathAuthzDB.evaluateAnonymous(fileStFN, pathOperation); } @Override public AuthzDecision authorizeAnonymous(SRMFileRequest srmPathOp, StFN fileStFN) { - log.debug(" Compute authorization for anonymous user on " - + "filename:'{}', SRMFileRequest:'{}'", fileStFN, srmPathOp); + log.debug( + " Compute authorization for anonymous user on " + + "filename:'{}', SRMFileRequest:'{}'", + fileStFN, + srmPathOp); return pathAuthzDB.evaluateAnonymous(fileStFN, srmPathOp); } @Override - public AuthzDecision authorizeAnonymous(SRMFileRequest pathOperation, StoRI storiSource, - StoRI storiDest) { + public AuthzDecision authorizeAnonymous( + SRMFileRequest pathOperation, StoRI storiSource, StoRI storiDest) { AuthzDecision result = AuthzDecision.INDETERMINATE; @@ -153,8 +161,8 @@ public AuthzDecision authorizeAnonymous(SRMFileRequest pathOperation, StoRI stor return result; } - public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest pathOperation, - StoRI storiSource, StoRI storiDest) { + public AuthzDecision authorize( + GridUserInterface guser, SRMFileRequest pathOperation, StoRI storiSource, StoRI storiDest) { AuthzDecision result = AuthzDecision.INDETERMINATE; @@ -228,5 +236,4 @@ public AuthzDecision authorize(GridUserInterface guser, SRMFileRequest pathOpera } return result; } - } diff --git a/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDB.java b/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDB.java index 6160f129..ae048b57 100644 --- a/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDB.java +++ b/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDB.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.conf; import it.grid.storm.authz.AuthzDecision; @@ -17,9 +14,7 @@ import java.util.LinkedList; import java.util.List; -/** - * @author zappi - */ +/** @author zappi */ public class PathAuthzDB { public static final String MOCK_ID = "mock-PathAuthzDB"; @@ -31,8 +26,8 @@ public class PathAuthzDB { private final PathAuthzEvaluationAlgorithm evaluationAlg; private final LinkedList authzDB = new LinkedList(); - public PathAuthzDB(String pathAuthzDBID, PathAuthzEvaluationAlgorithm algorithm, - List aces) { + public PathAuthzDB( + String pathAuthzDBID, PathAuthzEvaluationAlgorithm algorithm, List aces) { this.pathAuthzDBID = pathAuthzDBID; this.evaluationAlg = algorithm; @@ -46,9 +41,7 @@ public PathAuthzDB(String pathAuthzDBID, List aces) { this.authzDB.addAll(aces); } - /** - * Empty constructor. Use it only if there is not - */ + /** Empty constructor. 
Use it only if there is not */ public PathAuthzDB() { this.pathAuthzDBID = MOCK_ID; @@ -104,5 +97,4 @@ public AuthzDecision evaluateAnonymous(StFN fileStFN, SRMFileRequest pathOperati return evaluationAlg.evaluateAnonymous(fileStFN, pathOperation, authzDB); } - } diff --git a/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDBReader.java b/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDBReader.java index 0acd3617..ca3aaaee 100644 --- a/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDBReader.java +++ b/src/main/java/it/grid/storm/authz/path/conf/PathAuthzDBReader.java @@ -1,32 +1,25 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.conf; import it.grid.storm.authz.AuthzException; import it.grid.storm.authz.path.model.PathACE; import it.grid.storm.authz.path.model.PathAuthzEvaluationAlgorithm; import it.grid.storm.config.Configuration; - import java.io.BufferedReader; import java.io.File; +import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.LinkedList; -import java.io.FileNotFoundException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class PathAuthzDBReader { private static final Logger log = LoggerFactory.getLogger(PathAuthzDBReader.class); @@ -35,7 +28,10 @@ public class PathAuthzDBReader { private PathAuthzDB pathAuthzDB; private static enum LineType { - COMMENT, ALGORITHM_NAME, PATH_ACE, OTHER + COMMENT, + ALGORITHM_NAME, + PATH_ACE, + OTHER } public PathAuthzDBReader(String filename) throws Exception { @@ -72,12 +68,11 @@ public PathAuthzDB getPathAuthzDB() { return pathAuthzDB; } - /************************** - * Private BUILDERs helper - * - * @param authzDBFilename - **************************/ - + /** + * ************************ Private BUILDERs helper + * + * @param authzDBFilename ************************ + */ private PathAuthzDB loadPathAuthzDB(String authzDBFilename) throws Exception { if (existsAuthzDBFile(authzDBFilename)) { @@ -102,10 +97,16 @@ private PathAuthzDB parsePathAuthzDB(String authzDBFilename) throws Exception { try { reader = new BufferedReader(new FileReader(authzDBFilename)); } catch (FileNotFoundException e) { - log.error("Unable to get a FIleReader on '{}'. FileNotFoundException: ", authzDBFilename, - e.getMessage(), e); - throw new Exception("No file available at path \'" + authzDBFilename - + "\' . FileNotFoundException: " + e.getMessage()); + log.error( + "Unable to get a FIleReader on '{}'. FileNotFoundException: ", + authzDBFilename, + e.getMessage(), + e); + throw new Exception( + "No file available at path \'" + + authzDBFilename + + "\' . FileNotFoundException: " + + e.getMessage()); } try { String str; @@ -120,7 +121,8 @@ private PathAuthzDB parsePathAuthzDB(String authzDBFilename) throws Exception { log.error( "Attention! 
More than one Algorithm specified in " + "configuration file: '{}', {}", - parsedLine.getAlgorithmName(), algorithm.getClass()); + parsedLine.getAlgorithmName(), + algorithm.getClass()); throw new Exception("More than one Algorithm specified in configuration file"); } try { @@ -158,7 +160,9 @@ private PathAuthzEvaluationAlgorithm buildAlgorithmInstance(String algorithmName } catch (ClassNotFoundException e) { log.error( "Unable to load the Path Authz Algorithm Class '{}'. " + "ClassNotFoundException: {}", - algorithmName, e.getMessage(), e); + algorithmName, + e.getMessage(), + e); throw new Exception("Unable to load a class with name \'" + algorithmName + "\'"); } Class authzAlgClass; @@ -168,15 +172,21 @@ private PathAuthzEvaluationAlgorithm buildAlgorithmInstance(String algorithmName log.error( "The loaded class Class '{}' is not a " + "PathAuthzEvaluationAlgorithm. ClassCastException: {}", - algorithmName, e.getMessage(), e); + algorithmName, + e.getMessage(), + e); throw new Exception("Class \'" + algorithmName + "\' is not a PathAuthzEvaluationAlgorithm"); } Method instanceMethod; try { instanceMethod = authzAlgClass.getMethod("getInstance", new Class[0]); } catch (NoSuchMethodException e) { - log.error("The loaded class Class '{}' has not a getInstance method. " - + "NoSuchMethodException: {}", algorithmName, e.getMessage(), e); + log.error( + "The loaded class Class '{}' has not a getInstance method. " + + "NoSuchMethodException: {}", + algorithmName, + e.getMessage(), + e); throw new Exception("Class \'" + algorithmName + "\' has not a getInstance method"); } catch (SecurityException e) { log.error("Unable to get getInstance method. SecurityException: {}", e.getMessage(), e); @@ -193,12 +203,12 @@ private PathAuthzEvaluationAlgorithm buildAlgorithmInstance(String algorithmName log.error("Unable to call getInstance method. IllegalAccessException: {}", e.getMessage(), e); throw new Exception("Unable to call getInstance method"); } catch (IllegalArgumentException e) { - log.error("Unable to call getInstance method. IllegalArgumentException: {}", e.getMessage(), - e); + log.error( + "Unable to call getInstance method. IllegalArgumentException: {}", e.getMessage(), e); throw new Exception("Unable to call getInstance method"); } catch (InvocationTargetException e) { - log.error("Unable to call getInstance method. InvocationTargetException: {}", e.getMessage(), - e); + log.error( + "Unable to call getInstance method. 
InvocationTargetException: {}", e.getMessage(), e); throw new Exception("Unable to call getInstance method"); } @@ -211,7 +221,8 @@ private PathAuthzEvaluationAlgorithm buildAlgorithmInstance(String algorithmName log.error( "The method getInstance of class '{}' does not return a valid " + "Path Authz Evaluation Algorithm object but a '{}'", - algorithmName, authzAlgInstance.getClass()); + algorithmName, + authzAlgInstance.getClass()); throw new Exception("Unable to get a valid instance of PathAuthzEvaluationAlgorithm"); } } @@ -252,17 +263,13 @@ private ParseLineResults parseLine(String pathACEString) { log.error(" - explanation: {}", e.getMessage()); result = new ParseLineResults(LineType.OTHER); } - } } } return result; } - /*********************************************** - * UTILITY Methods - */ - + /** ********************************************* UTILITY Methods */ private boolean existsAuthzDBFile(String fileName) { File file = new File(fileName); @@ -288,9 +295,7 @@ private class ParseLineResults { private String algorithmName = null; private PathACE pathAce = null; - /** - * @param - */ + /** @param */ public ParseLineResults(LineType type) { this.type = type; diff --git a/src/main/java/it/grid/storm/authz/path/model/PathACE.java b/src/main/java/it/grid/storm/authz/path/model/PathACE.java index 2e59c827..5086ab00 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathACE.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathACE.java @@ -1,28 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.model; import it.grid.storm.authz.AuthzException; import it.grid.storm.common.types.InvalidStFNAttributeException; import it.grid.storm.common.types.StFN; import it.grid.storm.namespace.util.userinfo.LocalGroups; - import java.net.URI; import java.util.LinkedList; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class PathACE { private static final Logger log = LoggerFactory.getLogger(PathACE.class); @@ -33,7 +26,7 @@ public class PathACE { public static final String FIELD_SEP = "\\s"; // * White space character **/ private static final boolean PERMIT_ACE = true; public static final String ALGORITHM = "algorithm"; // property key used to - // define the algorithm + // define the algorithm public static final PathACE PERMIT_ALL = buildPermitAllPathACE(); @@ -48,21 +41,25 @@ public class PathACE { /** * Quite similar to clone - * + * * @throws AuthzException */ public static PathACE build(PathACE other) throws AuthzException { - PathACE result = new PathACE(other.localGroupName, other.getStorageFileName(), - other.getPathAccessMask(), other.isPermitAce()); + PathACE result = + new PathACE( + other.localGroupName, + other.getStorageFileName(), + other.getPathAccessMask(), + other.isPermitAce()); return result; } private static PathACE buildPermitAllPathACE() throws IllegalStateException { try { - return new PathACE(PathACE.ALL_GROUPS, StFN.makeEmpty(), PathAccessMask.DEFAULT, - PathACE.PERMIT_ACE); + return new PathACE( + PathACE.ALL_GROUPS, StFN.makeEmpty(), PathAccessMask.DEFAULT, PathACE.PERMIT_ACE); } catch (AuthzException e) { // never thrown throw new IllegalStateException("Unexpected AuthzException: " + e); @@ -118,11 +115,15 @@ 
public static PathACE buildFromString(String pathACEString) throws AuthzExceptio /* Checks if the path string represents a valid URI */ URI.create(notemptyFields.get(1)); } catch (IllegalArgumentException uriEx) { - throw new AuthzException("Error (IllegalArgumentException )while parsing the StFN '" - + notemptyFields.get(1) + "' in Path ACE. Is not a valid URI"); + throw new AuthzException( + "Error (IllegalArgumentException )while parsing the StFN '" + + notemptyFields.get(1) + + "' in Path ACE. Is not a valid URI"); } catch (NullPointerException npe) { - throw new AuthzException("Error (NullPointerException )while parsing the StFN '" - + notemptyFields.get(1) + "' in Path ACE."); + throw new AuthzException( + "Error (NullPointerException )while parsing the StFN '" + + notemptyFields.get(1) + + "' in Path ACE."); } // Setting the StFN try { @@ -184,10 +185,7 @@ public boolean isPermitAce() { return isPermitACE; } - /** - * ## BUSINESS Methods - */ - + /** ## BUSINESS Methods */ public boolean subjectMatch(String subjectGroup) { Matcher allGroupsMatcher = allGroupsPattern.matcher(localGroupName); @@ -198,9 +196,7 @@ public boolean subjectMatch(String subjectGroup) { return false; } - /** - * - */ + /** */ @Override public boolean equals(Object other) { @@ -231,8 +227,11 @@ public int hashCode() { @Override public String toString() { - return String.format("%s %s %s %s", localGroupName == null ? "NULL" : localGroupName, - storageFileName, pathAccessMask, isPermitACE ? "PERMIT" : "DENY"); + return String.format( + "%s %s %s %s", + localGroupName == null ? "NULL" : localGroupName, + storageFileName, + pathAccessMask, + isPermitACE ? "PERMIT" : "DENY"); } - } diff --git a/src/main/java/it/grid/storm/authz/path/model/PathAccessMask.java b/src/main/java/it/grid/storm/authz/path/model/PathAccessMask.java index 6fcc6939..03b38879 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathAccessMask.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathAccessMask.java @@ -1,27 +1,25 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.path.model; import java.util.ArrayList; import java.util.List; -/** - * @author zappi - */ +/** @author zappi */ @SuppressWarnings("serial") public class PathAccessMask { private final List pathAccessMask; - private static List operations = new ArrayList() { + private static List operations = + new ArrayList() { - { - add(PathOperation.READ_FILE); - add(PathOperation.LIST_DIRECTORY); - } - }; + { + add(PathOperation.READ_FILE); + add(PathOperation.LIST_DIRECTORY); + } + }; public static final PathAccessMask DEFAULT = new PathAccessMask(operations); diff --git a/src/main/java/it/grid/storm/authz/path/model/PathAuthzAlgBestMatch.java b/src/main/java/it/grid/storm/authz/path/model/PathAuthzAlgBestMatch.java index b24517ee..0a6c909f 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathAuthzAlgBestMatch.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathAuthzAlgBestMatch.java @@ -1,25 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.path.model; import it.grid.storm.authz.AuthzDecision; import it.grid.storm.common.types.StFN; import it.grid.storm.namespace.naming.NamespaceUtil; - import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; -import java.util.Collections; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class PathAuthzAlgBestMatch extends PathAuthzEvaluationAlgorithm { public static PathAuthzEvaluationAlgorithm getInstance() { @@ -30,9 +25,7 @@ public static PathAuthzEvaluationAlgorithm getInstance() { return instance; } - private PathAuthzAlgBestMatch() { - - } + private PathAuthzAlgBestMatch() {} private static final Logger log = LoggerFactory.getLogger(PathAuthzAlgBestMatch.class); @@ -42,12 +35,10 @@ public String getDescription() { return "< Best Match Path Authorization Algorithm >"; } - /** - * - */ + /** */ @Override - public AuthzDecision evaluate(String subject, StFN fileName, SRMFileRequest pathOperation, - List acl) { + public AuthzDecision evaluate( + String subject, StFN fileName, SRMFileRequest pathOperation, List acl) { AuthzDecision result = AuthzDecision.INDETERMINATE; @@ -105,11 +96,10 @@ public AuthzDecision evaluate(String subject, StFN fileName, SRMFileRequest path result = AuthzDecision.PERMIT; } return result; - } - public AuthzDecision evaluate(String subject, StFN fileName, PathOperation op, - List acl) { + public AuthzDecision evaluate( + String subject, StFN fileName, PathOperation op, List acl) { // Retrieve the list of compatible ACE List compACE = getCompatibleACE(subject, acl); @@ -139,8 +129,8 @@ public AuthzDecision evaluate(String subject, StFN fileName, PathOperation op, } @Override - public AuthzDecision evaluateAnonymous(StFN fileName, PathOperation pathOperation, - LinkedList authzDB) { + public AuthzDecision evaluateAnonymous( + StFN fileName, PathOperation pathOperation, LinkedList authzDB) { if ((authzDB == null) || (authzDB.isEmpty())) { return AuthzDecision.NOT_APPLICABLE; @@ -150,8 +140,8 @@ public AuthzDecision evaluateAnonymous(StFN fileName, PathOperation pathOperatio List orderedACEs = getOrderedACEs(fileName, authzDB); log.debug("There are '{}' ACEs regarding file '{}'", orderedACEs.size(), fileName); - log.trace(" Operation that needs anonymous authorization " + "is: {}", - pathOperation); + log.trace( + " Operation that needs anonymous authorization " + "is: {}", pathOperation); for (OrderedACE oAce : orderedACEs) { if (oAce.ace.isAllGroupsACE() @@ -169,8 +159,8 @@ public AuthzDecision evaluateAnonymous(StFN fileName, PathOperation pathOperatio } @Override - public AuthzDecision evaluateAnonymous(StFN fileName, SRMFileRequest pathOperation, - LinkedList authzDB) { + public AuthzDecision evaluateAnonymous( + StFN fileName, SRMFileRequest pathOperation, LinkedList authzDB) { if ((authzDB == null) || (authzDB.isEmpty())) { return AuthzDecision.NOT_APPLICABLE; @@ -180,8 +170,8 @@ public AuthzDecision evaluateAnonymous(StFN fileName, SRMFileRequest pathOperati List orderedACEs = getOrderedACEs(fileName, authzDB); log.debug("There are '' ACEs regarding file '{}'", orderedACEs.size(), fileName); - log.trace(" Operation that needs anonymous authorization " + "is: {}", - pathOperation); + log.trace( + " Operation that needs anonymous authorization " + "is: {}", pathOperation); PathAccessMask requestedOps = pathOperation.getSRMPathAccessMask(); ArrayList 
ops = new ArrayList(requestedOps.getPathOperations()); HashMap decision = new HashMap(); @@ -220,12 +210,16 @@ private List getCompatibleACE(String subjectGroup, List acl) { if ((acl != null) && (!(acl.isEmpty()))) { for (PathACE pathACE : acl) { if (pathACE.subjectMatch(subjectGroup)) { - log.trace("-compatibleACE: ACE:'{}' match with " + "subject='{}'", pathACE, + log.trace( + "-compatibleACE: ACE:'{}' match with " + "subject='{}'", + pathACE, subjectGroup); compatibleACE.add(pathACE); } else { - log.trace("-compatibleACE: ACE:'{}' DOESN'T match with " + "subject='{}'", - pathACE, subjectGroup); + log.trace( + "-compatibleACE: ACE:'{}' DOESN'T match with " + "subject='{}'", + pathACE, + subjectGroup); } } } else { @@ -248,16 +242,13 @@ private List getOrderedACEs(StFN fileName, List compatibleA StFN aceStFN = pathAce.getStorageFileName(); distance = NamespaceUtil.computeDistanceFromPath(aceStFN.getValue(), fileName.getValue()); bestACEs.add(new OrderedACE(pathAce, distance)); - } // End of cycle - // Sort the BestACE in base of distance + // Sort the BestACE in base of distance Collections.sort(bestACEs); return bestACEs; } - /** - * @author ritz - */ + /** @author ritz */ private class OrderedACE implements Comparable { private final PathACE ace; @@ -302,6 +293,5 @@ public String toString() { return "[" + ace.toString() + "] distance:" + distance; } - } } diff --git a/src/main/java/it/grid/storm/authz/path/model/PathAuthzEvaluationAlgorithm.java b/src/main/java/it/grid/storm/authz/path/model/PathAuthzEvaluationAlgorithm.java index ec8a8409..32dfe413 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathAuthzEvaluationAlgorithm.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathAuthzEvaluationAlgorithm.java @@ -1,18 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.path.model; import it.grid.storm.authz.AuthzDecision; import it.grid.storm.common.types.StFN; - import java.util.LinkedList; import java.util.List; -/** - * @author zappi - */ +/** @author zappi */ public abstract class PathAuthzEvaluationAlgorithm { public static PathAuthzEvaluationAlgorithm instance = null; @@ -26,21 +22,18 @@ public static PathAuthzEvaluationAlgorithm getInstance() throws Exception { return instance; } - public abstract AuthzDecision evaluate(String subject, StFN fileName, - SRMFileRequest pathOperation, List acl); + public abstract AuthzDecision evaluate( + String subject, StFN fileName, SRMFileRequest pathOperation, List acl); - public abstract AuthzDecision evaluate(String subject, StFN fileName, PathOperation pathOperation, - List acl); + public abstract AuthzDecision evaluate( + String subject, StFN fileName, PathOperation pathOperation, List acl); - /** - * @return - */ + /** @return */ public abstract String getDescription(); - public abstract AuthzDecision evaluateAnonymous(StFN fileStFN, PathOperation pathOperation, - LinkedList authzDB); - - public abstract AuthzDecision evaluateAnonymous(StFN fileStFN, SRMFileRequest pathOperation, - LinkedList authzDB); + public abstract AuthzDecision evaluateAnonymous( + StFN fileStFN, PathOperation pathOperation, LinkedList authzDB); + public abstract AuthzDecision evaluateAnonymous( + StFN fileStFN, SRMFileRequest pathOperation, LinkedList authzDB); } diff --git a/src/main/java/it/grid/storm/authz/path/model/PathOperation.java b/src/main/java/it/grid/storm/authz/path/model/PathOperation.java index 1bc5b55b..6d4aa2df 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathOperation.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathOperation.java @@ -1,23 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.model; -/** - * @author zappi - */ - +/** @author zappi */ public enum PathOperation { - WRITE_FILE('W', "WRITE_FILE", "Write data"), READ_FILE('R', "READ_FILE", "Read data", - true), RENAME('F', "RENAME", "Rename a file or a directory"), DELETE('D', "DELETE", - "Delete a file or a directory"), LIST_DIRECTORY('L', "LIST_DIRECTORY", - "Listing a directory", - true), MAKE_DIRECTORY('M', "CREATE_DIRECTORY", "Create a directory"), CREATE_FILE('N', - "CREATE_FILE", "Create a new file"), UNDEFINED('?', "UNDEFINED", "Undefined"); + WRITE_FILE('W', "WRITE_FILE", "Write data"), + READ_FILE('R', "READ_FILE", "Read data", true), + RENAME('F', "RENAME", "Rename a file or a directory"), + DELETE('D', "DELETE", "Delete a file or a directory"), + LIST_DIRECTORY('L', "LIST_DIRECTORY", "Listing a directory", true), + MAKE_DIRECTORY('M', "CREATE_DIRECTORY", "Create a directory"), + CREATE_FILE('N', "CREATE_FILE", "Create a new file"), + UNDEFINED('?', "UNDEFINED", "Undefined"); private final char operation; private final String operationName; @@ -56,8 +52,8 @@ public static PathOperation getSpaceOperation(char op) { return RENAME; case 'D': return DELETE; - // case 'T': - // return TRAVERSE_DIRECTORY; + // case 'T': + // return TRAVERSE_DIRECTORY; case 'L': return LIST_DIRECTORY; case 'M': @@ -99,5 +95,4 @@ public boolean isReadOnly() { return this.readonly; } - } diff --git a/src/main/java/it/grid/storm/authz/path/model/PathPrincipal.java b/src/main/java/it/grid/storm/authz/path/model/PathPrincipal.java index 645e2f6f..3c43eae2 100644 --- a/src/main/java/it/grid/storm/authz/path/model/PathPrincipal.java +++ b/src/main/java/it/grid/storm/authz/path/model/PathPrincipal.java @@ -1,16 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.model; -/** - * @author zappi - * - */ +/** @author zappi */ public class PathPrincipal { public static final String prefix = "@"; @@ -53,5 +47,4 @@ public int hashCode() { result = 31 * result + (principalCategory ? 1 : 0); return result; } - } diff --git a/src/main/java/it/grid/storm/authz/path/model/SRMFileRequest.java b/src/main/java/it/grid/storm/authz/path/model/SRMFileRequest.java index 5b24ba70..307101c7 100644 --- a/src/main/java/it/grid/storm/authz/path/model/SRMFileRequest.java +++ b/src/main/java/it/grid/storm/authz/path/model/SRMFileRequest.java @@ -1,189 +1,222 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.path.model; import java.util.ArrayList; import java.util.HashMap; import java.util.List; -/** - * @author zappi - */ +/** @author zappi */ public enum SRMFileRequest { /** * WRITE_FILE 'W' READ_FILE 'R' RENAME 'F' DELETE 'D' TRAVERSE_DIRECTORY 'T' LIST_DIRECTORY 'L' * MAKE_DIRECTORY 'M' CREATE_FILE 'N' UNDEFINED '?' 
- **/ + */ // Operations to SURL - PTP_Overwrite("srmPrepareToPut-overwrite", "PTP-Over", new ArrayList() { + PTP_Overwrite( + "srmPrepareToPut-overwrite", + "PTP-Over", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.WRITE_FILE); - } - }), + { + add(PathOperation.WRITE_FILE); + } + }), - PTP("srmPrepareToPut", "PTP", new ArrayList() { + PTP( + "srmPrepareToPut", + "PTP", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.CREATE_FILE); - add(PathOperation.WRITE_FILE); - } - }), + { + add(PathOperation.CREATE_FILE); + add(PathOperation.WRITE_FILE); + } + }), - PTG("srmPrepareToGet", "PTG", new ArrayList() { + PTG( + "srmPrepareToGet", + "PTG", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.READ_FILE); - } - }), + { + add(PathOperation.READ_FILE); + } + }), - CPto("srmCopy to", "CPto", new ArrayList() { + CPto( + "srmCopy to", + "CPto", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.WRITE_FILE); - add(PathOperation.CREATE_FILE); - } - }), + { + add(PathOperation.WRITE_FILE); + add(PathOperation.CREATE_FILE); + } + }), - CPto_Overwrite("srmCopy to-overwrite", "CPto_Over", new ArrayList() { + CPto_Overwrite( + "srmCopy to-overwrite", + "CPto_Over", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.WRITE_FILE); - } - }), + { + add(PathOperation.WRITE_FILE); + } + }), - CPfrom("srmCopy from", "CPfrom", new ArrayList() { + CPfrom( + "srmCopy from", + "CPfrom", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.READ_FILE); - } - }), + { + add(PathOperation.READ_FILE); + } + }), // OVERLOAD with OP - RM("srmRemove", "RM", new ArrayList() { + RM( + "srmRemove", + "RM", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.DELETE); - } - }), + { + add(PathOperation.DELETE); + } + }), - RMD("srmRemoveDir", "RMD", new ArrayList() { + RMD( + "srmRemoveDir", + "RMD", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.DELETE); - } - }), + { + add(PathOperation.DELETE); + } + }), - MD("srmMakeDir", "MD", new ArrayList() { + MD( + "srmMakeDir", + "MD", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.MAKE_DIRECTORY); - } - }), + { + add(PathOperation.MAKE_DIRECTORY); + } + }), - LS("srmLS", "LS", new ArrayList() { + LS( + "srmLS", + "LS", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.LIST_DIRECTORY); - } - }), + { + add(PathOperation.LIST_DIRECTORY); + } + }), - MV_source("srmMove-source", "MV-source", new ArrayList() { + MV_source( + "srmMove-source", + "MV-source", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.READ_FILE); - add(PathOperation.DELETE); - } - }), 
+ { + add(PathOperation.READ_FILE); + add(PathOperation.DELETE); + } + }), - MV_dest_Overwrite("srmMove-dest-overwrite", "MV-dest-Over", new ArrayList() { + MV_dest_Overwrite( + "srmMove-dest-overwrite", + "MV-dest-Over", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.WRITE_FILE); - } - }), + { + add(PathOperation.WRITE_FILE); + } + }), - MV_dest("srmMove-dest-overwrite", "MV-dest", new ArrayList() { + MV_dest( + "srmMove-dest-overwrite", + "MV-dest", + new ArrayList() { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - { - add(PathOperation.CREATE_FILE); - add(PathOperation.WRITE_FILE); - } - }); + { + add(PathOperation.CREATE_FILE); + add(PathOperation.WRITE_FILE); + } + }); private final String description; private final String srmOp; private final PathAccessMask requestedPathOps; - private static HashMap ops = new HashMap() { - - /** - * - */ - private static final long serialVersionUID = 1L; - - { - put("PTP-Over", PTP_Overwrite); - put("srmPrepareToPut-overwrite", PTP_Overwrite); - put("PTP", PTP); - put("srmPrepareToPut", PTP); - put("PTG", PTG); - put("srmPrepareToGet", PTG); - put("CPto_Over", CPto_Overwrite); - put("srmCopy to-overwrite", CPto_Overwrite); - put("CPto", CPto); - put("srmCopy to", CPto); - put("CPFrom", CPfrom); - put("srmCopy from", CPfrom); - put("RM", RM); - put("srmRm", RM); - put("RMD", RMD); - put("srmRemoveDir", RM); - put("MD", MD); - put("srmMakeDir", MD); - put("LS", LS); - put("srmLs", LS); - put("MV-source", MV_source); - put("srmMove-source", MV_source); - put("MV-dest-Over", MV_dest_Overwrite); - put("srmMove-dest-overwrite", MV_dest_Overwrite); - put("MV-dest", MV_dest); - put("srmMove-dest", MV_dest); - } - }; + private static HashMap ops = + new HashMap() { + + /** */ + private static final long serialVersionUID = 1L; + + { + put("PTP-Over", PTP_Overwrite); + put("srmPrepareToPut-overwrite", PTP_Overwrite); + put("PTP", PTP); + put("srmPrepareToPut", PTP); + put("PTG", PTG); + put("srmPrepareToGet", PTG); + put("CPto_Over", CPto_Overwrite); + put("srmCopy to-overwrite", CPto_Overwrite); + put("CPto", CPto); + put("srmCopy to", CPto); + put("CPFrom", CPfrom); + put("srmCopy from", CPfrom); + put("RM", RM); + put("srmRm", RM); + put("RMD", RMD); + put("srmRemoveDir", RM); + put("MD", MD); + put("srmMakeDir", MD); + put("LS", LS); + put("srmLs", LS); + put("MV-source", MV_source); + put("srmMove-source", MV_source); + put("MV-dest-Over", MV_dest_Overwrite); + put("srmMove-dest-overwrite", MV_dest_Overwrite); + put("MV-dest", MV_dest); + put("srmMove-dest", MV_dest); + } + }; /* * Used only for testing @@ -197,9 +230,7 @@ public static SRMFileRequest buildFromString(String srmOp) { } } - /** - * SRMOperation - */ + /** SRMOperation */ private SRMFileRequest(String description, String srmOp, List pathOps) { this.description = description; @@ -230,5 +261,4 @@ public boolean isReadOnly() { } return true; } - } diff --git a/src/main/java/it/grid/storm/authz/remote/Constants.java b/src/main/java/it/grid/storm/authz/remote/Constants.java index fc4f48bf..2fb1f3f4 100644 --- a/src/main/java/it/grid/storm/authz/remote/Constants.java +++ b/src/main/java/it/grid/storm/authz/remote/Constants.java @@ -1,13 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.remote; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class Constants { public static final String ENCODING_SCHEME = "UTF-8"; @@ -64,7 +60,7 @@ public class Constants { * /USER?DN_KEY=dn&FQANS_KEY=fquanFQANS_SEPARATORfquan /RESOURCE/VERSION/path/READ_OPERATION * /PLAIN/USER?DN_KEY=dn&FQANS_KEY=fquanFQANS_SEPARATORfquan /RESOURCE/VERSION/ * path/WRITE_OPERATION/PLAIN/USER?DN_KEY=dn&FQANS_KEY=fquanFQANS_SEPARATORfquan - * + * * /RESOURCE/VERSION/path/READ_OPERATION */ } diff --git a/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResource.java b/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResource.java index b5b44562..a1da8c8b 100644 --- a/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResource.java +++ b/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResource.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.remote.resource; @@ -15,26 +14,21 @@ * language governing permissions and limitations under the License. */ -/** -* -*/ - -import java.io.File; +/** */ import it.grid.storm.authz.path.model.PathOperation; import it.grid.storm.authz.path.model.SRMFileRequest; +import it.grid.storm.authz.remote.Constants; +import java.io.File; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.ws.rs.QueryParam; -import it.grid.storm.authz.remote.Constants; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION + "/{filePath}") public class AuthorizationResource { @@ -50,17 +44,22 @@ public class AuthorizationResource { @GET @Path("/" + Constants.READ_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserReadPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserReadPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving read operation authorization on file '{}' User provides " + "a VOMS proxy", + log.info( + "Serving read operation authorization on file '{}' User provides " + "a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), PathOperation.READ_FILE) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + PathOperation.READ_FILE) + .toString(); } /** @@ -73,17 +72,22 @@ public String evaluateVomsGridUserReadPermission(@PathParam("filePath") String f @GET @Path("/" + Constants.WRITE_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String 
evaluateVomsGridUserWritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserWritePermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving write operation authorization on file '{}' User provides " + "a VOMS proxy", + log.info( + "Serving write operation authorization on file '{}' User provides " + "a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), PathOperation.WRITE_FILE) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + PathOperation.WRITE_FILE) + .toString(); } /** @@ -94,20 +98,31 @@ public String evaluateVomsGridUserWritePermission(@PathParam("filePath") String * @throws WebApplicationException */ @GET - @Path("/" + Constants.PREPARE_TO_PUT_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + + Constants.PREPARE_TO_PUT_OPERATION + + "/" + + Constants.VOMS_EXTENSIONS + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserPTPPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserPTPPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving prepareToPut operation authorization on file '{}'. " - + "User provides a VOMS proxy", filePath); + log.info( + "Serving prepareToPut operation authorization on file '{}'. " + + "User provides a VOMS proxy", + filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.PTP) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTP) + .toString(); } /** @@ -118,21 +133,33 @@ public String evaluateVomsGridUserPTPPermission(@PathParam("filePath") String fi * @throws WebApplicationException */ @GET - @Path("/" + Constants.PREPARE_TO_PUT_OVERWRITE_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + + Constants.PREPARE_TO_PUT_OVERWRITE_OPERATION + + "/" + + Constants.VOMS_EXTENSIONS + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserPTPOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserPTPOverwritePermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving prepareToPut Overwrite operation authorization on " - + "file '{}'. 
User provides a VOMS proxy", filePath); + log.info( + "Serving prepareToPut Overwrite operation authorization on " + + "file '{}'. User provides a VOMS proxy", + filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return Boolean - .valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.PTP_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTP_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -143,20 +170,31 @@ public String evaluateVomsGridUserPTPOverwritePermission(@PathParam("filePath") * @throws WebApplicationException */ @GET - @Path("/" + Constants.PREPARE_TO_GET_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + + Constants.PREPARE_TO_GET_OPERATION + + "/" + + Constants.VOMS_EXTENSIONS + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserPTGPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserPTGPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving prepareToGet operation authorization on file '{}'. " - + "User provides a VOMS proxy", filePath); + log.info( + "Serving prepareToGet operation authorization on file '{}'. " + + "User provides a VOMS proxy", + filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.PTG) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTG) + .toString(); } /** @@ -169,26 +207,32 @@ public String evaluateVomsGridUserPTGPermission(@PathParam("filePath") String fi @GET @Path("/" + Constants.RM_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserRmPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserRmPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving rm operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving rm operation authorization on file '{}'. 
" + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); File file = new File(parameters.getFilePathDecoded()); if (file.isDirectory()) { - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.RMD) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.RMD) + .toString(); } else { - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.RM) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.RM) + .toString(); } - } /** @@ -201,17 +245,22 @@ public String evaluateVomsGridUserRmPermission(@PathParam("filePath") String fil @GET @Path("/" + Constants.LS_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserLsPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserLsPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving ls operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving ls operation authorization on file '{}'. " + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.LS) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.LS) + .toString(); } /** @@ -224,17 +273,22 @@ public String evaluateVomsGridUserLsPermission(@PathParam("filePath") String fil @GET @Path("/" + Constants.MKDIR_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserMkdirPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserMkdirPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving mkdir operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving mkdir operation authorization on file '{}'. 
" + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MD) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MD) + .toString(); } /** @@ -247,17 +301,22 @@ public String evaluateVomsGridUserMkdirPermission(@PathParam("filePath") String @GET @Path("/" + Constants.CP_FROM_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserCpFromPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserCpFromPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving cpFrom operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving cpFrom operation authorization on file '{}'. " + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.CPfrom) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPfrom) + .toString(); } /** @@ -268,21 +327,33 @@ public String evaluateVomsGridUserCpFromPermission(@PathParam("filePath") String * @throws WebApplicationException */ @GET - @Path("/" + Constants.CP_TO_OVERWRITE_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + + Constants.CP_TO_OVERWRITE_OPERATION + + "/" + + Constants.VOMS_EXTENSIONS + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserCpToOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserCpToOverwritePermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving cpTo Overwrite operation authorization on file '{}'. " - + "User provides a VOMS proxy", filePath); + log.info( + "Serving cpTo Overwrite operation authorization on file '{}'. 
" + + "User provides a VOMS proxy", + filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return Boolean - .valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.CPto_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPto_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -295,17 +366,22 @@ public String evaluateVomsGridUserCpToOverwritePermission(@PathParam("filePath") @GET @Path("/" + Constants.CP_TO_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserCpToPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserCpToPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving cpTo operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving cpTo operation authorization on file '{}'. " + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.CPto) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPto) + .toString(); } /** @@ -316,38 +392,54 @@ public String evaluateVomsGridUserCpToPermission(@PathParam("filePath") String f * @throws WebApplicationException */ @GET - @Path("/" + Constants.MOVE_FROM_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + Constants.MOVE_FROM_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserMvFromPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserMvFromPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving mvFrom operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving mvFrom operation authorization on file '{}'. 
" + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MV_source) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_source) + .toString(); } @GET - @Path("/" + Constants.MOVE_TO_OVERWRITE_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" - + Constants.USER) + @Path( + "/" + + Constants.MOVE_TO_OVERWRITE_OPERATION + + "/" + + Constants.VOMS_EXTENSIONS + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserMvToOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserMvToOverwritePermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving mvTo operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving mvTo operation authorization on file '{}'. " + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return Boolean - .valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.MV_dest_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_dest_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -360,17 +452,22 @@ public String evaluateVomsGridUserMvToOverwritePermission(@PathParam("filePath") @GET @Path("/" + Constants.MOVE_TO_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserMvToPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserMvToPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { - log.info("Serving mvTo operation authorization on file '{}'. " + "User provides a VOMS proxy", + log.info( + "Serving mvTo operation authorization on file '{}'. 
" + "User provides a VOMS proxy", filePath); RequestParameters parameters = new RequestParameters(filePath, DN, FQANS); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MV_dest) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_dest) + .toString(); } /** @@ -382,15 +479,18 @@ public String evaluateVomsGridUserMvToPermission(@PathParam("filePath") String f @GET @Path("/" + Constants.READ_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserReadPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserReadPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving read operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), PathOperation.READ_FILE) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + PathOperation.READ_FILE) + .toString(); } /** @@ -402,32 +502,44 @@ public String evaluateGridUserReadPermission(@PathParam("filePath") String fileP @GET @Path("/" + Constants.WRITE_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserWritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserWritePermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving write operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), PathOperation.WRITE_FILE) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + PathOperation.WRITE_FILE) + .toString(); } @GET - @Path("/" + Constants.PREPARE_TO_PUT_OVERWRITE_OPERATION + "/" + Constants.PLAIN + "/" - + Constants.USER) + @Path( + "/" + + Constants.PREPARE_TO_PUT_OVERWRITE_OPERATION + + "/" + + Constants.PLAIN + + "/" + + Constants.USER) @Produces("text/plain") - public String evaluateGridUserPTPOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserPTPOverwritePermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving prepareToPut Overwrite operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return Boolean - 
.valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.PTP_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTP_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -439,17 +551,19 @@ public String evaluateGridUserPTPOverwritePermission(@PathParam("filePath") Stri @GET @Path("/" + Constants.PREPARE_TO_PUT_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserPTPPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserPTPPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving prepareToPut operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.PTP) - .toString(); - + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTP) + .toString(); } /** @@ -462,15 +576,18 @@ public String evaluateGridUserPTPPermission(@PathParam("filePath") String filePa @GET @Path("/" + Constants.PREPARE_TO_GET_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserPTGPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserPTGPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving prepareToGet operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.PTG) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.PTG) + .toString(); } /** @@ -483,24 +600,28 @@ public String evaluateGridUserPTGPermission(@PathParam("filePath") String filePa @GET @Path("/" + Constants.RM_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserRmPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserRmPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving rm operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); File file = new File(parameters.getFilePathDecoded()); if (file.isDirectory()) { - return PermissionEvaluator - 
.evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.RMD) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.RMD) + .toString(); } else { - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.RM) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.RM) + .toString(); } - } /** @@ -513,15 +634,18 @@ public String evaluateGridUserRmPermission(@PathParam("filePath") String filePat @GET @Path("/" + Constants.LS_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserLsPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserLsPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving ls operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.LS) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.LS) + .toString(); } /** @@ -534,15 +658,18 @@ public String evaluateGridUserLsPermission(@PathParam("filePath") String filePat @GET @Path("/" + Constants.MKDIR_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserMkdirPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserMkdirPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving mkdir operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MD) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MD) + .toString(); } /** @@ -555,15 +682,18 @@ public String evaluateGridUserMkdirPermission(@PathParam("filePath") String file @GET @Path("/" + Constants.CP_FROM_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserCpFromPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserCpFromPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving cpFrom operation authorization on file '{}'", filePath); RequestParameters 
parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.CPfrom) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPfrom) + .toString(); } /** @@ -575,16 +705,20 @@ public String evaluateGridUserCpFromPermission(@PathParam("filePath") String fil @GET @Path("/" + Constants.CP_TO_OVERWRITE_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserCpToOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserCpToOverwritePermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving cpTo Overwrite operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return Boolean - .valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.CPto_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPto_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -596,15 +730,18 @@ public String evaluateGridUserCpToOverwritePermission(@PathParam("filePath") Str @GET @Path("/" + Constants.CP_TO_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserCpToPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserCpToPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving cpTo operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.CPto) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.CPto) + .toString(); } /** @@ -617,15 +754,18 @@ public String evaluateGridUserCpToPermission(@PathParam("filePath") String fileP @GET @Path("/" + Constants.MOVE_FROM_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserMvFromPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserMvFromPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving mvFrom operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - 
.evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MV_source) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_source) + .toString(); } /** @@ -637,16 +777,20 @@ public String evaluateGridUserMvFromPermission(@PathParam("filePath") String fil @GET @Path("/" + Constants.MOVE_TO_OVERWRITE_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserMvToOverwritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserMvToOverwritePermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving mvTo operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return Boolean - .valueOf(PermissionEvaluator.evaluateVomsGridUserPermission(parameters.getDNDecoded(), - parameters.getFQANSDecoded(), parameters.getFilePathDecoded(), - SRMFileRequest.MV_dest_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_dest_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -658,15 +802,18 @@ public String evaluateGridUserMvToOverwritePermission(@PathParam("filePath") Str @GET @Path("/" + Constants.MOVE_TO_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserMvToPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserMvToPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { log.info("Serving mvTo operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath, DN); - return PermissionEvaluator - .evaluateVomsGridUserPermission(parameters.getDNDecoded(), parameters.getFQANSDecoded(), - parameters.getFilePathDecoded(), SRMFileRequest.MV_dest) - .toString(); + return PermissionEvaluator.evaluateVomsGridUserPermission( + parameters.getDNDecoded(), + parameters.getFQANSDecoded(), + parameters.getFilePathDecoded(), + SRMFileRequest.MV_dest) + .toString(); } /** @@ -683,9 +830,9 @@ public String evaluateAnonymousReadPermission(@PathParam("filePath") String file log.info("Serving Anonymous read operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), PathOperation.READ_FILE) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), PathOperation.READ_FILE) + .toString(); } /** @@ -703,9 +850,9 @@ public String evaluateAnonymousWritePermission(@PathParam("filePath") String fil log.info("Serving Anonymous write operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - 
.evaluateAnonymousPermission(parameters.getFilePathDecoded(), PathOperation.WRITE_FILE) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), PathOperation.WRITE_FILE) + .toString(); } @GET @@ -714,13 +861,15 @@ public String evaluateAnonymousWritePermission(@PathParam("filePath") String fil public String evaluateAnonymousPTPOverwritePermission(@PathParam("filePath") String filePath) throws WebApplicationException { - log.info("Serving Anonymous prepareToPut Overwrite operation " + "authorization on file '{}'", + log.info( + "Serving Anonymous prepareToPut Overwrite operation " + "authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return Boolean - .valueOf(PermissionEvaluator.evaluateAnonymousPermission(parameters.getFilePathDecoded(), - SRMFileRequest.PTP_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.PTP_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -738,10 +887,9 @@ public String evaluateAnonymousPTPPermission(@PathParam("filePath") String fileP log.info("Serving Anonymous prepareToPut operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.PTP) - .toString(); - + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.PTP) + .toString(); } /** @@ -759,9 +907,9 @@ public String evaluateAnonymousPTGPermission(@PathParam("filePath") String fileP log.info("Serving Anonymous prepareToGet operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.PTG) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.PTG) + .toString(); } /** @@ -781,15 +929,14 @@ public String evaluateAnonymousRmPermission(@PathParam("filePath") String filePa RequestParameters parameters = new RequestParameters(filePath); File file = new File(parameters.getFilePathDecoded()); if (file.isDirectory()) { - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.RMD) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.RMD) + .toString(); } else { - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.RM) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.RM) + .toString(); } - } /** @@ -807,9 +954,9 @@ public String evaluateAnonymousLsPermission(@PathParam("filePath") String filePa log.info("Serving Anonymous ls operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.LS) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.LS) + .toString(); } /** @@ -827,9 +974,9 @@ public String 
evaluateAnonymousPermission(@PathParam("filePath") String filePath log.info("Serving Anonymous mkdir operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.MD) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.MD) + .toString(); } /** @@ -847,9 +994,9 @@ public String evaluateAnonymousCpFromPermission(@PathParam("filePath") String fi log.info("Serving Anonymous cpFrom operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.CPfrom) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.CPfrom) + .toString(); } /** @@ -867,9 +1014,9 @@ public String evaluateAnonymousCpToPermission(@PathParam("filePath") String file log.info("Serving Anonymous cpTo operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.CPto) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.CPto) + .toString(); } /** @@ -885,13 +1032,14 @@ public String evaluateAnonymousCpToPermission(@PathParam("filePath") String file public String evaluateAnonymousCpToOverwritePermission(@PathParam("filePath") String filePath) throws WebApplicationException { - log.info("Serving Anonymous cpTo Overwrite operation authorization on" + " file '{}'", - filePath); + log.info( + "Serving Anonymous cpTo Overwrite operation authorization on" + " file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return Boolean - .valueOf(PermissionEvaluator.evaluateAnonymousPermission(parameters.getFilePathDecoded(), - SRMFileRequest.CPto_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.CPto_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -909,9 +1057,9 @@ public String evaluateAnonymousMvFromPermission(@PathParam("filePath") String fi log.info("Serving Anonymous mvFrom operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.MV_source) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.MV_source) + .toString(); } /** @@ -929,10 +1077,11 @@ public String evaluateAnonymousMvToOverwritePermission(@PathParam("filePath") St log.info("Serving Anonymous mvTo operation authorization on " + "file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return Boolean - .valueOf(PermissionEvaluator.evaluateAnonymousPermission(parameters.getFilePathDecoded(), - SRMFileRequest.MV_dest_Overwrite) && PermissionEvaluator.isOverwriteAllowed()) - .toString(); + return Boolean.valueOf( + PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), 
SRMFileRequest.MV_dest_Overwrite) + && PermissionEvaluator.isOverwriteAllowed()) + .toString(); } /** @@ -950,8 +1099,8 @@ public String evaluateAnonymousMvToPermission(@PathParam("filePath") String file log.info("Serving Anonymous mvTo operation authorization on file '{}'", filePath); RequestParameters parameters = new RequestParameters(filePath); - return PermissionEvaluator - .evaluateAnonymousPermission(parameters.getFilePathDecoded(), SRMFileRequest.MV_dest) - .toString(); + return PermissionEvaluator.evaluateAnonymousPermission( + parameters.getFilePathDecoded(), SRMFileRequest.MV_dest) + .toString(); } } diff --git a/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResourceCompat_1_0.java b/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResourceCompat_1_0.java index f5216575..4daaf3cf 100644 --- a/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResourceCompat_1_0.java +++ b/src/main/java/it/grid/storm/authz/remote/resource/AuthorizationResourceCompat_1_0.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.remote.resource; @@ -15,21 +14,16 @@ * language governing permissions and limitations under the License. */ -/** -* -*/ - +/** */ +import it.grid.storm.authz.remote.Constants; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; -import javax.ws.rs.WebApplicationException; import javax.ws.rs.QueryParam; -import it.grid.storm.authz.remote.Constants; +import javax.ws.rs.WebApplicationException; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION_1_0 + "/{filePath}") public class AuthorizationResourceCompat_1_0 { @@ -45,8 +39,10 @@ public class AuthorizationResourceCompat_1_0 { @GET @Path("/" + Constants.READ_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserReadPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserReadPermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { return resourceInstance.evaluateVomsGridUserReadPermission(filePath, DN, FQANS); @@ -62,8 +58,10 @@ public String evaluateVomsGridUserReadPermission(@PathParam("filePath") String f @GET @Path("/" + Constants.WRITE_OPERATION + "/" + Constants.VOMS_EXTENSIONS + "/" + Constants.USER) @Produces("text/plain") - public String evaluateVomsGridUserWritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN, @QueryParam(Constants.FQANS_KEY) String FQANS) + public String evaluateVomsGridUserWritePermission( + @PathParam("filePath") String filePath, + @QueryParam(Constants.DN_KEY) String DN, + @QueryParam(Constants.FQANS_KEY) String FQANS) throws WebApplicationException { return resourceInstance.evaluateVomsGridUserWritePermission(filePath, DN, FQANS); @@ -78,8 +76,9 @@ public String evaluateVomsGridUserWritePermission(@PathParam("filePath") String @GET @Path("/" + Constants.READ_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String 
evaluateGridUserReadPermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserReadPermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { return resourceInstance.evaluateGridUserReadPermission(filePath, DN); } @@ -93,8 +92,9 @@ public String evaluateGridUserReadPermission(@PathParam("filePath") String fileP @GET @Path("/" + Constants.WRITE_OPERATION + "/" + Constants.PLAIN + "/" + Constants.USER) @Produces("text/plain") - public String evaluateGridUserWritePermission(@PathParam("filePath") String filePath, - @QueryParam(Constants.DN_KEY) String DN) throws WebApplicationException { + public String evaluateGridUserWritePermission( + @PathParam("filePath") String filePath, @QueryParam(Constants.DN_KEY) String DN) + throws WebApplicationException { return resourceInstance.evaluateGridUserWritePermission(filePath, DN); } diff --git a/src/main/java/it/grid/storm/authz/remote/resource/PermissionEvaluator.java b/src/main/java/it/grid/storm/authz/remote/resource/PermissionEvaluator.java index 99ef444a..8c528728 100644 --- a/src/main/java/it/grid/storm/authz/remote/resource/PermissionEvaluator.java +++ b/src/main/java/it/grid/storm/authz/remote/resource/PermissionEvaluator.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.remote.resource; @@ -8,32 +7,6 @@ import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import static javax.ws.rs.core.Response.Status.NOT_FOUND; -import java.util.Arrays; -import java.util.List; - -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/* - * - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not - * use this file except in compliance with the License. You may obtain a copy of - * the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. 
- */ - import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; import it.grid.storm.authz.path.model.PathOperation; @@ -52,6 +25,12 @@ import it.grid.storm.namespace.model.Protocol; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.srm.types.TOverwriteMode; +import java.util.Arrays; +import java.util.List; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class PermissionEvaluator { @@ -60,12 +39,12 @@ class PermissionEvaluator { public static Boolean isOverwriteAllowed() { return OverwriteModeConverter.getInstance() - .toSTORM(Configuration.getInstance().getDefaultOverwriteMode()) - .equals(TOverwriteMode.ALWAYS); + .toSTORM(Configuration.getInstance().getDefaultOverwriteMode()) + .equals(TOverwriteMode.ALWAYS); } - static Boolean evaluateVomsGridUserPermission(String DNDecoded, String FQANSDecoded, - String filePathDecoded, PathOperation operation) { + static Boolean evaluateVomsGridUserPermission( + String DNDecoded, String FQANSDecoded, String filePathDecoded, PathOperation operation) { String[] FQANSArray = parseFQANS(FQANSDecoded); GridUserInterface gu = buildGridUser(DNDecoded, FQANSArray); @@ -74,27 +53,39 @@ static Boolean evaluateVomsGridUserPermission(String DNDecoded, String FQANSDeco try { fileVFS = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath(filePathDecoded); } catch (NamespaceException e) { - log.error("Unable to determine a VFS that maps the requested file " - + "path '{}'. NamespaceException: {}", filePathDecoded, e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to determine a VFS that maps the requested file " + + "path '{}'. NamespaceException: {}", + filePathDecoded, + e.getMessage()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } if (!fileVFS.getCapabilities().getAllManagedProtocols().contains(Protocol.HTTPS)) { - log.debug("User '{}' is not authorized to access the requeste file '{}' via " + "HTTPS", gu, + log.debug( + "User '{}' is not authorized to access the requeste file '{}' via " + "HTTPS", + gu, filePathDecoded); return Boolean.valueOf(false); } if (!fileVFS.isApproachableByUser(gu)) { log.debug( "User '{}' is not authorized to approach the requested " + "Storage Area '{}' via HTTPS", - gu, fileVFS.getAliasName()); + gu, + fileVFS.getAliasName()); return Boolean.valueOf(false); } StFN fileStFN = buildStFN(filePathDecoded, fileVFS); AuthzDecision decision = AuthzDirector.getPathAuthz().authorize(gu, operation, fileStFN); - log.info("Authorization decision for user '{}{}' requesting {} on {} is " + "[{}]", DNDecoded, - FQANSDecoded == null ? "" : " - " + FQANSDecoded, operation, filePathDecoded, decision); + log.info( + "Authorization decision for user '{}{}' requesting {} on {} is " + "[{}]", + DNDecoded, + FQANSDecoded == null ? 
"" : " - " + FQANSDecoded, + operation, + filePathDecoded, + decision); return evaluateDecision(decision); } @@ -106,8 +97,9 @@ static Boolean evaluateVomsGridUserPermission(String DNDecoded, String FQANSDeco * @return never null * @throws WebApplicationException */ - static Boolean evaluateVomsGridUserPermission(String DNDecoded, String FQANSDecoded, - String filePathDecoded, SRMFileRequest request) throws WebApplicationException { + static Boolean evaluateVomsGridUserPermission( + String DNDecoded, String FQANSDecoded, String filePathDecoded, SRMFileRequest request) + throws WebApplicationException { String[] FQANSArray = parseFQANS(FQANSDecoded); GridUserInterface gu = buildGridUser(DNDecoded, FQANSArray); @@ -116,21 +108,32 @@ static Boolean evaluateVomsGridUserPermission(String DNDecoded, String FQANSDeco try { fileVFS = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath(filePathDecoded); } catch (NamespaceException e) { - log.error("Unable to determine a VFS that maps the requested file " - + "path '{}'. NamespaceException: {}", filePathDecoded, e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to determine a VFS that maps the requested file " + + "path '{}'. NamespaceException: {}", + filePathDecoded, + e.getMessage()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } if (!fileVFS.isApproachableByUser(gu)) { - log.debug("User '{}' is not authorized to approach the requeste Storage " + "Area '{}'", gu, + log.debug( + "User '{}' is not authorized to approach the requeste Storage " + "Area '{}'", + gu, fileVFS.getAliasName()); return Boolean.valueOf(false); } StFN fileStFN = buildStFN(filePathDecoded, fileVFS); AuthzDecision decision = AuthzDirector.getPathAuthz().authorize(gu, request, fileStFN); - log.info("Authorization decision for user '{}{}' requesting {} on {} is " + "[{}]", DNDecoded, - FQANSDecoded == null ? "" : " - " + FQANSDecoded, request, filePathDecoded, decision); + log.info( + "Authorization decision for user '{}{}' requesting {} on {} is " + "[{}]", + DNDecoded, + FQANSDecoded == null ? "" : " - " + FQANSDecoded, + request, + filePathDecoded, + decision); return evaluateDecision(decision); } @@ -140,19 +143,27 @@ static Boolean evaluateAnonymousPermission(String filePathDecoded, PathOperation try { fileVFS = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath(filePathDecoded); } catch (NamespaceException e) { - log.error("Unable to determine a VFS that maps the requested file " - + "path '{}'. NamespaceException: {}", filePathDecoded, e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to determine a VFS that maps the requested file " + + "path '{}'. 
NamespaceException: {}", + filePathDecoded, + e.getMessage()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } if (!fileVFS.getCapabilities().getAllManagedProtocols().contains(Protocol.HTTP)) { - log.debug("The requeste Storage Area '{}' is not approachable via " + "HTTPS", + log.debug( + "The requeste Storage Area '{}' is not approachable via " + "HTTPS", fileVFS.getAliasName()); return Boolean.FALSE; } - log.info("Authorization decision for Anonymous user requesting {} on {} " + "is [{}]", request, - filePathDecoded, AuthzDecision.PERMIT); + log.info( + "Authorization decision for Anonymous user requesting {} on {} " + "is [{}]", + request, + filePathDecoded, + AuthzDecision.PERMIT); return Boolean.TRUE; } @@ -162,22 +173,30 @@ static Boolean evaluateAnonymousPermission(String filePathDecoded, SRMFileReques try { fileVFS = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath(filePathDecoded); } catch (NamespaceException e) { - log.error("Unable to determine a VFS that maps the requested file " - + "path '{}'. NamespaceException: {}", filePathDecoded, e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to determine a VFS that maps the requested file " + + "path '{}'. NamespaceException: {}", + filePathDecoded, + e.getMessage()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } if (!fileVFS.isApproachableByAnonymous() && !(request.isReadOnly() && fileVFS.isHttpWorldReadable())) { - log.debug("The requeste Storage Area '{}' is not approachable by " + "anonymous users", + log.debug( + "The requeste Storage Area '{}' is not approachable by " + "anonymous users", fileVFS.getAliasName()); return Boolean.valueOf(false); } StFN fileStFN = buildStFN(filePathDecoded, fileVFS); AuthzDecision decision = AuthzDirector.getPathAuthz().authorizeAnonymous(request, fileStFN); - log.info("Authorization decision for Anonymous user requesting {} " + "on {} is [{}]", request, - filePathDecoded, decision); + log.info( + "Authorization decision for Anonymous user requesting {} " + "on {} is [{}]", + request, + filePathDecoded, + decision); return evaluateDecision(decision); } @@ -190,14 +209,17 @@ private static Boolean evaluateDecision(AuthzDecision decision) { return Boolean.valueOf(false); } else { if (decision.equals(AuthzDecision.INDETERMINATE)) { - log.warn("Authorization decision is INDETERMINATE! Unable to " - + "determine authorization of the user to perform requested " - + "operation on the resource"); + log.warn( + "Authorization decision is INDETERMINATE! Unable to " + + "determine authorization of the user to perform requested " + + "operation on the resource"); return Boolean.valueOf(false); } else { - log.warn("Authorization decision has an unknown value '{}'! " - + "Unable to determine authorization of the user to perform " - + "requested operation on the resource", decision); + log.warn( + "Authorization decision has an unknown value '{}'! 
" + + "Unable to determine authorization of the user to perform " + + "requested operation on the resource", + decision); return Boolean.valueOf(false); } } @@ -214,11 +236,14 @@ private static StFN buildStFN(String filePathDecoded, VirtualFS fileVFS) VFSRootPath = fileVFS.getRootPath(); if (VFSRootPath == null) { - log.error("Unable to build StFN for path '{}'. VFS: {} has null " + "RootPath", - filePathDecoded, fileVFS.getAliasName()); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to build StFN for path the provided path") - .build()); + log.error( + "Unable to build StFN for path '{}'. VFS: {} has null " + "RootPath", + filePathDecoded, + fileVFS.getAliasName()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to build StFN for path the provided path") + .build()); } if (!VFSRootPath.startsWith("/")) { VFSRootPath = "/" + VFSRootPath; @@ -231,11 +256,14 @@ private static StFN buildStFN(String filePathDecoded, VirtualFS fileVFS) if (VFSMappingRules != null && VFSMappingRules.size() > 0) { VFSStFNRoot = VFSMappingRules.get(0).getStFNRoot(); if (VFSStFNRoot == null) { - log.error("Unable to build StFN for path '{}'. VFS: {} has null " + "StFNRoot", - filePathDecoded, fileVFS.getAliasName()); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to build StFN for path the provided path") - .build()); + log.error( + "Unable to build StFN for path '{}'. VFS: {} has null " + "StFNRoot", + filePathDecoded, + fileVFS.getAliasName()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to build StFN for path the provided path") + .build()); } if (!VFSStFNRoot.startsWith("/")) { VFSStFNRoot = "/" + VFSStFNRoot; @@ -245,42 +273,54 @@ private static StFN buildStFN(String filePathDecoded, VirtualFS fileVFS) } log.debug("Chosen StFNRoot {}", VFSStFNRoot); } else { - log.error("Unable to determine the StFNRoot for file path's VFS. " - + "VFSMappingRules is empty!"); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determinate the StFNRoot for file path's VFS") - .build()); + log.error( + "Unable to determine the StFNRoot for file path's VFS. " + + "VFSMappingRules is empty!"); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determinate the StFNRoot for file path's VFS") + .build()); } } else { - log.error("None of the VFS maps the requested file path '{}'. " + "fileVFS is null!", + log.error( + "None of the VFS maps the requested file path '{}'. " + "fileVFS is null!", filePathDecoded); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } } catch (NamespaceException e) { - log.error("Unable to determine a VFS that maps the requested file " - + "path '{}'. NamespaceException: {}", filePathDecoded, e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to determine a VFS that maps the requested file " + + "path '{}'. 
NamespaceException: {}", + filePathDecoded, + e.getMessage()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } if (!filePathDecoded.startsWith(VFSRootPath)) { log.error("The provided file path does not starts with the VFSRoot " + "of its VFS"); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("The provided file path does not starts with the VFSRoot of its VFS") - .build()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("The provided file path does not starts with the VFSRoot of its VFS") + .build()); } String fileStFNpath = VFSStFNRoot + filePathDecoded.substring(VFSRootPath.length(), filePathDecoded.length()); try { return StFN.make(fileStFNpath); } catch (InvalidStFNAttributeException e) { - log.error("Unable to build StFN for path '{}'. " + "InvalidStFNAttributeException: {}", - fileStFNpath, e.getMessage()); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to determine file path\'s associated virtual file system") - .build()); + log.error( + "Unable to build StFN for path '{}'. " + "InvalidStFNAttributeException: {}", + fileStFNpath, + e.getMessage()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to determine file path\'s associated virtual file system") + .build()); } } @@ -298,11 +338,18 @@ private static GridUserInterface buildGridUser(String DNDecoded, String[] FQANSA log.error( "Unable to build the GridUserInterface object for DN '{}' " + "and FQANS '{}'. IllegalArgumentException: {}", - DNDecoded, Arrays.toString(FQANSArray), e.getMessage()); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to build a GridUser for DN \'" + DNDecoded + "\' and FQANS \'" - + Arrays.toString(FQANSArray) + "\'. Missing argument(s)") - .build()); + DNDecoded, + Arrays.toString(FQANSArray), + e.getMessage()); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to build a GridUser for DN \'" + + DNDecoded + + "\' and FQANS \'" + + Arrays.toString(FQANSArray) + + "\'. Missing argument(s)") + .build()); } } @@ -320,7 +367,7 @@ private static String[] parseFQANS(String fQANS) { /** * Creates a GridUserInterface from the provided DN and FQANS - * + * * @param dn * @param fqansStringVector * @return the VOMS grid user corresponding to the provided parameters. never null @@ -342,15 +389,18 @@ private static GridUserInterface loadVomsGridUser(String dn, String[] fqansStrin try { gridUser = GridUserManager.makeVOMSGridUser(dn, fqansVector); } catch (IllegalArgumentException e) { - log.error("Unexpected error on voms grid user creation. Contact " - + "StoRM Support : IllegalArgumentException {}", e.getMessage(), e); + log.error( + "Unexpected error on voms grid user creation. Contact " + + "StoRM Support : IllegalArgumentException {}", + e.getMessage(), + e); } return gridUser; } /** * Creates a GridUserInterface from the provided DN - * + * * @param dn * @return the grid user corresponding to the provided parameter. 
never null * @throws IllegalArgumentException @@ -363,5 +413,4 @@ private static GridUserInterface loadGridUser(String dn) throws IllegalArgumentE } return GridUserManager.makeGridUser(dn); } - } diff --git a/src/main/java/it/grid/storm/authz/remote/resource/RequestParameters.java b/src/main/java/it/grid/storm/authz/remote/resource/RequestParameters.java index 94a60e3b..847190f3 100644 --- a/src/main/java/it/grid/storm/authz/remote/resource/RequestParameters.java +++ b/src/main/java/it/grid/storm/authz/remote/resource/RequestParameters.java @@ -1,24 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.remote.resource; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import it.grid.storm.authz.remote.Constants; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; - import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.authz.remote.Constants; - class RequestParameters { private static final Logger log = LoggerFactory.getLogger(RequestParameters.class); @@ -62,31 +58,42 @@ private static String decodeAndCheckFilePath(String filePath) throws WebApplicat try { filePathDecoded = URLDecoder.decode(filePath, Constants.ENCODING_SCHEME); } catch (UnsupportedEncodingException e) { - log.error("Unable to decode filePath parameter. " + "UnsupportedEncodingException: {}", - e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode filePath paramether, unsupported encoding \'" - + Constants.ENCODING_SCHEME + "\'") - .build()); + log.error( + "Unable to decode filePath parameter. " + "UnsupportedEncodingException: {}", + e.getMessage(), + e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode filePath paramether, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); } log.debug("Decoded filePath = {}", filePathDecoded); if (filePathDecoded == null || filePathDecoded.trim().equals("")) { - log.error("Unable to evaluate permissions. Some parameters are missing: " + "filePath {}", + log.error( + "Unable to evaluate permissions. Some parameters are missing: " + "filePath {}", filePathDecoded); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to evaluate permissions. Some parameters are missing") - .build()); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to evaluate permissions. Some parameters are missing") + .build()); } URI filePathURI; try { filePathURI = new URI(filePathDecoded); } catch (URISyntaxException e) { - log.error("Unable to evaluate permissions on path {}. " + "URISyntaxException: {}", - filePathDecoded, e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to evaluate permissions. Invalid file path") - .build()); + log.error( + "Unable to evaluate permissions on path {}. " + "URISyntaxException: {}", + filePathDecoded, + e.getMessage(), + e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to evaluate permissions. 
Invalid file path") + .build()); } return filePathURI.normalize().toString(); } @@ -97,20 +104,26 @@ private static String decodeAndCheckDN(String DN) throws WebApplicationException try { DNDecoded = URLDecoder.decode(DN, Constants.ENCODING_SCHEME); } catch (UnsupportedEncodingException e) { - log.error("Unable to decode DN parameter. " + "UnsupportedEncodingException: {}", - e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode DN paramether, unsupported encoding \'" - + Constants.ENCODING_SCHEME + "\'") - .build()); + log.error( + "Unable to decode DN parameter. " + "UnsupportedEncodingException: {}", + e.getMessage(), + e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode DN paramether, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); } log.debug("Decoded DN = {}", DNDecoded); if (DNDecoded == null || DNDecoded.trim().equals("")) { log.error("Unable to evaluate permissions. Some parameters are missing : DN {}", DNDecoded); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to evaluate permissions. Some parameters are missing") - .build()); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to evaluate permissions. Some parameters are missing") + .build()); } return DNDecoded; } @@ -121,52 +134,50 @@ private static String decodeAndCheckFQANS(String FQANS) throws WebApplicationExc try { FQANSDecoded = URLDecoder.decode(FQANS, Constants.ENCODING_SCHEME); } catch (UnsupportedEncodingException e) { - log.error("Unable to decode FQANS parameter. " + "UnsupportedEncodingException: {}", - e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode FQANS paramether, unsupported encoding \'" - + Constants.ENCODING_SCHEME + "\'") - .build()); + log.error( + "Unable to decode FQANS parameter. " + "UnsupportedEncodingException: {}", + e.getMessage(), + e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode FQANS paramether, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); } log.debug("Decoded FQANS = {}", FQANSDecoded); if (FQANSDecoded == null || FQANSDecoded.trim().equals("")) { - log.error("Unable to evaluate permissions. Some parameters are " + "missing : FQANS {}", - FQANS); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to evaluate permissions. Some parameters are missing") - .build()); + log.error( + "Unable to evaluate permissions. Some parameters are " + "missing : FQANS {}", FQANS); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to evaluate permissions. 
Some parameters are missing") + .build()); } return FQANSDecoded; } - /** - * @return the dNDecoded - */ + /** @return the dNDecoded */ public String getDNDecoded() { return DNDecoded; } - /** - * @return the fQANSDecoded - */ + /** @return the fQANSDecoded */ public String getFQANSDecoded() { return FQANSDecoded; } - /** - * @return the filePathDecoded - */ + /** @return the filePathDecoded */ public String getFilePathDecoded() { return filePathDecoded; } - /** - * @return the hasVomsExtension - */ + /** @return the hasVomsExtension */ public boolean hasVomsExtension() { return hasVomsExtension; diff --git a/src/main/java/it/grid/storm/authz/sa/AuthzDBInterface.java b/src/main/java/it/grid/storm/authz/sa/AuthzDBInterface.java index 4f0a1709..1743e505 100644 --- a/src/main/java/it/grid/storm/authz/sa/AuthzDBInterface.java +++ b/src/main/java/it/grid/storm/authz/sa/AuthzDBInterface.java @@ -1,12 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa; import it.grid.storm.authz.sa.model.SpaceACE; import it.grid.storm.namespace.model.SAAuthzType; - import java.util.List; public interface AuthzDBInterface { @@ -14,5 +12,4 @@ public interface AuthzDBInterface { public SAAuthzType getAuthzDBType(); public List getOrderedListOfACE(); - } diff --git a/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderException.java b/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderException.java index 2778274f..c823e550 100644 --- a/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderException.java +++ b/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderException.java @@ -1,14 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa; public class AuthzDBReaderException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; public AuthzDBReaderException() { diff --git a/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderInterface.java b/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderInterface.java index ba863b5c..a97451ee 100644 --- a/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderInterface.java +++ b/src/main/java/it/grid/storm/authz/sa/AuthzDBReaderInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa; @@ -8,15 +7,13 @@ public interface AuthzDBReaderInterface { - public void addAuthzDB(String dbFileName) throws AuthzDBReaderException; + public void addAuthzDB(String dbFileName) throws AuthzDBReaderException; - public List getAuthzDBNames(); + public List getAuthzDBNames(); - public void onChangeAuthzDB(String authzDBName) throws AuthzDBReaderException; + public void onChangeAuthzDB(String authzDBName) throws AuthzDBReaderException; - public AuthzDBInterface getAuthzDB(String authzDBName) - throws AuthzDBReaderException; - - public long getLastParsed(String dbFileName) throws AuthzDBReaderException; + public AuthzDBInterface getAuthzDB(String authzDBName) throws AuthzDBReaderException; + public long getLastParsed(String dbFileName) throws AuthzDBReaderException; } diff --git a/src/main/java/it/grid/storm/authz/sa/SpaceAuthz.java b/src/main/java/it/grid/storm/authz/sa/SpaceAuthz.java index 6e65f538..4dd45a64 100644 --- a/src/main/java/it/grid/storm/authz/sa/SpaceAuthz.java +++ b/src/main/java/it/grid/storm/authz/sa/SpaceAuthz.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa; @@ -18,7 +17,6 @@ public abstract class SpaceAuthz implements SpaceAuthzInterface { * @todo: 3) IMPLEMENT PRINCIPAL LIST PERSISTENCE * @todo: 4) IMPLEMENT RECALCULATE CACHE */ - public SpaceAuthz() { super(); @@ -35,5 +33,4 @@ public AuthzDBInterface getAuthzDB() { return authzDB; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/SpaceDBAuthz.java b/src/main/java/it/grid/storm/authz/sa/SpaceDBAuthz.java index 4f605e59..c83317cc 100644 --- a/src/main/java/it/grid/storm/authz/sa/SpaceDBAuthz.java +++ b/src/main/java/it/grid/storm/authz/sa/SpaceDBAuthz.java @@ -1,24 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa; -import java.io.File; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.authz.sa.model.SRMSpaceRequest; import it.grid.storm.config.Configuration; import it.grid.storm.griduser.GridUserInterface; +import java.io.File; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class SpaceDBAuthz extends SpaceAuthz { private static final Logger log = LoggerFactory.getLogger(SpaceDBAuthz.class); @@ -29,13 +22,9 @@ public class SpaceDBAuthz extends SpaceAuthz { private static String configurationPATH; private String dbFileName; - public SpaceDBAuthz() { - - } + public SpaceDBAuthz() {} - /** - * @return - */ + /** @return */ public static SpaceDBAuthz makeEmpty() { SpaceDBAuthz result = new SpaceDBAuthz(); @@ -53,17 +42,13 @@ public SpaceDBAuthz(String dbFileName) { } } - /** - * @param string - */ + /** @param string */ void setSpaceAuthzDBID(String id) { spaceAuthzDBID = id; } - /** - * - */ + /** */ @Override public boolean authorize(GridUserInterface guser, SRMSpaceRequest srmSpaceOp) { @@ -76,14 +61,9 @@ public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp) { return false; } + /** ******************************************************************** BUILDINGs METHODS */ - /********************************************************************** - * BUILDINGs METHODS - */ - - /** - * Check the existence of the AuthzDB file - */ + /** Check the existence of the AuthzDB file */ private boolean existsAuthzDBFile(String dbFileName) { String fileName = configurationPATH + File.separator + dbFileName; @@ -96,7 +76,7 @@ private boolean existsAuthzDBFile(String dbFileName) { /** * Return the AuthzDB FileName - * + * * @return */ String getAuthzDBFileName() { @@ -109,12 +89,9 @@ public String getSpaceAuthzID() { return spaceAuthzDBID; } - /** - * - */ + /** */ public void refresh() { // empty } - } diff --git a/src/main/java/it/grid/storm/authz/sa/SpaceFixedAuthz.java b/src/main/java/it/grid/storm/authz/sa/SpaceFixedAuthz.java index b1604433..a8083d0a 100644 --- a/src/main/java/it/grid/storm/authz/sa/SpaceFixedAuthz.java +++ b/src/main/java/it/grid/storm/authz/sa/SpaceFixedAuthz.java @@ -1,49 +1,38 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa; import it.grid.storm.authz.sa.model.AuthzDBFixed; import it.grid.storm.authz.sa.model.SRMSpaceRequest; import it.grid.storm.griduser.GridUserInterface; -/** - * @author zappi - */ +/** @author zappi */ public class SpaceFixedAuthz extends SpaceAuthz { - private static final String FIXED_ID = "fixed-space-authz"; - - public SpaceFixedAuthz(AuthzDBFixed fixedAuthzDB) - throws AuthzDBReaderException { - - } - - @Override - public boolean authorize(GridUserInterface guser, SRMSpaceRequest srmSpaceOp) { + private static final String FIXED_ID = "fixed-space-authz"; - // @todo : implement the simple algorithm. 
- return true; - } + public SpaceFixedAuthz(AuthzDBFixed fixedAuthzDB) throws AuthzDBReaderException {} - @Override - public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp) { + @Override + public boolean authorize(GridUserInterface guser, SRMSpaceRequest srmSpaceOp) { - // TODO Auto-generated method stub - return true; - } + // @todo : implement the simple algorithm. + return true; + } - public String getSpaceAuthzID() { + @Override + public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp) { - return FIXED_ID; - } + // TODO Auto-generated method stub + return true; + } - public void refresh() { + public String getSpaceAuthzID() { - } + return FIXED_ID; + } + public void refresh() {} } diff --git a/src/main/java/it/grid/storm/authz/sa/model/AceType.java b/src/main/java/it/grid/storm/authz/sa/model/AceType.java index 298208bb..f1bd492a 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/AceType.java +++ b/src/main/java/it/grid/storm/authz/sa/model/AceType.java @@ -1,14 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; public class AceType { - public final static AceType ALLOW = new AceType("ALLOW"); - public final static AceType DENY = new AceType("DENY"); - public final static AceType UNKNOWN = new AceType("UNKNOWN"); + public static final AceType ALLOW = new AceType("ALLOW"); + public static final AceType DENY = new AceType("DENY"); + public static final AceType UNKNOWN = new AceType("UNKNOWN"); private String aceType; @@ -32,5 +31,4 @@ public String toString() { return aceType; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/AuthzDBFixed.java b/src/main/java/it/grid/storm/authz/sa/model/AuthzDBFixed.java index b28f7cbe..c4f4d0f4 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/AuthzDBFixed.java +++ b/src/main/java/it/grid/storm/authz/sa/model/AuthzDBFixed.java @@ -1,26 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBInterface; import it.grid.storm.namespace.model.SAAuthzType; - import java.util.List; -/** - * @author zappi - * - */ +/** @author zappi */ public abstract class AuthzDBFixed implements AuthzDBInterface { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.AuthzDBInterface#getAuthzDBType() */ public SAAuthzType getAuthzDBType() { @@ -30,9 +23,8 @@ public SAAuthzType getAuthzDBType() { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.AuthzDBInterface#getOrderedListOfACE() */ public abstract List getOrderedListOfACE(); - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/DNEveryonePattern.java b/src/main/java/it/grid/storm/authz/sa/model/DNEveryonePattern.java index 10fdc723..7144ee78 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/DNEveryonePattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/DNEveryonePattern.java @@ -1,26 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBReaderException; import it.grid.storm.griduser.DistinguishedName; import it.grid.storm.griduser.SubjectAttribute; -/** - * @author zappi - * - */ +/** @author zappi */ public class DNEveryonePattern extends DNPattern implements Everyone { - /** - * CONSTRUCTOR - */ - + /** CONSTRUCTOR */ public DNEveryonePattern() throws AuthzDBReaderException { super("*"); @@ -31,7 +22,7 @@ public DNEveryonePattern() throws AuthzDBReaderException { /* * Return always true because the pattern is built programmatically, and it is supposed to be * valid. - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#isValidPattern() */ @Override @@ -42,7 +33,7 @@ public boolean isValidPattern() throws AuthzDBReaderException { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#match(it.grid.storm.griduser * .SubjectAttribute) */ @@ -55,5 +46,4 @@ public boolean match(SubjectAttribute subjectAttribute) { } return false; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/DNPattern.java b/src/main/java/it/grid/storm/authz/sa/model/DNPattern.java index c557d6b7..bb0608e6 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/DNPattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/DNPattern.java @@ -1,25 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBReaderException; import it.grid.storm.griduser.DistinguishedName; import it.grid.storm.griduser.SubjectAttribute; - import java.util.ArrayList; import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; -/** - * @author zappi - * - */ +/** @author zappi */ public class DNPattern implements SubjectPattern { private static final String ADMIT_ALL = ".*"; @@ -44,10 +37,7 @@ public class DNPattern implements SubjectPattern { protected boolean checkValidity; - /** - * CONSTRUCTOR - */ - + /** CONSTRUCTOR */ public DNPattern(String dnPatternString) throws AuthzDBReaderException { this(dnPatternString, true); @@ -90,7 +80,6 @@ private void generatePattern() { if (rules[i].startsWith("DC=")) { domainComponentPatternString = rules[i].substring(3, rules[i].length()); } - } } else { countryPatternString = ADMIT_ALL; @@ -100,23 +89,32 @@ private void generatePattern() { commonNamePatternString = ADMIT_ALL; domainComponentPatternString = ADMIT_ALL; } - init(countryPatternString, organizationPatternString, organizationalUnitPatternString, - localityPatternString, commonNamePatternString, domainComponentPatternString); + init( + countryPatternString, + organizationPatternString, + organizationalUnitPatternString, + localityPatternString, + commonNamePatternString, + domainComponentPatternString); } } /** * private method used to initialize everything - * + * * @param countryPatternString String * @param organizationPatternString String * @param organizationalUnitPatternString String * @param localityPatternString String * @param commonNameString String */ - protected void init(String countryPatternString, String organizationPatternString, - String organizationalUnitPatternString, String localityPatternString, - String commonNamePatternString, String domainComponentPatternString) { + protected void init( + String 
countryPatternString, + String organizationPatternString, + String organizationalUnitPatternString, + String localityPatternString, + String commonNamePatternString, + String domainComponentPatternString) { this.countryPatternString = countryPatternString; this.organizationPatternString = organizationPatternString; @@ -190,12 +188,11 @@ protected void init(String countryPatternString, String organizationPatternStrin domainComponentPatternString = ADMIT_ALL; domainComponentPattern = Pattern.compile(ADMIT_ALL); } - } /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#match(it.grid.storm.griduser .FQAN) */ // @Override @@ -257,7 +254,7 @@ public boolean match(SubjectAttribute subjectAttribute) { CharSequence organizationalUnit = null; String nameStr = null; Matcher organizationalUnitMatcher = null; - for (Iterator name = organizationalUnitNames.iterator(); name.hasNext();) { + for (Iterator name = organizationalUnitNames.iterator(); name.hasNext(); ) { nameStr = name.next(); organizationalUnit = nameStr.subSequence(0, nameStr.length()); organizationalUnitMatcher = organizationalUnitPattern.matcher(organizationalUnit); @@ -279,7 +276,7 @@ public boolean match(SubjectAttribute subjectAttribute) { CharSequence commonName = null; String commonNameStr = null; Matcher commonNameMatcher = null; - for (Iterator scanCN = commonNames.iterator(); scanCN.hasNext();) { + for (Iterator scanCN = commonNames.iterator(); scanCN.hasNext(); ) { commonNameStr = scanCN.next(); commonName = commonNameStr.subSequence(0, commonNameStr.length()); commonNameMatcher = commonNamePattern.matcher(commonName); @@ -301,7 +298,7 @@ public boolean match(SubjectAttribute subjectAttribute) { CharSequence domainComponent = null; String domainComponentStr = null; Matcher domainComponentMatcher = null; - for (Iterator scanDC = domainComponents.iterator(); scanDC.hasNext();) { + for (Iterator scanDC = domainComponents.iterator(); scanDC.hasNext(); ) { domainComponentStr = scanDC.next(); domainComponent = domainComponentStr.subSequence(0, domainComponentStr.length()); domainComponentMatcher = domainComponentPattern.matcher(domainComponent); @@ -319,8 +316,13 @@ public boolean match(SubjectAttribute subjectAttribute) { // Total Result // NOTE : At this point result should be always TRUE! - result = countryMatch && organizationMatch && organizationalUnitMatch && localityMatch - && commonNameMatch && domainComponentMatch; + result = + countryMatch + && organizationMatch + && organizationalUnitMatch + && localityMatch + && commonNameMatch + && domainComponentMatch; return result; } return result; @@ -328,7 +330,7 @@ public boolean match(SubjectAttribute subjectAttribute) { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#validatePattern() */ // @Override diff --git a/src/main/java/it/grid/storm/authz/sa/model/EGEEFQANPattern.java b/src/main/java/it/grid/storm/authz/sa/model/EGEEFQANPattern.java index 101ea4ad..c41171af 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/EGEEFQANPattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/EGEEFQANPattern.java @@ -1,34 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; +import it.grid.storm.authz.sa.AuthzDBReaderException; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.authz.sa.AuthzDBReaderException; - public class EGEEFQANPattern extends FQANPattern { private static final Logger log = LoggerFactory.getLogger(EGEEFQANPattern.class); // To verify the Regular Expression visit the site // "http://www.fileformat.info/tool/regex.htm" - static private Pattern fqanWildcardPattern = + private static Pattern fqanWildcardPattern = Pattern.compile("/[\\w-\\.]+(((/[\\w-\\.]+)*)(/\\u002A)?)(/Role=(([\\w-\\.]+)|(\\u002A)))?"); private String fqanRE = null; private String voName = null; private final boolean checkValidity; - /** - * CONSTRUCTOR - */ - + /** CONSTRUCTOR */ public EGEEFQANPattern(String fqanRE) throws AuthzDBReaderException { this(fqanRE, true); @@ -45,18 +39,16 @@ public EGEEFQANPattern(String fqanRE, boolean checkValidity) throws AuthzDBReade /** * PRIVATE SETTER /VO[/group[/subgroup(s)]][/Role=role] - * - * - voName = VO - groupPattern = '/group[/subgroup(s)]' - rolePattern = 'role' - * - **/ - + * + *

- voName = VO - groupPattern = '/group[/subgroup(s)]' - rolePattern = 'role' + */ private void generatePattern() throws AuthzDBReaderException { /** * --------- CAPABILITY is not more permitted --------- //Remove capability if present int * capIndex = fqanRE.indexOf("/Capability="); if (capIndex>0) { fqanRE = fqanRE.substring(0, * capIndex); } - **/ + */ // Retrieve Role String if present String role = null; @@ -92,9 +84,7 @@ private void generatePattern() throws AuthzDBReaderException { log.debug("RolePattern = {}", rolePatternString); } - /** - * validateMR - */ + /** validateMR */ @Override public boolean isValidPattern() throws AuthzDBReaderException { @@ -109,5 +99,4 @@ public boolean isValidPattern() throws AuthzDBReaderException { } return true; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/Everyone.java b/src/main/java/it/grid/storm/authz/sa/model/Everyone.java index bae46742..32ebdc8e 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/Everyone.java +++ b/src/main/java/it/grid/storm/authz/sa/model/Everyone.java @@ -1,18 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; -/** - * @author zappi - * - */ +/** @author zappi */ public interface Everyone { public static final String EVERYONE = "EVERYONE"; - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/FQANEveryonePattern.java b/src/main/java/it/grid/storm/authz/sa/model/FQANEveryonePattern.java index 292fced3..8dcac28e 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/FQANEveryonePattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/FQANEveryonePattern.java @@ -1,26 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBReaderException; import it.grid.storm.griduser.FQAN; import it.grid.storm.griduser.SubjectAttribute; -/** - * @author zappi - * - */ +/** @author zappi */ public class FQANEveryonePattern extends FQANPattern implements Everyone { /* * Return always true because the pattern is built programmatically, and it is supposed to be * valid. - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#isValidPattern() */ @Override @@ -31,7 +25,7 @@ public boolean isValidPattern() throws AuthzDBReaderException { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#match(it.grid.storm.griduser .FQAN) */ @Override @@ -49,5 +43,4 @@ public String toString() { return Everyone.EVERYONE; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/FQANPattern.java b/src/main/java/it/grid/storm/authz/sa/model/FQANPattern.java index 8dd034cb..07ed6bac 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/FQANPattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/FQANPattern.java @@ -1,12 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; import it.grid.storm.griduser.FQAN; import it.grid.storm.griduser.SubjectAttribute; - import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -20,7 +18,7 @@ public abstract class FQANPattern implements SubjectPattern { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.sa.model.SubjectPattern#match(it.grid.storm.griduser .FQAN) */ public boolean match(SubjectAttribute sa) { @@ -46,5 +44,4 @@ public String toString() { result.append(" FQAN.RolePatternMatchinRule = " + rolePatternString + sep); return result.toString(); } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/FileAuthzDB.java b/src/main/java/it/grid/storm/authz/sa/model/FileAuthzDB.java index 85bd99e6..139f8abd 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/FileAuthzDB.java +++ b/src/main/java/it/grid/storm/authz/sa/model/FileAuthzDB.java @@ -1,18 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBInterface; import it.grid.storm.authz.sa.AuthzDBReaderException; import it.grid.storm.namespace.model.SAAuthzType; - import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; - import org.apache.commons.configuration.PropertiesConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,8 +55,15 @@ public SAAuthzType getAuthzDBType() { public String getHeader() { - return "" + getMajorVersion() + "." + getMinorVersion() + " - " + versionDescription + " [" - + authzDBType + "]"; + return "" + + getMajorVersion() + + "." + + getMinorVersion() + + " - " + + versionDescription + + " [" + + authzDBType + + "]"; } public List getOrderedListOfACE() { @@ -95,12 +99,8 @@ private List populateACL() { String value = authzDB.getString(key); log.debug("KEY: {} VALUE: {}", key, value); /** @todo IMPLEMENT PARSING OF VALUE */ - } - /** - * @todo Add the default ACL - */ + /** @todo Add the default ACL */ return spaceACL; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SRMSpaceRequest.java b/src/main/java/it/grid/storm/authz/sa/model/SRMSpaceRequest.java index dd553c8f..c101126c 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SRMSpaceRequest.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SRMSpaceRequest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; @@ -9,47 +8,69 @@ public class SRMSpaceRequest { /** * RELEASE_SPACE (D) UPDATE_SPACE (U) READ_FROM_SPACE (R) WRITE_TO_SPACE (W) STAGE_TO_SPACE (S) * REPLICATE_FROM_SPACE(C) PURGE_FROM_SPACE (P) QUERY_SPACE (Q) MODIFY_SPACE_ACL (M) - **/ + */ // Operations to SPACE+SURL - public final static SRMSpaceRequest PTP = new SRMSpaceRequest("srmPrepareToPut", "PTP", - new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); - public final static SRMSpaceRequest PTG = new SRMSpaceRequest("srmPrepareToGet", "PTG", - new SpaceOperation[] {SpaceOperation.READ_FROM_SPACE, SpaceOperation.REPLICATE_FROM_SPACE}); - public final static SRMSpaceRequest BOL = new SRMSpaceRequest("srmBringOnLine", "BOL", - new SpaceOperation[] {SpaceOperation.STAGE_TO_SPACE, SpaceOperation.REPLICATE_FROM_SPACE}); - public final static SRMSpaceRequest CPto = new SRMSpaceRequest("srmCopy to", "CPto", - new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); - public final static SRMSpaceRequest CPfrom = new SRMSpaceRequest("srmCopy from", "CPfrom", - new SpaceOperation[] {SpaceOperation.READ_FROM_SPACE, SpaceOperation.REPLICATE_FROM_SPACE}); + public static final SRMSpaceRequest PTP = + new SRMSpaceRequest( + "srmPrepareToPut", "PTP", new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); + + public static final SRMSpaceRequest PTG = + new SRMSpaceRequest( + "srmPrepareToGet", + "PTG", + new SpaceOperation[] { + SpaceOperation.READ_FROM_SPACE, SpaceOperation.REPLICATE_FROM_SPACE + }); + public static final SRMSpaceRequest BOL = + new SRMSpaceRequest( + "srmBringOnLine", + "BOL", + new SpaceOperation[] { + SpaceOperation.STAGE_TO_SPACE, SpaceOperation.REPLICATE_FROM_SPACE + }); + public static final SRMSpaceRequest CPto = + new SRMSpaceRequest( + "srmCopy to", "CPto", new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); + public static final SRMSpaceRequest CPfrom = + new SRMSpaceRequest( + "srmCopy from", + "CPfrom", + new SpaceOperation[] { + SpaceOperation.READ_FROM_SPACE, SpaceOperation.REPLICATE_FROM_SPACE + }); // Space Operations - public final static SRMSpaceRequest PFS = new SRMSpaceRequest("srmPurgeFromSpace", "PFS", - new SpaceOperation[] {SpaceOperation.PURGE_FROM_SPACE}); - public final static SRMSpaceRequest RS = new SRMSpaceRequest("srmReleaseSpace", "RS", - new SpaceOperation[] {SpaceOperation.RELEASE_SPACE}); - public final static SRMSpaceRequest QS = new SRMSpaceRequest("srmGetSpaceMetadata", "QS", - new SpaceOperation[] {SpaceOperation.QUERY_SPACE, SpaceOperation.UPDATE_SPACE}); + public static final SRMSpaceRequest PFS = + new SRMSpaceRequest( + "srmPurgeFromSpace", "PFS", new SpaceOperation[] {SpaceOperation.PURGE_FROM_SPACE}); + public static final SRMSpaceRequest RS = + new SRMSpaceRequest( + "srmReleaseSpace", "RS", new SpaceOperation[] {SpaceOperation.RELEASE_SPACE}); + public static final SRMSpaceRequest QS = + new SRMSpaceRequest( + "srmGetSpaceMetadata", + "QS", + new SpaceOperation[] {SpaceOperation.QUERY_SPACE, SpaceOperation.UPDATE_SPACE}); // OVERLOAD with OP - public final static SRMSpaceRequest RM = + public static final SRMSpaceRequest RM = new SRMSpaceRequest("srmRemove", "RM", new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); - public final static SRMSpaceRequest RMD = new SRMSpaceRequest("srmRemoveDir", "RMD", - new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); - public final static SRMSpaceRequest MD = + public static final SRMSpaceRequest RMD = + new SRMSpaceRequest( + "srmRemoveDir", "RMD", new 
SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); + public static final SRMSpaceRequest MD = new SRMSpaceRequest("srmMakeDir", "MD", new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); - public final static SRMSpaceRequest LS = + public static final SRMSpaceRequest LS = new SRMSpaceRequest("srmLS", "LS", new SpaceOperation[] {SpaceOperation.READ_FROM_SPACE}); - public final static SRMSpaceRequest MV = + public static final SRMSpaceRequest MV = new SRMSpaceRequest("srmMove", "MV", new SpaceOperation[] {SpaceOperation.WRITE_TO_SPACE}); private String description; private String srmOp; private SpaceAccessMask requestedSpaceOps; - /** - * SRMOperation - */ + /** SRMOperation */ private SRMSpaceRequest(String description, String srmOp, SpaceOperation[] spaceOps) { this.description = description; diff --git a/src/main/java/it/grid/storm/authz/sa/model/SpaceACE.java b/src/main/java/it/grid/storm/authz/sa/model/SpaceACE.java index 880633e7..cbbd1f82 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SpaceACE.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SpaceACE.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; @@ -14,21 +13,15 @@ public class SpaceACE { private SpaceAccessMask spaceAccessMask; private AceType aceType; - public SpaceACE() { + public SpaceACE() {} - } - - /** - * @return the aceNumber - */ + /** @return the aceNumber */ public int getAceNumber() { return aceNumber; } - /** - * @param aceNumber the aceNumber to set - */ + /** @param aceNumber the aceNumber to set */ public void setAceNumber(int aceNumber) { this.aceNumber = aceNumber; @@ -64,9 +57,7 @@ public SubjectPattern getSubjectPattern() { return subjectPattern; } - /** - * @return the spacePermission - */ + /** @return the spacePermission */ public SpaceAccessMask getSpaceAccessMask() { return spaceAccessMask; @@ -81,8 +72,15 @@ public AceType getAceType() { public String toString() { String spacePermissionStr = spaceAccessMask.toString(); - return "SpaceACE (" + getAceNumber() + "): " + getSubjectType() + ":" + getSubjectPattern() - + ":" + spacePermissionStr + ":" + aceType; + return "SpaceACE (" + + getAceNumber() + + "): " + + getSubjectType() + + ":" + + getSubjectPattern() + + ":" + + spacePermissionStr + + ":" + + aceType; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SpaceAccessMask.java b/src/main/java/it/grid/storm/authz/sa/model/SpaceAccessMask.java index 3aec7239..650e005b 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SpaceAccessMask.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SpaceAccessMask.java @@ -1,19 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import java.util.ArrayList; import java.util.List; -/** - * @author zappi - * - */ +/** @author zappi */ public class SpaceAccessMask { private List spAccessMask; @@ -50,5 +44,4 @@ public String toString() { } return spacePermissionStr; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SpaceOperation.java b/src/main/java/it/grid/storm/authz/sa/model/SpaceOperation.java index 0839a664..023ea7f2 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SpaceOperation.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SpaceOperation.java @@ -1,23 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; /** * RELEASE_SPACE (D) UPDATE_SPACE (U) READ_FROM_SPACE (R) WRITE_TO_SPACE (W) STAGE_TO_SPACE (S) * REPLICATE_FROM_SPACE(C) PURGE_FROM_SPACE (P) QUERY_SPACE (Q) MODIFY_SPACE_ACL (M) - **/ - + */ public enum SpaceOperation { - RELEASE_SPACE('D', "RELEASE_SPACE", "Release space"), UPDATE_SPACE('U', "UPDATE_SPACE", - "Update space"), READ_FROM_SPACE('R', "READ_FROM_SPACE", "Read from space"), WRITE_TO_SPACE( - 'W', "WRITE_TO_SPACE", "Write to space"), STAGE_TO_SPACE('S', "STAGE_TO_SPACE", - "Stage to space"), REPLICATE_FROM_SPACE('C', "REPLICATE_FROM_SPACE", - "Replicate from space"), PURGE_FROM_SPACE('P', "PURGE_FROM_SPACE", - "Purge from space"), QUERY_SPACE('Q', "QUERY_SPACE", - "Query space"), MODIFY_SPACE_ACL('M', "MODIFY_SPACE_ACL", - "Modify space acl"), UNDEFINED('?', "UNDEFINED", "Undefined"); + RELEASE_SPACE('D', "RELEASE_SPACE", "Release space"), + UPDATE_SPACE('U', "UPDATE_SPACE", "Update space"), + READ_FROM_SPACE('R', "READ_FROM_SPACE", "Read from space"), + WRITE_TO_SPACE('W', "WRITE_TO_SPACE", "Write to space"), + STAGE_TO_SPACE('S', "STAGE_TO_SPACE", "Stage to space"), + REPLICATE_FROM_SPACE('C', "REPLICATE_FROM_SPACE", "Replicate from space"), + PURGE_FROM_SPACE('P', "PURGE_FROM_SPACE", "Purge from space"), + QUERY_SPACE('Q', "QUERY_SPACE", "Query space"), + MODIFY_SPACE_ACL('M', "MODIFY_SPACE_ACL", "Modify space acl"), + UNDEFINED('?', "UNDEFINED", "Undefined"); private final char operation; private final String operationName; @@ -81,5 +81,4 @@ public int getNumberOfSpaceOp() { return SpaceOperation.values().length - 1; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SubjectPattern.java b/src/main/java/it/grid/storm/authz/sa/model/SubjectPattern.java index 70404755..973b302a 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SubjectPattern.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SubjectPattern.java @@ -1,23 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.sa.model; import it.grid.storm.authz.sa.AuthzDBReaderException; import it.grid.storm.griduser.SubjectAttribute; -/** - * @author zappi - * - */ +/** @author zappi */ public interface SubjectPattern { public abstract boolean match(SubjectAttribute subjectAttribute); public abstract boolean isValidPattern() throws AuthzDBReaderException; - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SubjectPatternOld.java b/src/main/java/it/grid/storm/authz/sa/model/SubjectPatternOld.java index 8e123351..e68f54b6 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SubjectPatternOld.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SubjectPatternOld.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; @@ -50,5 +49,4 @@ public EGEEFQANPattern getFQANPattern() { return this.fqanMR; } - } diff --git a/src/main/java/it/grid/storm/authz/sa/model/SubjectType.java b/src/main/java/it/grid/storm/authz/sa/model/SubjectType.java index c06a42ea..da0de590 100644 --- a/src/main/java/it/grid/storm/authz/sa/model/SubjectType.java +++ b/src/main/java/it/grid/storm/authz/sa/model/SubjectType.java @@ -1,14 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.model; public class SubjectType { - public final static SubjectType DN = new SubjectType("DN"); - public final static SubjectType FQAN = new SubjectType("FQAN"); - public final static SubjectType UNKNOWN = new SubjectType("UNKNOWN"); + public static final SubjectType DN = new SubjectType("DN"); + public static final SubjectType FQAN = new SubjectType("FQAN"); + public static final SubjectType UNKNOWN = new SubjectType("UNKNOWN"); private final String subjectType; diff --git a/src/main/java/it/grid/storm/authz/sa/test/MockSpaceAuthz.java b/src/main/java/it/grid/storm/authz/sa/test/MockSpaceAuthz.java index 980f8a53..28a23e42 100644 --- a/src/main/java/it/grid/storm/authz/sa/test/MockSpaceAuthz.java +++ b/src/main/java/it/grid/storm/authz/sa/test/MockSpaceAuthz.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.authz.sa.test; @@ -8,7 +7,6 @@ import it.grid.storm.authz.sa.AuthzDBInterface; import it.grid.storm.authz.sa.model.SRMSpaceRequest; import it.grid.storm.griduser.GridUserInterface; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,13 +15,11 @@ public class MockSpaceAuthz implements SpaceAuthzInterface { private static final String MOCK_ID = "mock-space-authz"; private static final Logger log = LoggerFactory.getLogger(MockSpaceAuthz.class); - public MockSpaceAuthz() { - - } + public MockSpaceAuthz() {} /** * authorize - * + * * @param guser GridUserInterface * @param srmSpaceOp SRMSpaceRequest * @return boolean @@ -44,7 +40,7 @@ public boolean authorizeAnonymous(SRMSpaceRequest srmSpaceOp) { /** * setAuthzDB - * + * * @param authzDB AuthzDBInterface */ public void setAuthzDB(AuthzDBInterface authzDB) { diff --git a/src/main/java/it/grid/storm/authz/util/ConfigurationWatcher.java b/src/main/java/it/grid/storm/authz/util/ConfigurationWatcher.java index f1060b3f..cf7024df 100644 --- a/src/main/java/it/grid/storm/authz/util/ConfigurationWatcher.java +++ b/src/main/java/it/grid/storm/authz/util/ConfigurationWatcher.java @@ -1,18 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.util; import java.io.File; import java.util.TimerTask; -/** - * @author ritz - */ +/** @author ritz */ public abstract class ConfigurationWatcher extends TimerTask { private long timeStamp; @@ -37,5 +32,4 @@ public final void run() { // Take some actions on file changed protected abstract void onChange(); - } diff --git a/src/main/java/it/grid/storm/authz/util/PathAuthzConfigurationWatcher.java b/src/main/java/it/grid/storm/authz/util/PathAuthzConfigurationWatcher.java index 29f3e1cb..b5a1e8ba 100644 --- a/src/main/java/it/grid/storm/authz/util/PathAuthzConfigurationWatcher.java +++ b/src/main/java/it/grid/storm/authz/util/PathAuthzConfigurationWatcher.java @@ -1,27 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.authz.util; import java.io.File; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author ritz - */ +/** @author ritz */ public class PathAuthzConfigurationWatcher extends ConfigurationWatcher { private static final Logger log = LoggerFactory.getLogger(PathAuthzConfigurationWatcher.class); - /** - * @param file - */ + /** @param file */ public PathAuthzConfigurationWatcher(File file) { super(file); @@ -30,7 +22,7 @@ public PathAuthzConfigurationWatcher(File file) { /* * (non-Javadoc) - * + * * @see it.grid.storm.authz.util.ConfigurationWatcher#onChange() */ @Override @@ -40,5 +32,4 @@ protected void onChange() { // Force the reload of the configuration file } - } diff --git a/src/main/java/it/grid/storm/balancer/BalancingStrategy.java b/src/main/java/it/grid/storm/balancer/BalancingStrategy.java index df7157d3..46259909 100644 --- a/src/main/java/it/grid/storm/balancer/BalancingStrategy.java +++ b/src/main/java/it/grid/storm/balancer/BalancingStrategy.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer; diff --git a/src/main/java/it/grid/storm/balancer/BalancingStrategyType.java b/src/main/java/it/grid/storm/balancer/BalancingStrategyType.java index 3506a2e7..15da211e 100644 --- a/src/main/java/it/grid/storm/balancer/BalancingStrategyType.java +++ b/src/main/java/it/grid/storm/balancer/BalancingStrategyType.java @@ -1,20 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer; import java.util.EnumSet; public enum BalancingStrategyType { - RANDOM("random", Weighted.NOWEIGHT), ROUNDROBIN("round-robin", Weighted.NOWEIGHT), WEIGHT("weight", Weighted.WEIGHTED), SMART_RR("smart-rr", Weighted.NOWEIGHT); private enum Weighted { - WEIGHTED, NOWEIGHT + WEIGHTED, + NOWEIGHT }; private String name; diff --git a/src/main/java/it/grid/storm/balancer/Node.java b/src/main/java/it/grid/storm/balancer/Node.java index 5df709d4..05ddfc97 100644 --- a/src/main/java/it/grid/storm/balancer/Node.java +++ b/src/main/java/it/grid/storm/balancer/Node.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer; @@ -15,5 +14,4 @@ public interface Node { int getPort(); boolean checkServer(); - } diff --git a/src/main/java/it/grid/storm/balancer/cache/Responsiveness.java b/src/main/java/it/grid/storm/balancer/cache/Responsiveness.java index 7768e901..8e899f29 100644 --- a/src/main/java/it/grid/storm/balancer/cache/Responsiveness.java +++ b/src/main/java/it/grid/storm/balancer/cache/Responsiveness.java @@ -1,9 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.cache; public enum Responsiveness { - RESPONSIVE, UNRESPONSIVE, UNKNOWN; + RESPONSIVE, + UNRESPONSIVE, + UNKNOWN; } diff --git a/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCache.java b/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCache.java index efce0fc0..1030bbf3 100644 --- a/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCache.java +++ b/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCache.java @@ -1,22 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.cache; +import com.google.common.collect.Maps; +import it.grid.storm.balancer.Node; +import it.grid.storm.config.Configuration; import java.util.Map; import java.util.Optional; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Maps; - -import it.grid.storm.balancer.Node; -import it.grid.storm.config.Configuration; - public enum ResponsivenessCache { - INSTANCE(Configuration.getInstance().getServerPoolStatusCheckTimeout()); private static final Logger log = LoggerFactory.getLogger(ResponsivenessCache.class); @@ -59,6 +54,4 @@ public boolean isCached(Node n) { private Optional getEntry(Node node) { return Optional.ofNullable(cache.get(node)); } - - } diff --git a/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCacheEntry.java b/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCacheEntry.java index 527f48a5..15621020 100644 --- a/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCacheEntry.java +++ b/src/main/java/it/grid/storm/balancer/cache/ResponsivenessCacheEntry.java @@ -1,14 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.cache; +import it.grid.storm.balancer.Node; import java.text.SimpleDateFormat; import java.util.Date; -import it.grid.storm.balancer.Node; - public class ResponsivenessCacheEntry { private final Node cachedNode; diff --git a/src/main/java/it/grid/storm/balancer/exception/BalancingStrategyException.java b/src/main/java/it/grid/storm/balancer/exception/BalancingStrategyException.java index e2f4f629..dd4a674c 100644 --- a/src/main/java/it/grid/storm/balancer/exception/BalancingStrategyException.java +++ b/src/main/java/it/grid/storm/balancer/exception/BalancingStrategyException.java @@ -1,18 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.exception; public class BalancingStrategyException extends Exception { - /** - * - */ - private static final long serialVersionUID = 3934865023265768076L; + /** */ + private static final long serialVersionUID = 3934865023265768076L; - public BalancingStrategyException(String string) { + public BalancingStrategyException(String string) { - super(string); - } + super(string); + } } diff --git a/src/main/java/it/grid/storm/balancer/node/AbstractNode.java b/src/main/java/it/grid/storm/balancer/node/AbstractNode.java index 7e59372b..dc081af2 100644 --- a/src/main/java/it/grid/storm/balancer/node/AbstractNode.java +++ b/src/main/java/it/grid/storm/balancer/node/AbstractNode.java @@ -1,20 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.node; +import it.grid.storm.balancer.Node; +import it.grid.storm.namespace.model.Protocol; import java.io.IOException; import java.util.Objects; import java.util.Optional; - import org.apache.commons.net.telnet.TelnetClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.balancer.Node; -import it.grid.storm.namespace.model.Protocol; - public abstract class AbstractNode implements Node { private static final Logger log = LoggerFactory.getLogger(AbstractNode.class); @@ -108,16 +105,14 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; AbstractNode other = (AbstractNode) obj; - return Objects.equals(hostname, other.hostname) && id == other.id && port == other.port - && Objects.equals(protocol, other.protocol) && Objects.equals(weight, other.weight); + return Objects.equals(hostname, other.hostname) + && id == other.id + && port == other.port + && Objects.equals(protocol, other.protocol) + && Objects.equals(weight, other.weight); } - - } diff --git a/src/main/java/it/grid/storm/balancer/node/FTPNode.java b/src/main/java/it/grid/storm/balancer/node/FTPNode.java index fb9899a5..76095062 100644 --- a/src/main/java/it/grid/storm/balancer/node/FTPNode.java +++ b/src/main/java/it/grid/storm/balancer/node/FTPNode.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.node; diff --git a/src/main/java/it/grid/storm/balancer/node/HttpNode.java b/src/main/java/it/grid/storm/balancer/node/HttpNode.java index 767d0884..b8cc7e5a 100644 --- a/src/main/java/it/grid/storm/balancer/node/HttpNode.java +++ b/src/main/java/it/grid/storm/balancer/node/HttpNode.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.node; diff --git a/src/main/java/it/grid/storm/balancer/node/HttpsNode.java b/src/main/java/it/grid/storm/balancer/node/HttpsNode.java index fd7c1022..001ee289 100644 --- a/src/main/java/it/grid/storm/balancer/node/HttpsNode.java +++ b/src/main/java/it/grid/storm/balancer/node/HttpsNode.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.node; diff --git a/src/main/java/it/grid/storm/balancer/strategy/AbstractBalancingStrategy.java b/src/main/java/it/grid/storm/balancer/strategy/AbstractBalancingStrategy.java index b0b4792d..39eacec7 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/AbstractBalancingStrategy.java +++ b/src/main/java/it/grid/storm/balancer/strategy/AbstractBalancingStrategy.java @@ -1,18 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - import com.google.common.base.Preconditions; import com.google.common.collect.Lists; - import it.grid.storm.balancer.BalancingStrategy; import it.grid.storm.balancer.BalancingStrategyType; import it.grid.storm.balancer.Node; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; public abstract class AbstractBalancingStrategy implements BalancingStrategy { @@ -22,8 +19,8 @@ public abstract class AbstractBalancingStrategy implements BalancingStrategy { public AbstractBalancingStrategy(List pool) { Preconditions.checkNotNull(pool, "Unable to build BalancingStrategy: received null node pool"); - Preconditions.checkArgument(pool.size() > 0, - "Unable to build BalancingStrategy: received empty node pool"); + Preconditions.checkArgument( + pool.size() > 0, "Unable to build BalancingStrategy: received empty node pool"); this.nodePool = Lists.newCopyOnWriteArrayList(pool); } diff --git a/src/main/java/it/grid/storm/balancer/strategy/BalancingStrategyFactory.java b/src/main/java/it/grid/storm/balancer/strategy/BalancingStrategyFactory.java index c1e65cf5..ae72386e 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/BalancingStrategyFactory.java +++ b/src/main/java/it/grid/storm/balancer/strategy/BalancingStrategyFactory.java @@ -1,19 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; -import java.util.List; - import it.grid.storm.balancer.BalancingStrategy; import it.grid.storm.balancer.BalancingStrategyType; import it.grid.storm.balancer.Node; +import java.util.List; public class BalancingStrategyFactory { - public static BalancingStrategy getBalancingStrategy( - BalancingStrategyType type, List pool) throws IllegalArgumentException { + public static BalancingStrategy getBalancingStrategy(BalancingStrategyType type, List pool) + throws IllegalArgumentException { switch (type) { case RANDOM: @@ -27,5 +25,4 @@ public static BalancingStrategy getBalancingStrategy( } throw new IllegalArgumentException("StrategyFactory: Unknown BalancingStrategyType: " + type); } - } diff --git a/src/main/java/it/grid/storm/balancer/strategy/CyclicCounter.java b/src/main/java/it/grid/storm/balancer/strategy/CyclicCounter.java index 17332212..8447de11 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/CyclicCounter.java +++ b/src/main/java/it/grid/storm/balancer/strategy/CyclicCounter.java @@ -1,12 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; -import java.util.concurrent.atomic.AtomicInteger; - import com.google.common.base.Preconditions; +import java.util.concurrent.atomic.AtomicInteger; public class CyclicCounter { @@ -15,21 +13,20 @@ public class CyclicCounter { public CyclicCounter(int maxVal) { - Preconditions.checkArgument(maxVal >=0, "Maximum counter value should be >= 0"); + Preconditions.checkArgument(maxVal >= 0, "Maximum counter value should be >= 0"); this.maxVal = maxVal; counter = new AtomicInteger(0); } public int next() { - return counter.getAndUpdate(value -> - { - value++; - if (value >= maxVal) { - value = 0; - } - return value; - } ); + return counter.getAndUpdate( + value -> { + value++; + if (value >= maxVal) { + value = 0; + } + return value; + }); } - } diff --git a/src/main/java/it/grid/storm/balancer/strategy/RandomStrategy.java b/src/main/java/it/grid/storm/balancer/strategy/RandomStrategy.java index d44e464c..d58459f4 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/RandomStrategy.java +++ b/src/main/java/it/grid/storm/balancer/strategy/RandomStrategy.java @@ -1,20 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; import static it.grid.storm.balancer.BalancingStrategyType.RANDOM; +import it.grid.storm.balancer.Node; import java.util.Date; import java.util.List; import java.util.Random; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.balancer.Node; - public class RandomStrategy extends AbstractBalancingStrategy { private static final Logger log = LoggerFactory.getLogger(RandomStrategy.class); diff --git a/src/main/java/it/grid/storm/balancer/strategy/RoundRobinStrategy.java b/src/main/java/it/grid/storm/balancer/strategy/RoundRobinStrategy.java index 00374657..5ee29a4d 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/RoundRobinStrategy.java +++ b/src/main/java/it/grid/storm/balancer/strategy/RoundRobinStrategy.java @@ -1,19 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; import static it.grid.storm.balancer.BalancingStrategyType.ROUNDROBIN; +import it.grid.storm.balancer.Node; +import it.grid.storm.balancer.exception.BalancingStrategyException; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.balancer.Node; -import it.grid.storm.balancer.exception.BalancingStrategyException; - public class RoundRobinStrategy extends AbstractBalancingStrategy { private static final Logger log = LoggerFactory.getLogger(RoundRobinStrategy.class); diff --git a/src/main/java/it/grid/storm/balancer/strategy/SmartRoundRobinStrategy.java b/src/main/java/it/grid/storm/balancer/strategy/SmartRoundRobinStrategy.java index 4822f424..bfaa5b8b 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/SmartRoundRobinStrategy.java +++ b/src/main/java/it/grid/storm/balancer/strategy/SmartRoundRobinStrategy.java @@ -1,21 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; import static it.grid.storm.balancer.BalancingStrategyType.SMART_RR; import static it.grid.storm.balancer.cache.Responsiveness.RESPONSIVE; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.balancer.Node; import it.grid.storm.balancer.cache.Responsiveness; import it.grid.storm.balancer.cache.ResponsivenessCache; import it.grid.storm.balancer.exception.BalancingStrategyException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SmartRoundRobinStrategy extends RoundRobinStrategy { diff --git a/src/main/java/it/grid/storm/balancer/strategy/WeightStrategy.java b/src/main/java/it/grid/storm/balancer/strategy/WeightStrategy.java index 8d70f3ff..1ce1d606 100644 --- a/src/main/java/it/grid/storm/balancer/strategy/WeightStrategy.java +++ b/src/main/java/it/grid/storm/balancer/strategy/WeightStrategy.java @@ -1,18 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; +import com.google.common.collect.Lists; +import it.grid.storm.balancer.Node; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - -import it.grid.storm.balancer.Node; - public class WeightStrategy extends RoundRobinStrategy { private static final Logger log = LoggerFactory.getLogger(WeightStrategy.class); diff --git a/src/main/java/it/grid/storm/catalogs/AnonymousFileTransferData.java b/src/main/java/it/grid/storm/catalogs/AnonymousFileTransferData.java index c0d8c428..e0201e2f 100644 --- a/src/main/java/it/grid/storm/catalogs/AnonymousFileTransferData.java +++ b/src/main/java/it/grid/storm/catalogs/AnonymousFileTransferData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -9,65 +8,42 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TTURL; -/** - * @author Michele Dibenedetto - * - */ - -public abstract class AnonymousFileTransferData extends - SurlMultyOperationRequestData implements FileTransferData { +public abstract class AnonymousFileTransferData extends SurlMultyOperationRequestData + implements FileTransferData { - protected TURLPrefix transferProtocols; - protected TTURL transferURL; + protected TURLPrefix transferProtocols; + protected TTURL transferURL; - public AnonymousFileTransferData(TSURL toSURL, TURLPrefix transferProtocols, - TReturnStatus status, TTURL transferURL) - throws InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException { + public AnonymousFileTransferData( + TSURL toSURL, TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL) + throws InvalidFileTransferDataAttributesException, InvalidSurlRequestDataAttributesException { - super(toSURL, status); - if (transferProtocols == null || transferURL == null) { - throw new InvalidFileTransferDataAttributesException(toSURL, - transferProtocols, status, transferURL); - } - this.transferProtocols = transferProtocols; - this.transferURL = transferURL; - } + super(toSURL, status); + if (transferProtocols == null || transferURL == null) { + throw new InvalidFileTransferDataAttributesException( + toSURL, transferProtocols, status, transferURL); + } + this.transferProtocols = transferProtocols; + this.transferURL = transferURL; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.FileTransferData#getTransferProtocols() - */ - @Override - public final TURLPrefix getTransferProtocols() { + @Override + public final TURLPrefix getTransferProtocols() { - return transferProtocols; - } + return transferProtocols; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.FileTransferData#getTransferURL() - */ - @Override - public final TTURL getTransferURL() { + @Override + public final TTURL getTransferURL() { - return transferURL; - } + return transferURL; + } - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.catalogs.FileTransferData#setTransferURL(it.grid.storm.srm - * .types.TTURL) - */ - @Override - public final void setTransferURL(final TTURL turl) { + @Override + public final void setTransferURL(final TTURL turl) { - if (turl != null) { - transferURL = turl; - } - } + if (turl != null) { + transferURL = turl; + } + } } diff --git a/src/main/java/it/grid/storm/catalogs/AnonymousPtGData.java b/src/main/java/it/grid/storm/catalogs/AnonymousPtGData.java index 9bbc05fe..31f1563b 100644 --- a/src/main/java/it/grid/storm/catalogs/AnonymousPtGData.java +++ b/src/main/java/it/grid/storm/catalogs/AnonymousPtGData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -12,212 +11,212 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a PrepareToGetChunkData, that is part of a multifile - * PrepareToGet srm request. 
It contains data about: the requestToken, the - * fromSURL, the requested lifeTime of pinning, the TDirOption which tells - * whether the requested SURL is a directory and if it must be recursed at all - * levels, as well as the desired number of levels to recurse, the desired - * transferProtocols in order of preference, the fileSize, and the transferURL - * for the supplied SURL. - * + * This class represents a PrepareToGetChunkData, that is part of a multifile PrepareToGet srm + * request. It contains data about: the requestToken, the fromSURL, the requested lifeTime of + * pinning, the TDirOption which tells whether the requested SURL is a directory and if it must be + * recursed at all levels, as well as the desired number of levels to recurse, the desired + * transferProtocols in order of preference, the fileSize, and the transferURL for the supplied + * SURL. + * * @author EGRID - ICTP Trieste * @date March 21st, 2005 * @version 3.0 */ -public class AnonymousPtGData extends AnonymousFileTransferData implements - PtGData { - - private static final Logger log = LoggerFactory - .getLogger(AnonymousPtGData.class); - - /** requested lifetime of TURL: it is the pin time! */ - protected TLifeTimeInSeconds pinLifeTime; - /** specifies if the request regards a directory and related info */ - protected TDirOption dirOption; - /** size of file */ - protected TSizeInBytes fileSize; - - /** - * @param requestToken - * @param fromSURL - * @param lifeTime - * @param dirOption - * @param desiredProtocols - * @param fileSize - * @param status - * @param transferURL - * @throws InvalidPtGDataAttributesException - */ - public AnonymousPtGData(TSURL SURL, TLifeTimeInSeconds lifeTime, - TDirOption dirOption, TURLPrefix desiredProtocols, TSizeInBytes fileSize, - TReturnStatus status, TTURL transferURL) - throws InvalidPtGDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(SURL, desiredProtocols, status, transferURL); - if (lifeTime == null || dirOption == null || fileSize == null) { - log.debug("Invalid arguments: lifeTime={}, dirOption={}, fileSize={}", - lifeTime, dirOption, fileSize); - throw new InvalidPtGDataAttributesException(SURL, lifeTime, dirOption, - desiredProtocols, fileSize, status, transferURL); - - } - this.pinLifeTime = lifeTime; - this.dirOption = dirOption; - this.fileSize = fileSize; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtGData#getPinLifeTime() - */ - @Override - public TLifeTimeInSeconds getPinLifeTime() { - - return pinLifeTime; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtGData#getDirOption() - */ - @Override - public TDirOption getDirOption() { - - return dirOption; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtGData#getFileSize() - */ - @Override - public TSizeInBytes getFileSize() { - - return fileSize; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.catalogs.PtGData#setFileSize(it.grid.storm.srm.types.TSizeInBytes - * ) - */ - @Override - public void setFileSize(TSizeInBytes size) { - - if (size != null) { - fileSize = size; - } - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.catalogs.PtGData#changeStatusSRM_FILE_PINNED(java.lang.String - * ) - */ - @Override - public void changeStatusSRM_FILE_PINNED(String explanation) { - - setStatus(TStatusCode.SRM_FILE_PINNED, explanation); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder 
builder = new StringBuilder(); - builder.append("PtGChunkData [pinLifeTime="); - builder.append(pinLifeTime); - builder.append(", dirOption="); - builder.append(dirOption); - builder.append(", fileSize="); - builder.append(fileSize); - builder.append(", transferProtocols="); - builder.append(transferProtocols); - builder.append(", SURL="); - builder.append(SURL); - builder.append(", status="); - builder.append(status); - builder.append(", transferURL="); - builder.append(transferURL); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((dirOption == null) ? 0 : dirOption.hashCode()); - result = prime * result + ((fileSize == null) ? 0 : fileSize.hashCode()); - result = prime * result - + ((pinLifeTime == null) ? 0 : pinLifeTime.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - AnonymousPtGData other = (AnonymousPtGData) obj; - if (dirOption == null) { - if (other.dirOption != null) { - return false; - } - } else if (!dirOption.equals(other.dirOption)) { - return false; - } - if (fileSize == null) { - if (other.fileSize != null) { - return false; - } - } else if (!fileSize.equals(other.fileSize)) { - return false; - } - if (pinLifeTime == null) { - if (other.pinLifeTime != null) { - return false; - } - } else if (!pinLifeTime.equals(other.pinLifeTime)) { - return false; - } - return true; - } - +public class AnonymousPtGData extends AnonymousFileTransferData implements PtGData { + + private static final Logger log = LoggerFactory.getLogger(AnonymousPtGData.class); + + /** requested lifetime of TURL: it is the pin time! 
*/ + protected TLifeTimeInSeconds pinLifeTime; + /** specifies if the request regards a directory and related info */ + protected TDirOption dirOption; + /** size of file */ + protected TSizeInBytes fileSize; + + /** + * @param requestToken + * @param fromSURL + * @param lifeTime + * @param dirOption + * @param desiredProtocols + * @param fileSize + * @param status + * @param transferURL + * @throws InvalidPtGDataAttributesException + */ + public AnonymousPtGData( + TSURL SURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtGDataAttributesException, InvalidFileTransferDataAttributesException, + InvalidSurlRequestDataAttributesException { + + super(SURL, desiredProtocols, status, transferURL); + if (lifeTime == null || dirOption == null || fileSize == null) { + log.debug( + "Invalid arguments: lifeTime={}, dirOption={}, fileSize={}", + lifeTime, + dirOption, + fileSize); + throw new InvalidPtGDataAttributesException( + SURL, lifeTime, dirOption, desiredProtocols, fileSize, status, transferURL); + } + this.pinLifeTime = lifeTime; + this.dirOption = dirOption; + this.fileSize = fileSize; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtGData#getPinLifeTime() + */ + @Override + public TLifeTimeInSeconds getPinLifeTime() { + + return pinLifeTime; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtGData#getDirOption() + */ + @Override + public TDirOption getDirOption() { + + return dirOption; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtGData#getFileSize() + */ + @Override + public TSizeInBytes getFileSize() { + + return fileSize; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.catalogs.PtGData#setFileSize(it.grid.storm.srm.types.TSizeInBytes + * ) + */ + @Override + public void setFileSize(TSizeInBytes size) { + + if (size != null) { + fileSize = size; + } + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.catalogs.PtGData#changeStatusSRM_FILE_PINNED(java.lang.String + * ) + */ + @Override + public void changeStatusSRM_FILE_PINNED(String explanation) { + + setStatus(TStatusCode.SRM_FILE_PINNED, explanation); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("PtGChunkData [pinLifeTime="); + builder.append(pinLifeTime); + builder.append(", dirOption="); + builder.append(dirOption); + builder.append(", fileSize="); + builder.append(fileSize); + builder.append(", transferProtocols="); + builder.append(transferProtocols); + builder.append(", SURL="); + builder.append(SURL); + builder.append(", status="); + builder.append(status); + builder.append(", transferURL="); + builder.append(transferURL); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((dirOption == null) ? 0 : dirOption.hashCode()); + result = prime * result + ((fileSize == null) ? 0 : fileSize.hashCode()); + result = prime * result + ((pinLifeTime == null) ? 
0 : pinLifeTime.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + AnonymousPtGData other = (AnonymousPtGData) obj; + if (dirOption == null) { + if (other.dirOption != null) { + return false; + } + } else if (!dirOption.equals(other.dirOption)) { + return false; + } + if (fileSize == null) { + if (other.fileSize != null) { + return false; + } + } else if (!fileSize.equals(other.fileSize)) { + return false; + } + if (pinLifeTime == null) { + if (other.pinLifeTime != null) { + return false; + } + } else if (!pinLifeTime.equals(other.pinLifeTime)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/catalogs/AnonymousPtPData.java b/src/main/java/it/grid/storm/catalogs/AnonymousPtPData.java index 796bfadf..6c752ad9 100644 --- a/src/main/java/it/grid/storm/catalogs/AnonymousPtPData.java +++ b/src/main/java/it/grid/storm/catalogs/AnonymousPtPData.java @@ -1,11 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TFileStorageType; import it.grid.storm.srm.types.TLifeTimeInSeconds; @@ -16,212 +13,234 @@ import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ -public class AnonymousPtPData extends AnonymousFileTransferData implements - PtPData { - - private static final Logger log = LoggerFactory.getLogger(AnonymousPtPData.class); - - protected TSpaceToken spaceToken; - protected TLifeTimeInSeconds pinLifetime; - protected TLifeTimeInSeconds fileLifetime; - protected TFileStorageType fileStorageType; - protected TOverwriteMode overwriteOption; - protected TSizeInBytes expectedFileSize; - - public AnonymousPtPData(TSURL toSURL, TLifeTimeInSeconds pinLifetime, - TLifeTimeInSeconds fileLifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TSizeInBytes expectedFileSize, - TURLPrefix transferProtocols, TOverwriteMode overwriteOption, - TReturnStatus status, TTURL transferURL) - throws InvalidPtPDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(toSURL, transferProtocols, status, transferURL); - if (pinLifetime == null || fileLifetime == null || spaceToken == null - || fileStorageType == null || expectedFileSize == null - || overwriteOption == null) { - log.debug("Invalid arguments: pinLifetime={}, fileLifetime={}, " - + "spaceToken={}, fileStorageType={}, expectedFileSize={}, " - + "overwriteOption={}", pinLifetime, fileLifetime, spaceToken, - fileStorageType, expectedFileSize, overwriteOption); - throw new InvalidPtPDataAttributesException(toSURL, pinLifetime, - fileLifetime, fileStorageType, spaceToken, expectedFileSize, - transferProtocols, overwriteOption, status, transferURL); - } - this.spaceToken = spaceToken; - this.pinLifetime = pinLifetime; - this.fileLifetime = fileLifetime; - this.fileStorageType = 
fileStorageType; - this.expectedFileSize = expectedFileSize; - this.overwriteOption = overwriteOption; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#getSpaceToken() - */ - @Override - public final TSpaceToken getSpaceToken() { - - return spaceToken; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#pinLifetime() - */ - @Override - public TLifeTimeInSeconds pinLifetime() { - - return pinLifetime; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#fileLifetime() - */ - @Override - public TLifeTimeInSeconds fileLifetime() { - - return fileLifetime; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#fileStorageType() - */ - @Override - public TFileStorageType fileStorageType() { - - return fileStorageType; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#expectedFileSize() - */ - @Override - public TSizeInBytes expectedFileSize() { - - return expectedFileSize; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.catalogs.PtPData#overwriteOption() - */ - @Override - public TOverwriteMode overwriteOption() { - - return overwriteOption; - } - - /** - * Method that sets the status of this request to SRM_SPACE_AVAILABLE; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - @Override - public void changeStatusSRM_SPACE_AVAILABLE(String explanation) { - - setStatus(TStatusCode.SRM_SPACE_AVAILABLE, explanation); - } - - /** - * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. 
- */ - @Override - public void changeStatusSRM_DUPLICATION_ERROR(String explanation) { - - setStatus(TStatusCode.SRM_DUPLICATION_ERROR, explanation); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("PtPChunkData\n"); - sb.append("toSURL="); - sb.append(SURL); - sb.append("; "); - sb.append("pinLifetime="); - sb.append(pinLifetime); - sb.append("; "); - sb.append("fileLifetime="); - sb.append(fileLifetime); - sb.append("; "); - sb.append("fileStorageType="); - sb.append(fileStorageType); - sb.append("; "); - sb.append("spaceToken="); - sb.append(spaceToken); - sb.append("; "); - sb.append("expectedFileSize="); - sb.append(expectedFileSize); - sb.append("; "); - sb.append("transferProtocols="); - sb.append(transferProtocols); - sb.append("; "); - sb.append("overwriteOption="); - sb.append(overwriteOption); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append("; "); - sb.append("transferURL="); - sb.append(transferURL); - sb.append("; "); - return sb.toString(); - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + SURL.hashCode(); - hash = 37 * hash + pinLifetime.hashCode(); - hash = 37 * hash + fileLifetime.hashCode(); - hash = 37 * hash + fileStorageType.hashCode(); - hash = 37 * hash + spaceToken.hashCode(); - hash = 37 * hash + expectedFileSize.hashCode(); - hash = 37 * hash + transferProtocols.hashCode(); - hash = 37 * hash + overwriteOption.hashCode(); - hash = 37 * hash + status.hashCode(); - hash = 37 * hash + transferURL.hashCode(); - return hash; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof AnonymousPtPData)) { - return false; - } - AnonymousPtPData cd = (AnonymousPtPData) o; - return SURL.equals(cd.SURL) && pinLifetime.equals(cd.pinLifetime) - && fileLifetime.equals(cd.fileLifetime) - && fileStorageType.equals(cd.fileStorageType) - && spaceToken.equals(cd.spaceToken) - && expectedFileSize.equals(cd.expectedFileSize) - && transferProtocols.equals(cd.transferProtocols) - && overwriteOption.equals(cd.overwriteOption) && status.equals(cd.status) - && transferURL.equals(cd.transferURL); - } +/** @author Michele Dibenedetto */ +public class AnonymousPtPData extends AnonymousFileTransferData implements PtPData { + + private static final Logger log = LoggerFactory.getLogger(AnonymousPtPData.class); + + protected TSpaceToken spaceToken; + protected TLifeTimeInSeconds pinLifetime; + protected TLifeTimeInSeconds fileLifetime; + protected TFileStorageType fileStorageType; + protected TOverwriteMode overwriteOption; + protected TSizeInBytes expectedFileSize; + + public AnonymousPtPData( + TSURL toSURL, + TLifeTimeInSeconds pinLifetime, + TLifeTimeInSeconds fileLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes expectedFileSize, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtPDataAttributesException, InvalidFileTransferDataAttributesException, + InvalidSurlRequestDataAttributesException { + + super(toSURL, transferProtocols, status, transferURL); + if (pinLifetime == null + || fileLifetime == null + || spaceToken == null + || fileStorageType == null + || expectedFileSize == null + || overwriteOption == null) { + log.debug( + "Invalid arguments: pinLifetime={}, fileLifetime={}, " + + "spaceToken={}, fileStorageType={}, expectedFileSize={}, " + + "overwriteOption={}", + pinLifetime, + fileLifetime, + 
spaceToken, + fileStorageType, + expectedFileSize, + overwriteOption); + throw new InvalidPtPDataAttributesException( + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); + } + this.spaceToken = spaceToken; + this.pinLifetime = pinLifetime; + this.fileLifetime = fileLifetime; + this.fileStorageType = fileStorageType; + this.expectedFileSize = expectedFileSize; + this.overwriteOption = overwriteOption; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#getSpaceToken() + */ + @Override + public final TSpaceToken getSpaceToken() { + + return spaceToken; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#pinLifetime() + */ + @Override + public TLifeTimeInSeconds pinLifetime() { + + return pinLifetime; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#fileLifetime() + */ + @Override + public TLifeTimeInSeconds fileLifetime() { + + return fileLifetime; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#fileStorageType() + */ + @Override + public TFileStorageType fileStorageType() { + + return fileStorageType; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#expectedFileSize() + */ + @Override + public TSizeInBytes expectedFileSize() { + + return expectedFileSize; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.catalogs.PtPData#overwriteOption() + */ + @Override + public TOverwriteMode overwriteOption() { + + return overwriteOption; + } + + /** + * Method that sets the status of this request to SRM_SPACE_AVAILABLE; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + @Override + public void changeStatusSRM_SPACE_AVAILABLE(String explanation) { + + setStatus(TStatusCode.SRM_SPACE_AVAILABLE, explanation); + } + + /** + * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. 
+ */ + @Override + public void changeStatusSRM_DUPLICATION_ERROR(String explanation) { + + setStatus(TStatusCode.SRM_DUPLICATION_ERROR, explanation); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("PtPChunkData\n"); + sb.append("toSURL="); + sb.append(SURL); + sb.append("; "); + sb.append("pinLifetime="); + sb.append(pinLifetime); + sb.append("; "); + sb.append("fileLifetime="); + sb.append(fileLifetime); + sb.append("; "); + sb.append("fileStorageType="); + sb.append(fileStorageType); + sb.append("; "); + sb.append("spaceToken="); + sb.append(spaceToken); + sb.append("; "); + sb.append("expectedFileSize="); + sb.append(expectedFileSize); + sb.append("; "); + sb.append("transferProtocols="); + sb.append(transferProtocols); + sb.append("; "); + sb.append("overwriteOption="); + sb.append(overwriteOption); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append("; "); + sb.append("transferURL="); + sb.append(transferURL); + sb.append("; "); + return sb.toString(); + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + SURL.hashCode(); + hash = 37 * hash + pinLifetime.hashCode(); + hash = 37 * hash + fileLifetime.hashCode(); + hash = 37 * hash + fileStorageType.hashCode(); + hash = 37 * hash + spaceToken.hashCode(); + hash = 37 * hash + expectedFileSize.hashCode(); + hash = 37 * hash + transferProtocols.hashCode(); + hash = 37 * hash + overwriteOption.hashCode(); + hash = 37 * hash + status.hashCode(); + hash = 37 * hash + transferURL.hashCode(); + return hash; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof AnonymousPtPData)) { + return false; + } + AnonymousPtPData cd = (AnonymousPtPData) o; + return SURL.equals(cd.SURL) + && pinLifetime.equals(cd.pinLifetime) + && fileLifetime.equals(cd.fileLifetime) + && fileStorageType.equals(cd.fileStorageType) + && spaceToken.equals(cd.spaceToken) + && expectedFileSize.equals(cd.expectedFileSize) + && transferProtocols.equals(cd.transferProtocols) + && overwriteOption.equals(cd.overwriteOption) + && status.equals(cd.status) + && transferURL.equals(cd.transferURL); + } } diff --git a/src/main/java/it/grid/storm/catalogs/BoLChunkCatalog.java b/src/main/java/it/grid/storm/catalogs/BoLChunkCatalog.java index ea71f6f2..abf8dc64 100644 --- a/src/main/java/it/grid/storm/catalogs/BoLChunkCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/BoLChunkCatalog.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -9,7 +8,6 @@ import it.grid.storm.common.types.TimeUnit; import it.grid.storm.config.Configuration; import it.grid.storm.griduser.GridUserInterface; -// import it.grid.storm.namespace.SurlStatusStore; import it.grid.storm.srm.types.InvalidTDirOptionAttributesException; import it.grid.storm.srm.types.InvalidTRequestTokenAttributesException; import it.grid.storm.srm.types.InvalidTSURLAttributesException; @@ -22,784 +20,753 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Class that represents StoRMs BoLChunkCatalog: it collects BoLChunkData and - * provides methods for looking up a BoLChunkData based on TRequestToken, as - * well as for adding a new entry and removing an existing one. - * + * Class that represents StoRMs BoLChunkCatalog: it collects BoLChunkData and provides methods for + * looking up a BoLChunkData based on TRequestToken, as well as for adding a new entry and removing + * an existing one. + * * @author CNAF * @date Aug 2009 * @version 1.0 */ public class BoLChunkCatalog { - private static final Logger log = LoggerFactory - .getLogger(BoLChunkCatalog.class); - - /* only instance of BoLChunkCatalog present in StoRM! */ - private static final BoLChunkCatalog cat = new BoLChunkCatalog(); - private final BoLChunkDAO dao = BoLChunkDAO.getInstance(); - - /* - * Timer object in charge of transiting expired requests from SRM_FILE_PINNED - * to SRM_RELEASED! - */ - private final Timer transiter = new Timer(); - /* Delay time before starting cleaning thread! */ - private final long delay = Configuration.getInstance() - .getTransitInitialDelay() * 1000; - /* Period of execution of cleaning! */ - private final long period = Configuration.getInstance() - .getTransitTimeInterval() * 1000; - - /** - * Private constructor that starts the internal timer needed to periodically - * check and transit requests whose pinLifetime has expired and are in - * SRM_FILE_PINNED, to SRM_RELEASED. - */ - private BoLChunkCatalog() { - - TimerTask transitTask = new TimerTask() { - - @Override - public void run() { - - transitExpiredSRM_SUCCESS(); - } - }; - transiter.scheduleAtFixedRate(transitTask, delay, period); - } - - /** - * Method that returns the only instance of BoLChunkCatalog available. - */ - public static BoLChunkCatalog getInstance() { - - return cat; - } - - /** - * Method that returns a Collection of BoLChunkData Objects matching the - * supplied TRequestToken. - * - * If any of the data associated to the TRequestToken is not well formed and - * so does not allow a BoLChunkData Object to be created, then that part of - * the request is dropped and gets logged, and the processing continues with - * the next part. All valid chunks get returned: the others get dropped. - * - * If there are no chunks to process then an empty Collection is returned, and - * a message gets logged. - */ - synchronized public Collection lookup(TRequestToken rt) { - - Collection chunkCollection = dao.find(rt); - log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkCollection); - List list = new ArrayList(); - - if (chunkCollection.isEmpty()) { - log.warn("BoL CHUNK CATALOG! 
No chunks found in persistence for specified " - + "request: {}", rt); - return list; - } - - BoLPersistentChunkData chunk; - for (BoLChunkDataTO chunkTO : chunkCollection) { - chunk = makeOne(chunkTO, rt); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(completeTO(chunkTO, chunk)); - } catch (InvalidReducedBoLChunkDataAttributesException e) { - log.warn("BoL CHUNK CATALOG! unable to add missing informations on DB " - + "to the request: {}", e.getMessage()); - } - } - log.debug("BoL CHUNK CATALOG: returning " + list); - return list; - } - - /** - * Generates a BoLChunkData from the received BoLChunkDataTO - * - * @param auxTO - * @param rt - * @return - */ - private BoLPersistentChunkData makeOne(BoLChunkDataTO auxTO, TRequestToken rt) { - - StringBuilder errorSb = new StringBuilder(); - TSURL fromSURL = null; - try { - fromSURL = TSURL.makeFromStringValidate(auxTO.getFromSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (auxTO.normalizedStFN() != null) { - fromSURL.setNormalizedStFN(auxTO.normalizedStFN()); - } - if (auxTO.sulrUniqueID() != null) { - fromSURL.setUniqueID(auxTO.sulrUniqueID().intValue()); - } - // lifeTime - TLifeTimeInSeconds lifeTime = null; - try { - long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM( - auxTO.getLifeTime()); - // Check for max value allowed - long max = Configuration.getInstance().getPinLifetimeMaximum(); - if (pinLifeTime > max) { - log.warn("PinLifeTime is greater than the max value allowed. " - + "Drop the value to the max = {} seconds", max); - pinLifeTime = max; - } - lifeTime = TLifeTimeInSeconds.make(pinLifeTime, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // dirOption - TDirOption dirOption = null; - try { - dirOption = new TDirOption(auxTO.getDirOption(), - auxTO.getAllLevelRecursive(), auxTO.getNumLevel()); - } catch (InvalidTDirOptionAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // transferProtocols - TURLPrefix transferProtocols = TransferProtocolListConverter.toSTORM(auxTO - .getProtocolList()); - if (transferProtocols.size() == 0) { - errorSb.append("\nEmpty list of TransferProtocols or" - + " could not translate TransferProtocols!"); - /* fail construction of BoLChunkData! */ - transferProtocols = null; - } - // fileSize - TSizeInBytes fileSize = null; - try { - fileSize = TSizeInBytes.make(auxTO.getFileSize(), SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - auxTO.getStatus()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + auxTO.getStatus()); - } else { - status = new TReturnStatus(code, auxTO.getErrString()); - } - // transferURL - /* - * whatever is read is just meaningless because BoL will fill it in!!! So - * create an Empty TTURL by default! Vital to avoid problems with unknown - * DPM NULL/EMPTY logic policy! 
- */ - TTURL transferURL = TTURL.makeEmpty(); - // make BoLChunkData - BoLPersistentChunkData aux = null; - try { - aux = new BoLPersistentChunkData(rt, fromSURL, lifeTime, dirOption, - transferProtocols, fileSize, status, transferURL, - auxTO.getDeferredStartTime()); - aux.setPrimaryKey(auxTO.getPrimaryKey()); - } catch (InvalidSurlRequestDataAttributesException e) { - dao.signalMalformedBoLChunk(auxTO); - log.warn("BoL CHUNK CATALOG! Retrieved malformed BoL " - + "chunk data from persistence. Dropping chunk from request {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - /** - * - * Adds to the received BoLChunkDataTO the normalized StFN and the SURL unique - * ID taken from the BoLChunkData - * - * @param chunkTO - * @param chunk - */ - private void completeTO(ReducedBoLChunkDataTO chunkTO, - final ReducedBoLChunkData chunk) { - - chunkTO.setNormalizedStFN(chunk.fromSURL().normalizedStFN()); - chunkTO.setSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); - } - - /** - * - * Creates a ReducedBoLChunkDataTO from the received BoLChunkDataTO and - * completes it with the normalized StFN and the SURL unique ID taken from the - * PtGChunkData - * - * @param chunkTO - * @param chunk - * @return - * @throws InvalidReducedBoLChunkDataAttributesException - */ - private ReducedBoLChunkDataTO completeTO(BoLChunkDataTO chunkTO, - final BoLPersistentChunkData chunk) - throws InvalidReducedBoLChunkDataAttributesException { - - ReducedBoLChunkDataTO reducedChunkTO = this.reduce(chunkTO); - this.completeTO(reducedChunkTO, this.reduce(chunk)); - return reducedChunkTO; - } - - /** - * Creates a ReducedBoLChunkData from the data contained in the received - * BoLChunkData - * - * @param chunk - * @return - * @throws InvalidReducedBoLChunkDataAttributesException - */ - private ReducedBoLChunkData reduce(BoLPersistentChunkData chunk) - throws InvalidReducedBoLChunkDataAttributesException { - - ReducedBoLChunkData reducedChunk = new ReducedBoLChunkData(chunk.getSURL(), - chunk.getStatus()); - reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); - return reducedChunk; - } - - /** - * Creates a ReducedBoLChunkDataTO from the data contained in the received - * BoLChunkDataTO - * - * @param chunkTO - * @return - */ - private ReducedBoLChunkDataTO reduce(BoLChunkDataTO chunkTO) { - - ReducedBoLChunkDataTO reducedChunkTO = new ReducedBoLChunkDataTO(); - reducedChunkTO.setPrimaryKey(chunkTO.getPrimaryKey()); - reducedChunkTO.setFromSURL(chunkTO.getFromSURL()); - reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); - reducedChunkTO.setSurlUniqueID(chunkTO.sulrUniqueID()); - reducedChunkTO.setStatus(chunkTO.getStatus()); - reducedChunkTO.setErrString(chunkTO.getErrString()); - return reducedChunkTO; - } - - /** - * Checks if the received BoLChunkDataTO contains the fields not set by the - * front end but required - * - * @param chunkTO - * @return - */ - private boolean isComplete(BoLChunkDataTO chunkTO) { - - return (chunkTO.normalizedStFN() != null) - && (chunkTO.sulrUniqueID() != null); - } - - /** - * Checks if the received ReducedBoLChunkDataTO contains the fields not set by - * the front end but required - * - * @param reducedChunkTO - * @return - */ - // TODO MICHELE USER_SURL new method - private boolean isComplete(ReducedBoLChunkDataTO reducedChunkTO) { - - return (reducedChunkTO.normalizedStFN() != null) - && (reducedChunkTO.surlUniqueID() != null); - } - - /** - * Method used to update into Persistence a retrieved BoLChunkData. 
In case - * any error occurs, the operation does not proceed but no Exception is - * thrown. Error messages get logged. - * - * Only fileSize, StatusCode, errString and transferURL are updated. Likewise - * for the request pinLifetime. - */ - synchronized public void update(BoLPersistentChunkData cd) { - - BoLChunkDataTO to = new BoLChunkDataTO(); - /* Primary key needed by DAO Object */ - to.setPrimaryKey(cd.getPrimaryKey()); - to.setFileSize(cd.getFileSize().value()); - to.setStatus(StatusCodeConverter.getInstance().toDB( - cd.getStatus().getStatusCode())); - to.setErrString(cd.getStatus().getExplanation()); - to.setLifeTime(PinLifetimeConverter.getInstance().toDB( - cd.getLifeTime().value())); - // TODO MICHELE USER_SURL fill new fields - to.setNormalizedStFN(cd.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(cd.getSURL().uniqueId())); - - dao.update(to); - // TODO MICHELE SURL STORE - // SurlStatusStore.getInstance().storeSurlStatus(cd.getSURL(), - // cd.getStatus().getStatusCode()); - } - - /** - * Refresh method. TODO THIS IS A WORK IN PROGRESS!!!! This method have to - * synch the ChunkData information with the database status. - * - * @param auxTO - * @param BoLPersistentChunkData - * inputChunk - * @return BoLChunkData outputChunk - */ - synchronized public BoLPersistentChunkData refreshStatus( - BoLPersistentChunkData inputChunk) { - - /* Currently not used */ - // Call the dao refresh method to synch with the db status - BoLChunkDataTO auxTO = dao.refresh(inputChunk.getPrimaryKey()); - - log.debug("BoL CHUNK CATALOG: retrieved data {}", auxTO); - if (auxTO == null) { - log.warn("BoL CHUNK CATALOG! Empty TO found in persistence for specified " - + "request: {}", inputChunk.getPrimaryKey()); - return inputChunk; - } - - /* - * In this first version the only field updated is the Status. Once - * updated, the new status is rewritten into the input ChunkData - */ - - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM(auxTO.getStatus()); - if (code != TStatusCode.EMPTY) { - status = new TReturnStatus(code, auxTO.getErrString()); - } - inputChunk.setStatus(status); - return inputChunk; - } - - /** - * Method that returns a Collection of ReducedBoLChunkData Objects associated - * to the supplied TRequestToken. - * - * If any of the data retrieved for a given chunk is not well formed and so - * does not allow a ReducedBoLChunkData Object to be created, then that chunk - * is dropped and gets logged, while processing continues with the next one. - * All valid chunks get returned: the others get dropped. - * - * If there are no chunks associated to the given TRequestToken, then an empty - * Collection is returned and a messagge gets logged. - */ - synchronized public Collection lookupReducedBoLChunkData( - TRequestToken rt) { - - Collection reducedChunkDataTOs = dao.findReduced(rt - .getValue()); - log.debug("BoL CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); - ArrayList list = new ArrayList(); - if (reducedChunkDataTOs.isEmpty()) { - log.debug("BoL CHUNK CATALOG! 
No chunks found in persistence for {}", rt); - } else { - ReducedBoLChunkData reducedChunkData = null; - for (ReducedBoLChunkDataTO reducedChunkDataTO : reducedChunkDataTOs) { - reducedChunkData = makeOneReduced(reducedChunkDataTO); - if (reducedChunkData != null) { - list.add(reducedChunkData); - if (!this.isComplete(reducedChunkDataTO)) { - completeTO(reducedChunkDataTO, reducedChunkData); - dao.updateIncomplete(reducedChunkDataTO); - } - } - } - log.debug("BoL CHUNK CATALOG: returning {}", list); - } - return list; - } - - public Collection lookupReducedBoLChunkData( - TRequestToken requestToken, Collection surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.findReduced( - requestToken, surlsUniqueIDs, surlsArray); - return buildReducedChunkDataList(chunkDataTOCollection); - } - - public Collection lookupBoLChunkData(TSURL surl, - GridUserInterface user) { - - return lookupBoLChunkData(Arrays.asList(new TSURL[] { surl }), user); - } - - public Collection lookupBoLChunkData(TSURL surl) { - - return lookupBoLChunkData(Arrays.asList(new TSURL[] { surl })); - } - - private Collection lookupBoLChunkData( - List surls, GridUserInterface user) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, - surlsArray, user.getDn()); - log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildChunkDataList(chunkDataTOCollection); - } - - public Collection lookupBoLChunkData(List surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, - surlsArray); - log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildChunkDataList(chunkDataTOCollection); - } - - private Collection buildChunkDataList( - Collection chunkDataTOCollection) { - - List list = new ArrayList(); - BoLPersistentChunkData chunk; - for (BoLChunkDataTO chunkTO : chunkDataTOCollection) { - chunk = makeOne(chunkTO); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(this.completeTO(chunkTO, chunk)); - } catch (InvalidReducedBoLChunkDataAttributesException e) { - log.warn("BoL CHUNK CATALOG! unable to add missing informations " - + "on DB to the request: {}", e.getMessage()); - } - } - log.debug("BoL CHUNK CATALOG: returning {}", list); - return list; - } - - private BoLPersistentChunkData makeOne(BoLChunkDataTO chunkTO) { - - try { - return makeOne(chunkTO, new TRequestToken(chunkTO.getRequestToken(), - chunkTO.getTimeStamp())); - } catch (InvalidTRequestTokenAttributesException e) { - throw new IllegalStateException( - "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " - + e); - } - } - - /** - * Method that returns a Collection of ReducedBoLChunkData Objects matching - * the supplied GridUser and Collection of TSURLs. 
- * - * If any of the data retrieved for a given chunk is not well formed and so - * does not allow a ReducedBoLChunkData Object to be created, then that chunk - * is dropped and gets logged, while processing continues with the next one. - * All valid chunks get returned: the others get dropped. - * - * If there are no chunks associated to the given GridUser and Collection of - * TSURLs, then an empty Collection is returned and a message gets logged. - */ - synchronized public Collection lookupReducedBoLChunkData( - GridUserInterface gu, Collection tsurlCollection) { - - int[] surlsUniqueIDs = new int[tsurlCollection.size()]; - String[] surls = new String[tsurlCollection.size()]; - int index = 0; - for (TSURL tsurl : tsurlCollection) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.findReduced( - gu.getDn(), surlsUniqueIDs, surls); - log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildReducedChunkDataList(chunkDataTOCollection); - } - - private Collection buildReducedChunkDataList( - Collection chunkDataTOCollection) { - - ArrayList list = new ArrayList(); - ReducedBoLChunkData reducedChunkData; - for (ReducedBoLChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { - reducedChunkData = makeOneReduced(reducedChunkDataTO); - if (reducedChunkData != null) { - list.add(reducedChunkData); - if (!this.isComplete(reducedChunkDataTO)) { - this.completeTO(reducedChunkDataTO, reducedChunkData); - dao.updateIncomplete(reducedChunkDataTO); - } - } - } - log.debug("BoL CHUNK CATALOG: returning {}", list); - return list; - } - - /** - * @param auxTO - * @return - */ - private ReducedBoLChunkData makeOneReduced( - ReducedBoLChunkDataTO reducedChunkDataTO) { - - StringBuilder errorSb = new StringBuilder(); - // fromSURL - TSURL fromSURL = null; - try { - fromSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.fromSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (reducedChunkDataTO.normalizedStFN() != null) { - fromSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); - } - if (reducedChunkDataTO.surlUniqueID() != null) { - fromSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - reducedChunkDataTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + reducedChunkDataTO.status()); - } else { - status = new TReturnStatus(code, reducedChunkDataTO.errString()); - } - // make ReducedBoLChunkData - ReducedBoLChunkData aux = null; - try { - aux = new ReducedBoLChunkData(fromSURL, status); - aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); - } catch (InvalidReducedBoLChunkDataAttributesException e) { - log.warn("BoL CHUNK CATALOG! Retrieved malformed " - + "Reduced BoL chunk data from persistence: dropping reduced chunk..."); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - /** - * Method used to add into Persistence a new entry. The supplied BoLChunkData - * gets the primary key changed to the value assigned in Persistence. - * - * This method is intended to be used by a recursive BoL request: the parent - * request supplies a directory which must be expanded, so all new children - * requests resulting from the files in the directory are added into - * persistence. 
- * - * So this method does _not_ add a new SRM prepare_to_get request into the DB! - * - * The only children data written into the DB are: sourceSURL, TDirOption, - * statusCode and explanation. - * - * In case of any error the operation does not proceed, but no Exception is - * thrown! Proper messages get logged by underlaying DAO. - */ - synchronized public void addChild(BoLPersistentChunkData chunkData) { - - BoLChunkDataTO to = new BoLChunkDataTO(); - // needed for now to find ID of request! Must be changed soon! - to.setRequestToken(chunkData.getRequestToken().toString()); - to.setFromSURL(chunkData.getSURL().toString()); - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - - to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); - to.setDirOption(chunkData.getDirOption().isDirectory()); - to.setNumLevel(chunkData.getDirOption().getNumLevel()); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - to.setDeferredStartTime(chunkData.getDeferredStartTime()); - - /* add the entry and update the Primary Key field */ - dao.addChild(to); - chunkData.setPrimaryKey(to.getPrimaryKey()); - } - - /** - * Method used to add into Persistence a new entry. The supplied BoLChunkData - * gets the primary key changed to the value assigned in the Persistence. The - * method requires the GridUser to whom associate the added request. - * - * This method is intended to be used by an srmCopy request in push mode which - * implies a local srmBoL. The only fields from BoLChunkData that are - * considered are: the requestToken, the sourceSURL, the pinLifetime, the - * dirOption, the protocolList, the status and error string. - * - * So this method _adds_ a new SRM prepare_to_get request into the DB! - * - * In case of any error the operation does not proceed, but no Exception is - * thrown! The underlaying DAO logs proper error messages. - */ - synchronized public void add(BoLPersistentChunkData chunkData, - GridUserInterface gu) { - - /* Currently NOT used */ - BoLChunkDataTO to = new BoLChunkDataTO(); - to.setRequestToken(chunkData.getRequestToken().toString()); - to.setFromSURL(chunkData.getSURL().toString()); - // TODO MICHELE USER_SURL fill new fields - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - - to.setLifeTime(new Long(chunkData.getLifeTime().value()).intValue()); - to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); - to.setDirOption(chunkData.getDirOption().isDirectory()); - to.setNumLevel(chunkData.getDirOption().getNumLevel()); - to.setProtocolList(TransferProtocolListConverter.toDB(chunkData - .getTransferProtocols())); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - to.setDeferredStartTime(chunkData.getDeferredStartTime()); - - /* add the entry and update the Primary Key field! */ - dao.addNew(to, gu.getDn()); - chunkData.setPrimaryKey(to.getPrimaryKey()); - } - - /** - * Method used to establish if in Persistence there is a BoLChunkData working - * on the supplied SURL, and whose state is SRM_FILE_PINNED, in which case - * true is returned. In case none are found or there is any problem, false is - * returned. This method is intended to be used by srmMv. 
- */ - synchronized public boolean isSRM_FILE_PINNED(TSURL surl) { - - return (dao.numberInSRM_SUCCESS(surl.uniqueId()) > 0); - } - - /** - * Method used to transit the specified Collection of ReducedBoLChunkData from - * SRM_FILE_PINNED to SRM_RELEASED. Chunks in any other starting state are not - * transited. In case of any error nothing is done, but proper error messages - * get logged by the underlaying DAO. - */ - synchronized public void transitSRM_SUCCESStoSRM_RELEASED( - Collection chunks, TRequestToken token) { - - if (chunks == null || chunks.isEmpty()) { - return; - } - - long[] primaryKeys = new long[chunks.size()]; - int index = 0; - for (ReducedBoLChunkData chunkData : chunks) { - if (chunkData != null) { - primaryKeys[index] = chunkData.primaryKey(); - index++; - } - } - dao.transitSRM_SUCCESStoSRM_RELEASED(primaryKeys, token); - } - - /** - * This method is intended to be used by srmRm to transit all BoL chunks on - * the given SURL which are in the SRM_FILE_PINNED state, to SRM_ABORTED. The - * supplied String will be used as explanation in those chunks return status. - * The global status of the request is _not_ changed. - * - * The TURL of those requests will automatically be set to empty. Notice that - * both removeAllJit(SURL) and removeVolatile(SURL) are automatically invoked - * on PinnedFilesCatalog, to remove any entry and corresponding physical ACLs. - * - * Beware, that the chunks may be part of requests that have finished, or that - * still have not finished because other chunks are being processed. - */ - synchronized public void transitSRM_SUCCESStoSRM_ABORTED(TSURL surl, - String explanation) { - - /* Currently NOT used */ - if (explanation == null) { - explanation = ""; - } - dao.transitSRM_SUCCESStoSRM_ABORTED(surl.uniqueId(), surl.toString(), - explanation); - } - - /** - * Method used to force transition to SRM_RELEASED from SRM_FILE_PINNED, of - * all BoL Requests whose pinLifetime has expired and the state still has not - * been changed (a user forgot to run srmReleaseFiles)! - */ - synchronized public void transitExpiredSRM_SUCCESS() { - - dao.transitExpiredSRM_SUCCESS(); - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, - newStatusCode, explanation); - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - List surlList, TStatusCode expectedStatusCode, - TStatusCode newStatusCode) { - - int[] surlsUniqueIDs = new int[surlList.size()]; - String[] surls = new String[surlList.size()]; - int index = 0; - for (TSURL tsurl : surlList) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - dao.updateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode); - } - + private static final Logger log = LoggerFactory.getLogger(BoLChunkCatalog.class); + + /* only instance of BoLChunkCatalog present in StoRM! */ + private static final BoLChunkCatalog cat = new BoLChunkCatalog(); + private final BoLChunkDAO dao = BoLChunkDAO.getInstance(); + + /* + * Timer object in charge of transiting expired requests from SRM_FILE_PINNED + * to SRM_RELEASED! + */ + private final Timer transiter = new Timer(); + /* Delay time before starting cleaning thread! */ + private final long delay = Configuration.getInstance().getTransitInitialDelay() * 1000; + /* Period of execution of cleaning! 
*/ + private final long period = Configuration.getInstance().getTransitTimeInterval() * 1000; + + /** + * Private constructor that starts the internal timer needed to periodically check and transit + * requests whose pinLifetime has expired and are in SRM_FILE_PINNED, to SRM_RELEASED. + */ + private BoLChunkCatalog() { + + TimerTask transitTask = + new TimerTask() { + + @Override + public void run() { + + transitExpiredSRM_SUCCESS(); + } + }; + transiter.scheduleAtFixedRate(transitTask, delay, period); + } + + /** Method that returns the only instance of BoLChunkCatalog available. */ + public static BoLChunkCatalog getInstance() { + + return cat; + } + + /** + * Method that returns a Collection of BoLChunkData Objects matching the supplied TRequestToken. + * + *
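A minimal caller-side sketch of the catalog entry points above (the requestToken variable is assumed, and the generic type parameters, which do not survive in the text above, are restored for readability):

BoLChunkCatalog catalog = BoLChunkCatalog.getInstance();
Collection<BoLPersistentChunkData> chunks = catalog.lookup(requestToken);
// Expired SRM_FILE_PINNED chunks are normally moved to SRM_RELEASED by the internal
// Timer task; the same transition can also be forced explicitly.
catalog.transitExpiredSRM_SUCCESS();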
<p>
If any of the data associated to the TRequestToken is not well formed and so does not allow + * a BoLChunkData Object to be created, then that part of the request is dropped and gets logged, + * and the processing continues with the next part. All valid chunks get returned: the others get + * dropped. + * + *
<p>
If there are no chunks to process then an empty Collection is returned, and a message gets + * logged. + */ + public synchronized Collection lookup(TRequestToken rt) { + + Collection chunkCollection = dao.find(rt); + log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkCollection); + List list = new ArrayList(); + + if (chunkCollection.isEmpty()) { + log.warn( + "BoL CHUNK CATALOG! No chunks found in persistence for specified " + "request: {}", rt); + return list; + } + + BoLPersistentChunkData chunk; + for (BoLChunkDataTO chunkTO : chunkCollection) { + chunk = makeOne(chunkTO, rt); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(completeTO(chunkTO, chunk)); + } catch (InvalidReducedBoLChunkDataAttributesException e) { + log.warn( + "BoL CHUNK CATALOG! unable to add missing informations on DB " + "to the request: {}", + e.getMessage()); + } + } + log.debug("BoL CHUNK CATALOG: returning " + list); + return list; + } + + /** + * Generates a BoLChunkData from the received BoLChunkDataTO + * + * @param auxTO + * @param rt + * @return + */ + private BoLPersistentChunkData makeOne(BoLChunkDataTO auxTO, TRequestToken rt) { + + StringBuilder errorSb = new StringBuilder(); + TSURL fromSURL = null; + try { + fromSURL = TSURL.makeFromStringValidate(auxTO.getFromSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (auxTO.normalizedStFN() != null) { + fromSURL.setNormalizedStFN(auxTO.normalizedStFN()); + } + if (auxTO.sulrUniqueID() != null) { + fromSURL.setUniqueID(auxTO.sulrUniqueID().intValue()); + } + // lifeTime + TLifeTimeInSeconds lifeTime = null; + try { + long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM(auxTO.getLifeTime()); + // Check for max value allowed + long max = Configuration.getInstance().getPinLifetimeMaximum(); + if (pinLifeTime > max) { + log.warn( + "PinLifeTime is greater than the max value allowed. " + + "Drop the value to the max = {} seconds", + max); + pinLifeTime = max; + } + lifeTime = TLifeTimeInSeconds.make(pinLifeTime, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // dirOption + TDirOption dirOption = null; + try { + dirOption = + new TDirOption(auxTO.getDirOption(), auxTO.getAllLevelRecursive(), auxTO.getNumLevel()); + } catch (InvalidTDirOptionAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // transferProtocols + TURLPrefix transferProtocols = TransferProtocolListConverter.toSTORM(auxTO.getProtocolList()); + if (transferProtocols.size() == 0) { + errorSb.append( + "\nEmpty list of TransferProtocols or" + " could not translate TransferProtocols!"); + /* fail construction of BoLChunkData! */ + transferProtocols = null; + } + // fileSize + TSizeInBytes fileSize = null; + try { + fileSize = TSizeInBytes.make(auxTO.getFileSize(), SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(auxTO.getStatus()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + auxTO.getStatus()); + } else { + status = new TReturnStatus(code, auxTO.getErrString()); + } + // transferURL + /* + * whatever is read is just meaningless because BoL will fill it in!!! So + * create an Empty TTURL by default! 
Vital to avoid problems with unknown + * DPM NULL/EMPTY logic policy! + */ + TTURL transferURL = TTURL.makeEmpty(); + // make BoLChunkData + BoLPersistentChunkData aux = null; + try { + aux = + new BoLPersistentChunkData( + rt, + fromSURL, + lifeTime, + dirOption, + transferProtocols, + fileSize, + status, + transferURL, + auxTO.getDeferredStartTime()); + aux.setPrimaryKey(auxTO.getPrimaryKey()); + } catch (InvalidSurlRequestDataAttributesException e) { + dao.signalMalformedBoLChunk(auxTO); + log.warn( + "BoL CHUNK CATALOG! Retrieved malformed BoL " + + "chunk data from persistence. Dropping chunk from request {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... + return aux; + } + + /** + * Adds to the received BoLChunkDataTO the normalized StFN and the SURL unique ID taken from the + * BoLChunkData + * + * @param chunkTO + * @param chunk + */ + private void completeTO(ReducedBoLChunkDataTO chunkTO, final ReducedBoLChunkData chunk) { + + chunkTO.setNormalizedStFN(chunk.fromSURL().normalizedStFN()); + chunkTO.setSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); + } + + /** + * Creates a ReducedBoLChunkDataTO from the received BoLChunkDataTO and completes it with the + * normalized StFN and the SURL unique ID taken from the PtGChunkData + * + * @param chunkTO + * @param chunk + * @return + * @throws InvalidReducedBoLChunkDataAttributesException + */ + private ReducedBoLChunkDataTO completeTO( + BoLChunkDataTO chunkTO, final BoLPersistentChunkData chunk) + throws InvalidReducedBoLChunkDataAttributesException { + + ReducedBoLChunkDataTO reducedChunkTO = this.reduce(chunkTO); + this.completeTO(reducedChunkTO, this.reduce(chunk)); + return reducedChunkTO; + } + + /** + * Creates a ReducedBoLChunkData from the data contained in the received BoLChunkData + * + * @param chunk + * @return + * @throws InvalidReducedBoLChunkDataAttributesException + */ + private ReducedBoLChunkData reduce(BoLPersistentChunkData chunk) + throws InvalidReducedBoLChunkDataAttributesException { + + ReducedBoLChunkData reducedChunk = new ReducedBoLChunkData(chunk.getSURL(), chunk.getStatus()); + reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); + return reducedChunk; + } + + /** + * Creates a ReducedBoLChunkDataTO from the data contained in the received BoLChunkDataTO + * + * @param chunkTO + * @return + */ + private ReducedBoLChunkDataTO reduce(BoLChunkDataTO chunkTO) { + + ReducedBoLChunkDataTO reducedChunkTO = new ReducedBoLChunkDataTO(); + reducedChunkTO.setPrimaryKey(chunkTO.getPrimaryKey()); + reducedChunkTO.setFromSURL(chunkTO.getFromSURL()); + reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); + reducedChunkTO.setSurlUniqueID(chunkTO.sulrUniqueID()); + reducedChunkTO.setStatus(chunkTO.getStatus()); + reducedChunkTO.setErrString(chunkTO.getErrString()); + return reducedChunkTO; + } + + /** + * Checks if the received BoLChunkDataTO contains the fields not set by the front end but required + * + * @param chunkTO + * @return + */ + private boolean isComplete(BoLChunkDataTO chunkTO) { + + return (chunkTO.normalizedStFN() != null) && (chunkTO.sulrUniqueID() != null); + } + + /** + * Checks if the received ReducedBoLChunkDataTO contains the fields not set by the front end but + * required + * + * @param reducedChunkTO + * @return + */ + // TODO MICHELE USER_SURL new method + private boolean isComplete(ReducedBoLChunkDataTO reducedChunkTO) { + + return (reducedChunkTO.normalizedStFN() != null) && (reducedChunkTO.surlUniqueID() != null); + } + + /** + * Method used to 
update into Persistence a retrieved BoLChunkData. In case any error occurs, the + * operation does not proceed but no Exception is thrown. Error messages get logged. + * + *
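A hedged sketch of the intended call pattern, assuming cd is a chunk previously retrieved from this catalog (the status value and explanation are illustrative only):

cd.setStatus(new TReturnStatus(TStatusCode.SRM_SUCCESS, "file staged"));
BoLChunkCatalog.getInstance().update(cd);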
<p>
Only fileSize, StatusCode, errString and transferURL are updated. Likewise for the request + * pinLifetime. + */ + public synchronized void update(BoLPersistentChunkData cd) { + + BoLChunkDataTO to = new BoLChunkDataTO(); + /* Primary key needed by DAO Object */ + to.setPrimaryKey(cd.getPrimaryKey()); + to.setFileSize(cd.getFileSize().value()); + to.setStatus(StatusCodeConverter.getInstance().toDB(cd.getStatus().getStatusCode())); + to.setErrString(cd.getStatus().getExplanation()); + to.setLifeTime(PinLifetimeConverter.getInstance().toDB(cd.getLifeTime().value())); + // TODO MICHELE USER_SURL fill new fields + to.setNormalizedStFN(cd.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(cd.getSURL().uniqueId())); + + dao.update(to); + // TODO MICHELE SURL STORE + // SurlStatusStore.getInstance().storeSurlStatus(cd.getSURL(), + // cd.getStatus().getStatusCode()); + } + + /** + * Refresh method. TODO THIS IS A WORK IN PROGRESS!!!! This method have to synch the ChunkData + * information with the database status. + * + * @param auxTO + * @param BoLPersistentChunkData inputChunk + * @return BoLChunkData outputChunk + */ + public synchronized BoLPersistentChunkData refreshStatus(BoLPersistentChunkData inputChunk) { + + /* Currently not used */ + // Call the dao refresh method to synch with the db status + BoLChunkDataTO auxTO = dao.refresh(inputChunk.getPrimaryKey()); + + log.debug("BoL CHUNK CATALOG: retrieved data {}", auxTO); + if (auxTO == null) { + log.warn( + "BoL CHUNK CATALOG! Empty TO found in persistence for specified " + "request: {}", + inputChunk.getPrimaryKey()); + return inputChunk; + } + + /* + * In this first version the only field updated is the Status. Once + * updated, the new status is rewritten into the input ChunkData + */ + + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(auxTO.getStatus()); + if (code != TStatusCode.EMPTY) { + status = new TReturnStatus(code, auxTO.getErrString()); + } + inputChunk.setStatus(status); + return inputChunk; + } + + /** + * Method that returns a Collection of ReducedBoLChunkData Objects associated to the supplied + * TRequestToken. + * + *
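A hedged sketch of an srmReleaseFiles-style flow built on this lookup and on transitSRM_SUCCESStoSRM_RELEASED defined further below (the token variable is assumed, generics restored for readability):

BoLChunkCatalog catalog = BoLChunkCatalog.getInstance();
Collection<ReducedBoLChunkData> pinned = catalog.lookupReducedBoLChunkData(token);
// Move the chunks of this request from SRM_FILE_PINNED to SRM_RELEASED.
catalog.transitSRM_SUCCESStoSRM_RELEASED(pinned, token);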
<p>
If any of the data retrieved for a given chunk is not well formed and so does not allow a + * ReducedBoLChunkData Object to be created, then that chunk is dropped and gets logged, while + * processing continues with the next one. All valid chunks get returned: the others get dropped. + * + *
<p>
If there are no chunks associated to the given TRequestToken, then an empty Collection is + * returned and a messagge gets logged. + */ + public synchronized Collection lookupReducedBoLChunkData(TRequestToken rt) { + + Collection reducedChunkDataTOs = dao.findReduced(rt.getValue()); + log.debug("BoL CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); + ArrayList list = new ArrayList(); + if (reducedChunkDataTOs.isEmpty()) { + log.debug("BoL CHUNK CATALOG! No chunks found in persistence for {}", rt); + } else { + ReducedBoLChunkData reducedChunkData = null; + for (ReducedBoLChunkDataTO reducedChunkDataTO : reducedChunkDataTOs) { + reducedChunkData = makeOneReduced(reducedChunkDataTO); + if (reducedChunkData != null) { + list.add(reducedChunkData); + if (!this.isComplete(reducedChunkDataTO)) { + completeTO(reducedChunkDataTO, reducedChunkData); + dao.updateIncomplete(reducedChunkDataTO); + } + } + } + log.debug("BoL CHUNK CATALOG: returning {}", list); + } + return list; + } + + public Collection lookupReducedBoLChunkData( + TRequestToken requestToken, Collection surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.findReduced(requestToken, surlsUniqueIDs, surlsArray); + return buildReducedChunkDataList(chunkDataTOCollection); + } + + public Collection lookupBoLChunkData(TSURL surl, GridUserInterface user) { + + return lookupBoLChunkData(Arrays.asList(new TSURL[] {surl}), user); + } + + public Collection lookupBoLChunkData(TSURL surl) { + + return lookupBoLChunkData(Arrays.asList(new TSURL[] {surl})); + } + + private Collection lookupBoLChunkData( + List surls, GridUserInterface user) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.find(surlsUniqueIDs, surlsArray, user.getDn()); + log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildChunkDataList(chunkDataTOCollection); + } + + public Collection lookupBoLChunkData(List surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, surlsArray); + log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildChunkDataList(chunkDataTOCollection); + } + + private Collection buildChunkDataList( + Collection chunkDataTOCollection) { + + List list = new ArrayList(); + BoLPersistentChunkData chunk; + for (BoLChunkDataTO chunkTO : chunkDataTOCollection) { + chunk = makeOne(chunkTO); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(this.completeTO(chunkTO, chunk)); + } catch (InvalidReducedBoLChunkDataAttributesException e) { + log.warn( + "BoL CHUNK CATALOG! 
unable to add missing informations " + "on DB to the request: {}", + e.getMessage()); + } + } + log.debug("BoL CHUNK CATALOG: returning {}", list); + return list; + } + + private BoLPersistentChunkData makeOne(BoLChunkDataTO chunkTO) { + + try { + return makeOne(chunkTO, new TRequestToken(chunkTO.getRequestToken(), chunkTO.getTimeStamp())); + } catch (InvalidTRequestTokenAttributesException e) { + throw new IllegalStateException( + "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " + e); + } + } + + /** + * Method that returns a Collection of ReducedBoLChunkData Objects matching the supplied GridUser + * and Collection of TSURLs. + * + *
<p>
If any of the data retrieved for a given chunk is not well formed and so does not allow a + * ReducedBoLChunkData Object to be created, then that chunk is dropped and gets logged, while + * processing continues with the next one. All valid chunks get returned: the others get dropped. + * + *
<p>
If there are no chunks associated to the given GridUser and Collection of TSURLs, then an + * empty Collection is returned and a message gets logged. + */ + public synchronized Collection lookupReducedBoLChunkData( + GridUserInterface gu, Collection tsurlCollection) { + + int[] surlsUniqueIDs = new int[tsurlCollection.size()]; + String[] surls = new String[tsurlCollection.size()]; + int index = 0; + for (TSURL tsurl : tsurlCollection) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.findReduced(gu.getDn(), surlsUniqueIDs, surls); + log.debug("BoL CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildReducedChunkDataList(chunkDataTOCollection); + } + + private Collection buildReducedChunkDataList( + Collection chunkDataTOCollection) { + + ArrayList list = new ArrayList(); + ReducedBoLChunkData reducedChunkData; + for (ReducedBoLChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { + reducedChunkData = makeOneReduced(reducedChunkDataTO); + if (reducedChunkData != null) { + list.add(reducedChunkData); + if (!this.isComplete(reducedChunkDataTO)) { + this.completeTO(reducedChunkDataTO, reducedChunkData); + dao.updateIncomplete(reducedChunkDataTO); + } + } + } + log.debug("BoL CHUNK CATALOG: returning {}", list); + return list; + } + + /** + * @param auxTO + * @return + */ + private ReducedBoLChunkData makeOneReduced(ReducedBoLChunkDataTO reducedChunkDataTO) { + + StringBuilder errorSb = new StringBuilder(); + // fromSURL + TSURL fromSURL = null; + try { + fromSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.fromSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (reducedChunkDataTO.normalizedStFN() != null) { + fromSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); + } + if (reducedChunkDataTO.surlUniqueID() != null) { + fromSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(reducedChunkDataTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + reducedChunkDataTO.status()); + } else { + status = new TReturnStatus(code, reducedChunkDataTO.errString()); + } + // make ReducedBoLChunkData + ReducedBoLChunkData aux = null; + try { + aux = new ReducedBoLChunkData(fromSURL, status); + aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); + } catch (InvalidReducedBoLChunkDataAttributesException e) { + log.warn( + "BoL CHUNK CATALOG! Retrieved malformed " + + "Reduced BoL chunk data from persistence: dropping reduced chunk..."); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... + return aux; + } + + /** + * Method used to add into Persistence a new entry. The supplied BoLChunkData gets the primary key + * changed to the value assigned in Persistence. + * + *
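A minimal sketch of the expansion flow described below, assuming childChunkData has already been built for one file found inside the expanded directory:

// Persist one child chunk per discovered file; no new request-level entry is created.
BoLChunkCatalog.getInstance().addChild(childChunkData);
// After the call, the primary key assigned by the database is available on the object.
long childKey = childChunkData.getPrimaryKey();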
<p>
This method is intended to be used by a recursive BoL request: the parent request supplies a + * directory which must be expanded, so all new children requests resulting from the files in the + * directory are added into persistence. + * + *
<p>
So this method does _not_ add a new SRM prepare_to_get request into the DB! + * + *
<p>
The only children data written into the DB are: sourceSURL, TDirOption, statusCode and + * explanation. + * + *
<p>
In case of any error the operation does not proceed, but no Exception is thrown! Proper + * messages get logged by underlaying DAO. + */ + public synchronized void addChild(BoLPersistentChunkData chunkData) { + + BoLChunkDataTO to = new BoLChunkDataTO(); + // needed for now to find ID of request! Must be changed soon! + to.setRequestToken(chunkData.getRequestToken().toString()); + to.setFromSURL(chunkData.getSURL().toString()); + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + + to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); + to.setDirOption(chunkData.getDirOption().isDirectory()); + to.setNumLevel(chunkData.getDirOption().getNumLevel()); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + to.setDeferredStartTime(chunkData.getDeferredStartTime()); + + /* add the entry and update the Primary Key field */ + dao.addChild(to); + chunkData.setPrimaryKey(to.getPrimaryKey()); + } + + /** + * Method used to add into Persistence a new entry. The supplied BoLChunkData gets the primary key + * changed to the value assigned in the Persistence. The method requires the GridUser to whom + * associate the added request. + * + *
<p>
This method is intended to be used by an srmCopy request in push mode which implies a local + * srmBoL. The only fields from BoLChunkData that are considered are: the requestToken, the + * sourceSURL, the pinLifetime, the dirOption, the protocolList, the status and error string. + * + *
<p>
So this method _adds_ a new SRM prepare_to_get request into the DB! + * + *
<p>
In case of any error the operation does not proceed, but no Exception is thrown! The + * underlaying DAO logs proper error messages. + */ + public synchronized void add(BoLPersistentChunkData chunkData, GridUserInterface gu) { + + /* Currently NOT used */ + BoLChunkDataTO to = new BoLChunkDataTO(); + to.setRequestToken(chunkData.getRequestToken().toString()); + to.setFromSURL(chunkData.getSURL().toString()); + // TODO MICHELE USER_SURL fill new fields + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + + to.setLifeTime(new Long(chunkData.getLifeTime().value()).intValue()); + to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); + to.setDirOption(chunkData.getDirOption().isDirectory()); + to.setNumLevel(chunkData.getDirOption().getNumLevel()); + to.setProtocolList(TransferProtocolListConverter.toDB(chunkData.getTransferProtocols())); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + to.setDeferredStartTime(chunkData.getDeferredStartTime()); + + /* add the entry and update the Primary Key field! */ + dao.addNew(to, gu.getDn()); + chunkData.setPrimaryKey(to.getPrimaryKey()); + } + + /** + * Method used to establish if in Persistence there is a BoLChunkData working on the supplied + * SURL, and whose state is SRM_FILE_PINNED, in which case true is returned. In case none are + * found or there is any problem, false is returned. This method is intended to be used by srmMv. + */ + public synchronized boolean isSRM_FILE_PINNED(TSURL surl) { + + return (dao.numberInSRM_SUCCESS(surl.uniqueId()) > 0); + } + + /** + * Method used to transit the specified Collection of ReducedBoLChunkData from SRM_FILE_PINNED to + * SRM_RELEASED. Chunks in any other starting state are not transited. In case of any error + * nothing is done, but proper error messages get logged by the underlaying DAO. + */ + public synchronized void transitSRM_SUCCESStoSRM_RELEASED( + Collection chunks, TRequestToken token) { + + if (chunks == null || chunks.isEmpty()) { + return; + } + + long[] primaryKeys = new long[chunks.size()]; + int index = 0; + for (ReducedBoLChunkData chunkData : chunks) { + if (chunkData != null) { + primaryKeys[index] = chunkData.primaryKey(); + index++; + } + } + dao.transitSRM_SUCCESStoSRM_RELEASED(primaryKeys, token); + } + + /** + * This method is intended to be used by srmRm to transit all BoL chunks on the given SURL which + * are in the SRM_FILE_PINNED state, to SRM_ABORTED. The supplied String will be used as + * explanation in those chunks return status. The global status of the request is _not_ changed. + * + *
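A hedged sketch of the srmRm-side usage, combining this transition with isSRM_FILE_PINNED above (the surl variable and the explanation string are illustrative):

BoLChunkCatalog catalog = BoLChunkCatalog.getInstance();
if (catalog.isSRM_FILE_PINNED(surl)) {
  // Abort every BoL chunk still pinned on this SURL before the file is removed.
  catalog.transitSRM_SUCCESStoSRM_ABORTED(surl, "SURL removed by srmRm");
}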
<p>
The TURL of those requests will automatically be set to empty. Notice that both + * removeAllJit(SURL) and removeVolatile(SURL) are automatically invoked on PinnedFilesCatalog, to + * remove any entry and corresponding physical ACLs. + * + *
<p>
Beware, that the chunks may be part of requests that have finished, or that still have not + * finished because other chunks are being processed. + */ + public synchronized void transitSRM_SUCCESStoSRM_ABORTED(TSURL surl, String explanation) { + + /* Currently NOT used */ + if (explanation == null) { + explanation = ""; + } + dao.transitSRM_SUCCESStoSRM_ABORTED(surl.uniqueId(), surl.toString(), explanation); + } + + /** + * Method used to force transition to SRM_RELEASED from SRM_FILE_PINNED, of all BoL Requests whose + * pinLifetime has expired and the state still has not been changed (a user forgot to run + * srmReleaseFiles)! + */ + public synchronized void transitExpiredSRM_SUCCESS() { + + dao.transitExpiredSRM_SUCCESS(); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, newStatusCode, explanation); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + List surlList, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) { + + int[] surlsUniqueIDs = new int[surlList.size()]; + String[] surls = new String[surlList.size()]; + int index = 0; + for (TSURL tsurl : surlList) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + dao.updateStatusOnMatchingStatus( + requestToken, surlsUniqueIDs, surls, expectedStatusCode, newStatusCode); + } } diff --git a/src/main/java/it/grid/storm/catalogs/BoLChunkDAO.java b/src/main/java/it/grid/storm/catalogs/BoLChunkDAO.java index 2c4e45f4..ce60023f 100644 --- a/src/main/java/it/grid/storm/catalogs/BoLChunkDAO.java +++ b/src/main/java/it/grid/storm/catalogs/BoLChunkDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -15,7 +14,6 @@ import it.grid.storm.srm.types.TRequestType; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -34,1655 +32,1740 @@ import java.util.Map.Entry; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * DAO class for BoLChunkCatalog. This DAO is specifically designed to connect - * to a MySQL DB. The raw data found in those tables is pre-treated in order to - * turn it into the Object Model of StoRM. See Method comments for further info. - * BEWARE! DAO Adjusts for extra fields in the DB that are not present in the - * object model. - * + * DAO class for BoLChunkCatalog. This DAO is specifically designed to connect to a MySQL DB. The + * raw data found in those tables is pre-treated in order to turn it into the Object Model of StoRM. + * See Method comments for further info. BEWARE! DAO Adjusts for extra fields in the DB that are not + * present in the object model. 
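As with the catalog, callers are expected to reach the DAO only through its singleton accessor; a minimal sketch (requestToken assumed, generics restored for readability):

BoLChunkDAO dao = BoLChunkDAO.getInstance();
// One transfer object per chunk of the request; chunks in SRM_ABORTED are not returned.
Collection<BoLChunkDataTO> chunkTOs = dao.find(requestToken);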
+ * * @author CNAF * @version 1.0 * @date Aug 2009 */ public class BoLChunkDAO { - private static final Logger log = LoggerFactory.getLogger(BoLChunkDAO.class); - - /** String with the name of the class for the DB driver */ - private final String driver = Configuration.getInstance().getDBDriver(); - /** String referring to the URL of the DB */ - private final String url = Configuration.getInstance().getStormDbURL(); - /** String with the password for the DB */ - private final String password = Configuration.getInstance().getDBPassword(); - /** String with the name for the DB */ - private final String name = Configuration.getInstance().getDBUserName(); - /** Connection to DB - WARNING!!! It is kept open all the time! */ - private Connection con = null; - private final static BoLChunkDAO dao = new BoLChunkDAO(); - - /** - * timer thread that will run a taask to alert when reconnecting is necessary! - */ - private Timer clock = null; - /** - * timer task that will update the boolean signaling that a reconnection is - * needed! - */ - private TimerTask clockTask = null; - /** milliseconds that must pass before reconnecting to DB */ - private final long period = Configuration.getInstance() - .getDBReconnectPeriod() * 1000; - /** initial delay in milliseconds before starting timer */ - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - /** boolean that tells whether reconnection is needed because of MySQL bug! */ - private boolean reconnect = false; - - private BoLChunkDAO() { - - setUpConnection(); - - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of the BoLChunkDAO. - */ - public static BoLChunkDAO getInstance() { - - return dao; - } - - /** - * Method used to add a new record to the DB: the supplied BoLChunkDataTO gets - * its primaryKey changed to the one assigned by the DB. The supplied - * BoLChunkData is used to fill in only the DB table where file specific info - * gets recorded: it does _not_ add a new request! So if spurious data is - * supplied, it will just stay there because of a lack of a parent request! - */ - public synchronized void addChild(BoLChunkDataTO to) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: addChild - unable to get a valid connection!"); - return; - } - String str = null; - PreparedStatement id = null; // statement to find out the ID associated to - // the request token - ResultSet rsid = null; // result set containing the ID of the request. - // insertion - try { - - /* WARNING!!!! We are forced to run a query to get the ID of the request, - * which should NOT be so because the corresponding request object should - * have been changed with the extra field! However, it is not possible - * at the moment to perform such change because of strict deadline and - * the change could wreak havoc the code. So we are forced to make this - * query!!! 
- */ - - // begin transaction - con.setAutoCommit(false); - logWarnings(con.getWarnings()); - - // find ID of request corresponding to given RequestToken - str = "SELECT rq.ID FROM request_queue rq WHERE rq.r_token=?"; - - id = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - id.setString(1, to.getRequestToken()); - logWarnings(id.getWarnings()); - - log.debug("BoL CHUNK DAO: addChild; {}", id.toString()); - rsid = id.executeQuery(); - logWarnings(id.getWarnings()); - - /* ID of request in request_process! */ - int request_id = extractID(rsid); - int id_s = fillBoLTables(to, request_id); - - // end transaction! - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - - // update primary key reading the generated key - to.setPrimaryKey(id_s); - } catch (SQLException e) { - log.error("BoL CHUNK DAO: unable to complete addChild! BoLChunkDataTO: {}; " - + "exception received: {}", to, e.getMessage(), e); - rollback(con); - } catch (Exception e) { - log.error("BoL CHUNK DAO: unable to complete addChild! BoLChunkDataTO: {}; " - + "exception received: {}", to, e.getMessage(), e); - rollback(con); - } finally { - close(rsid); - close(id); - } - } - - /** - * Method used to add a new record to the DB: the supplied BoLChunkDataTO gets - * its primaryKey changed to the one assigned by the DB. The client_dn must - * also be supplied as a String. The supplied BoLChunkData is used to fill in - * all the DB tables where file specific info gets recorded: it _adds_ a new - * request! - */ - public synchronized void addNew(BoLChunkDataTO to, String client_dn) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: addNew - unable to get a valid connection!"); - return; - } - String str = null; - /* Result set containing the ID of the inserted new request */ - ResultSet rs_new = null; - /* Insert new request into process_request */ - PreparedStatement addNew = null; - /* Insert protocols for request. */ - PreparedStatement addProtocols = null; // insert protocols for request. - try { - // begin transaction - con.setAutoCommit(false); - logWarnings(con.getWarnings()); - - // add to request_queue... - str = "INSERT INTO request_queue (config_RequestTypeID,client_dn,pinLifetime,status,errstring,r_token,nbreqfiles,timeStamp,deferredStartTime) VALUES (?,?,?,?,?,?,?,?,?)"; - addNew = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - logWarnings(con.getWarnings()); - /* request type set to bring online */ - addNew.setString(1, - RequestTypeConverter.getInstance().toDB(TRequestType.BRING_ON_LINE)); - logWarnings(addNew.getWarnings()); - - addNew.setString(2, client_dn); - logWarnings(addNew.getWarnings()); - - addNew.setInt(3, to.getLifeTime()); - logWarnings(addNew.getWarnings()); - - addNew.setInt( - 4, - StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_INPROGRESS)); - logWarnings(addNew.getWarnings()); - - addNew.setString(5, "New BoL Request resulting from srmCopy invocation."); - logWarnings(addNew.getWarnings()); - - addNew.setString(6, to.getRequestToken()); - logWarnings(addNew.getWarnings()); - - addNew.setInt(7, 1); // number of requested files set to 1! 
- logWarnings(addNew.getWarnings()); - - addNew.setTimestamp(8, new Timestamp(new Date().getTime())); - logWarnings(addNew.getWarnings()); - - addNew.setInt(9, to.getDeferredStartTime()); - logWarnings(addNew.getWarnings()); - - log.trace("BoL CHUNK DAO: addNew; {}", addNew.toString()); - addNew.execute(); - logWarnings(addNew.getWarnings()); - - rs_new = addNew.getGeneratedKeys(); - int id_new = extractID(rs_new); - - // add protocols... - str = "INSERT INTO request_TransferProtocols (request_queueID,config_ProtocolsID) VALUES (?,?)"; - addProtocols = con.prepareStatement(str); - logWarnings(con.getWarnings()); - for (Iterator i = to.getProtocolList().iterator(); i.hasNext();) { - addProtocols.setInt(1, id_new); - logWarnings(addProtocols.getWarnings()); - - addProtocols.setString(2, i.next()); - logWarnings(addProtocols.getWarnings()); - - log.trace("BoL CHUNK DAO: addNew; {}", addProtocols.toString()); - addProtocols.execute(); - logWarnings(addProtocols.getWarnings()); - } - - // addChild... - int id_s = fillBoLTables(to, id_new); - - // end transaction! - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - - // update primary key reading the generated key - to.setPrimaryKey(id_s); - } catch (SQLException e) { - log.error("BoL CHUNK DAO: Rolling back! Unable to complete addNew! " - + "BoLChunkDataTO: {}; exception received: {}", to, e.getMessage(), e); - rollback(con); - } catch (Exception e) { - log.error("BoL CHUNK DAO: unable to complete addNew! BoLChunkDataTO: {}; " - + "exception received: {}", to, e.getMessage(), e); - rollback(con); - } finally { - close(rs_new); - close(addNew); - close(addProtocols); - } - } - - /** - * To be used inside a transaction - * - * @param to - * @param requestQueueID - * @return - * @throws SQLException - * @throws Exception - */ - private synchronized int fillBoLTables(BoLChunkDataTO to, int requestQueueID) - throws SQLException, Exception { - - String str = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_do = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_b = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_s = null; - /* insert TDirOption for request */ - PreparedStatement addDirOption = null; - /* insert request_Bol for request */ - PreparedStatement addBoL = null; - PreparedStatement addChild = null; - - try { - // first fill in TDirOption - str = "INSERT INTO request_DirOption (isSourceADirectory,allLevelRecursive,numOfLevels) VALUES (?,?,?)"; - addDirOption = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - logWarnings(con.getWarnings()); - addDirOption.setBoolean(1, to.getDirOption()); - logWarnings(addDirOption.getWarnings()); - - addDirOption.setBoolean(2, to.getAllLevelRecursive()); - logWarnings(addDirOption.getWarnings()); - - addDirOption.setInt(3, to.getNumLevel()); - logWarnings(addDirOption.getWarnings()); - - log.trace("BoL CHUNK DAO: addNew; {}", addDirOption.toString()); - addDirOption.execute(); - logWarnings(addDirOption.getWarnings()); - - rs_do = addDirOption.getGeneratedKeys(); - int id_do = extractID(rs_do); - - // second fill in request_BoL... sourceSURL and TDirOption! 
- str = "INSERT INTO request_BoL (request_DirOptionID,request_queueID,sourceSURL,normalized_sourceSURL_StFN,sourceSURL_uniqueID) VALUES (?,?,?,?,?)"; - addBoL = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - logWarnings(con.getWarnings()); - addBoL.setInt(1, id_do); - logWarnings(addBoL.getWarnings()); - - addBoL.setInt(2, requestQueueID); - logWarnings(addBoL.getWarnings()); - - addBoL.setString(3, to.getFromSURL()); - logWarnings(addBoL.getWarnings()); - - addBoL.setString(4, to.normalizedStFN()); - logWarnings(addBoL.getWarnings()); - - addBoL.setInt(5, to.sulrUniqueID()); - logWarnings(addBoL.getWarnings()); - - log.trace("BoL CHUNK DAO: addNew; {}", addBoL.toString()); - addBoL.execute(); - logWarnings(addBoL.getWarnings()); - - rs_b = addBoL.getGeneratedKeys(); - int id_g = extractID(rs_b); - - // third fill in status_BoL... - str = "INSERT INTO status_BoL (request_BoLID,statusCode,explanation) VALUES (?,?,?)"; - addChild = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - logWarnings(con.getWarnings()); - addChild.setInt(1, id_g); - logWarnings(addChild.getWarnings()); - - addChild.setInt(2, to.getStatus()); - logWarnings(addChild.getWarnings()); - - addChild.setString(3, to.getErrString()); - logWarnings(addChild.getWarnings()); - - log.trace("BoL CHUNK DAO: addNew; " + addChild.toString()); - addChild.execute(); - logWarnings(addChild.getWarnings()); - - return id_g; - } finally { - close(rs_do); - close(rs_b); - close(rs_s); - close(addDirOption); - close(addBoL); - close(addChild); - } - } - - /** - * Method used to save the changes made to a retrieved BoLChunkDataTO, back - * into the MySQL DB. Only the fileSize, statusCode and explanation, of - * status_BoL table are written to the DB. Likewise for the request - * pinLifetime. In case of any error, an error message gets logged but no - * exception is thrown. - */ - public synchronized void update(BoLChunkDataTO to) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: update - unable to get a valid connection!"); - return; - } - PreparedStatement updateFileReq = null; - try { - // ready updateFileReq... - updateFileReq = con - .prepareStatement("UPDATE request_queue rq JOIN (status_BoL sb, request_BoL rb) ON (rq.ID=rb.request_queueID AND sb.request_BoLID=rb.ID)" - + " SET sb.fileSize=?, sb.statusCode=?, sb.explanation=?, rq.pinLifetime=?, rb.normalized_sourceSURL_StFN=?, rb.sourceSURL_uniqueID=?" - + " WHERE rb.ID=?"); - logWarnings(con.getWarnings()); - updateFileReq.setLong(1, to.getFileSize()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(2, to.getStatus()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(3, to.getErrString()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(4, to.getLifeTime()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(5, to.normalizedStFN()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(6, to.sulrUniqueID()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setLong(7, to.getPrimaryKey()); - logWarnings(updateFileReq.getWarnings()); - // execute update - log.trace("BoL CHUNK DAO: update method; {}", updateFileReq.toString()); - updateFileReq.executeUpdate(); - logWarnings(updateFileReq.getWarnings()); - } catch (SQLException e) { - log.error("BoL CHUNK DAO: Unable to complete update! 
{}", e.getMessage(), e); - } finally { - close(updateFileReq); - } - } - - /** - * Updates the request_Bol represented by the received ReducedBoLChunkDataTO - * by setting its normalized_sourceSURL_StFN and sourceSURL_uniqueID - * - * @param chunkTO - */ - public synchronized void updateIncomplete(ReducedBoLChunkDataTO chunkTO) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: updateIncomplete - unable to get a valid connection!"); - return; - } - String str = "UPDATE request_BoL SET normalized_sourceSURL_StFN=?, " - + "sourceSURL_uniqueID=? WHERE ID=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - stmt.setString(1, chunkTO.normalizedStFN()); - logWarnings(stmt.getWarnings()); - - stmt.setInt(2, chunkTO.surlUniqueID()); - logWarnings(stmt.getWarnings()); - - stmt.setLong(3, chunkTO.primaryKey()); - logWarnings(stmt.getWarnings()); - - log.trace("BoL CHUNK DAO - update incomplete: {}", stmt.toString()); - stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - } catch (SQLException e) { - log.error("BoL CHUNK DAO: Unable to complete update incomplete! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * TODO WARNING! THIS IS A WORK IN PROGRESS!!! Method used to refresh the - * BoLChunkDataTO information from the MySQL DB. In this first version, only - * the statusCode is reloaded from the DB. TODO The next version must contains - * all the information related to the Chunk! In case of any error, an error - * message gets logged but no exception is thrown. - */ - public synchronized BoLChunkDataTO refresh(long primary_key) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: refresh - unable to get a valid connection!"); - return null; - } - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - - try { - // get chunks of the request - str = "SELECT statusCode " + "FROM status_BoL " - + "WHERE request_BoLID=?"; - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - find.setLong(1, primary_key); - - logWarnings(find.getWarnings()); - log.trace("BoL CHUNK DAO: refresh status method; " + find.toString()); - - rs = find.executeQuery(); - - logWarnings(find.getWarnings()); - BoLChunkDataTO aux = null; - while (rs.next()) { - aux = new BoLChunkDataTO(); - aux.setStatus(rs.getInt("statusCode")); - } - return aux; - } catch (SQLException e) { - log.error("BoL CHUNK DAO: {}", e.getMessage(), e); - return null; - } finally { - close(rs); - close(find); - } - } - - /** - * Method that queries the MySQL DB to find all entries matching the supplied - * TRequestToken. The Collection contains the corresponding BoLChunkDataTO - * objects. An initial simple query establishes the list of protocols - * associated with the request. A second complex query establishes all chunks - * associated with the request, by properly joining request_queue, - * request_BoL, status_BoL and request_DirOption. The considered fields are: - * (1) From status_BoL: the ID field which becomes the TOs primary key, and - * statusCode. (2) From request_BoL: sourceSURL (3) From request_queue: - * pinLifetime (4) From request_DirOption: isSourceADirectory, - * alLevelRecursive, numOfLevels In case of any error, a log gets written and - * an empty collection is returned. No exception is thrown. NOTE! Chunks in - * SRM_ABORTED status are NOT returned! 
- */ - public synchronized Collection find(TRequestToken requestToken) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - String strToken = requestToken.toString(); - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - str = "SELECT tp.config_ProtocolsID " - + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " - + "WHERE rq.r_token=?"; - - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList protocols = new ArrayList(); - find.setString(1, strToken); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO: find method; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - while (rs.next()) { - protocols.add(rs.getString("tp.config_ProtocolsID")); - } - close(rs); - close(find); - - // get chunks of the request - str = "SELECT sb.statusCode, rq.timeStamp, rq.pinLifetime, rq.deferredStartTime, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " - + "LEFT JOIN request_DirOption d ON rb.request_DirOptionID=d.ID " - + "WHERE rq.r_token=? AND sb.statusCode<>?"; - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - ArrayList list = new ArrayList(); - find.setString(1, strToken); - logWarnings(find.getWarnings()); - - find.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO: find method; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - BoLChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new BoLChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sb.statusCode")); - chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setDeferredStartTime(rs.getInt("rq.deferredStartTime")); - chunkDataTO.setRequestToken(strToken); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); - chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); - - chunkDataTO.setNormalizedStFN(rs - .getString("rb.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); - chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); - chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); - chunkDataTO.setProtocolList(protocols); - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("BOL CHUNK DAO: {}", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedBoLChunkDataTO associated to the - * given TRequestToken expressed as String. 
- */ - public synchronized Collection findReduced( - String reqtoken) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - // get reduced chunks - String str = "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " - + "WHERE rq.r_token=?"; - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, reqtoken); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO! findReduced with request token; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - ReducedBoLChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new ReducedBoLChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sb.statusCode")); - chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); - chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rb.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(uniqueID); - } - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("BOL CHUNK DAO: {}", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedBoLChunkDataTO associated to the - * given griduser, and whose SURLs are contained in the supplied array of - * Strings. - */ - public synchronized Collection findReduced( - TRequestToken requestToken, int[] surlUniqueIDs, String[] surls) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - /* - * NOTE: we search also on the fromSurl because otherwise we lost all - * request_Bol that have not the uniqueID set because are not yet been - * used by anybody - */ - // get reduced chunks - String str = "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " - + "WHERE rq.r_token=? AND ( rb.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlUniqueIDs) - + " AND rb.sourceSURL IN " - + makeSurlString(surls) + " ) "; - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, requestToken.getValue()); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO! 
findReduced with griduser+surlarray; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - ReducedBoLChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new ReducedBoLChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sb.statusCode")); - chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); - chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rb.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(uniqueID); - } - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("BoL CHUNK DAO: {}", e.getMessage(), e); - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedBoLChunkDataTO associated to the - * given griduser, and whose SURLs are contained in the supplied array of - * Strings. - */ - public synchronized Collection findReduced( - String griduser, int[] surlUniqueIDs, String[] surls) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - /* - * NOTE: we search also on the fromSurl because otherwise we lost all - * request_Bol that have not the uniqueID set because are not yet been - * used by anybody - */ - // get reduced chunks - String str = "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " - + "WHERE rq.client_dn=? AND ( rb.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlUniqueIDs) - + " AND rb.sourceSURL IN " - + makeSurlString(surls) + " ) "; - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, griduser); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO! findReduced with griduser+surlarray; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - ReducedBoLChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new ReducedBoLChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sb.statusCode")); - chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); - chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rb.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(uniqueID); - } - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("BoL CHUNK DAO: {}", e.getMessage(), e); - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns the number of BoL requests on the given SURL, that are - * in SRM_SUCCESS state. This method is intended to be used by BoLChunkCatalog - * in the isSRM_SUCCESS method invocation. In case of any error, 0 is - * returned. - */ - public synchronized int numberInSRM_SUCCESS(int surlUniqueID) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: numberInSRM_SUCCESS - unable to get a valid connection!"); - return 0; - } - String str = "SELECT COUNT(rb.ID) " - + "FROM status_BoL sb JOIN request_BoL rb " - + "ON (sb.request_BoLID=rb.ID) " - + "WHERE rb.sourceSURL_uniqueID=? 
AND sb.statusCode=?"; - PreparedStatement find = null; - ResultSet rs = null; - try { - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - /* Prepared statement spares DB-specific String notation! */ - find.setInt(1, surlUniqueID); - logWarnings(find.getWarnings()); - - find.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - logWarnings(find.getWarnings()); - - log.trace("BoL CHUNK DAO - numberInSRM_SUCCESS method: {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - int numberFileSuccessful = 0; - if (rs.next()) { - numberFileSuccessful = rs.getInt(1); - } - return numberFileSuccessful; - } catch (SQLException e) { - log.error("BoL CHUNK DAO! Unable to determine numberInSRM_SUCCESS! " - + "Returning 0! ", e.getMessage(), e); - return 0; - } finally { - close(rs); - close(find); - } - } - - /** - * Method used in extraordinary situations to signal that data retrieved from - * the DB was malformed and could not be translated into the StoRM object - * model. This method attempts to change the status of the request to - * SRM_FAILURE and record it in the DB. This operation could potentially fail - * because the source of the malformed problems could be a problematic DB; - * indeed, initially only log messages where recorded. Yet it soon became - * clear that the source of malformed data were the clients and/or FE - * recording info in the DB. In these circumstances the client would see its - * request as being in the SRM_IN_PROGRESS state for ever. Hence the pressing - * need to inform it of the encountered problems. - */ - public synchronized void signalMalformedBoLChunk(BoLChunkDataTO auxTO) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: signalMalformedBoLChunk - unable to get a valid connection!"); - return; - } - String signalSQL = "UPDATE status_BoL SET statusCode=" - + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) - + ", explanation=? WHERE request_BoLID=" + auxTO.getPrimaryKey(); - PreparedStatement signal = null; - try { - signal = con.prepareStatement(signalSQL); - logWarnings(con.getWarnings()); - /* Prepared statement spares DB-specific String notation! */ - signal.setString(1, "Request is malformed!"); - logWarnings(signal.getWarnings()); - - log.trace("BoL CHUNK DAO: signalMalformed; {}", signal.toString()); - signal.executeUpdate(); - logWarnings(signal.getWarnings()); - } catch (SQLException e) { - log.error("BoLChunkDAO! Unable to signal in DB that the request was " - + "malformed! Request: {}; Exception: {}", auxTO.toString(), - e.toString(), e); - } finally { - close(signal); - } - } - - /** - * Method that updates all expired requests in SRM_SUCCESS state, into - * SRM_RELEASED. This is needed when the client forgets to invoke - * srmReleaseFiles(). - * - * @return - */ - public synchronized List transitExpiredSRM_SUCCESS() { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: transitExpiredSRM_SUCCESS - unable to get a valid connection!"); - return new ArrayList(); - } - - HashMap expiredSurlMap = new HashMap(); - String str = null; - PreparedStatement prepStatement = null; - - /* Find all expired surls */ - try { - // start transaction - con.setAutoCommit(false); - - str = "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " - + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "WHERE sb.statusCode=?" 
- + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - prepStatement = con.prepareStatement(str); - prepStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - - ResultSet res = prepStatement.executeQuery(); - logWarnings(prepStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rb.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("BoLChunkDAO! unable to build the TSURL from {}: " - + "InvalidTSURLAttributesException ", sourceSURL, e.getMessage()); - } - } - expiredSurlMap.put(sourceSURL, uniqueID); - } - - if (expiredSurlMap.isEmpty()) { - commit(con); - log.trace("BoLChunkDAO! No chunk of BoL request was transited from " - + "SRM_SUCCESS to SRM_RELEASED."); - return new ArrayList(); - } - } catch (SQLException e) { - log.error("BoLChunkDAO! SQLException.", e.getMessage(), e); - rollback(con); - return new ArrayList(); - } finally { - close(prepStatement); - } - - /* Update status of all successful surls to SRM_RELEASED */ - - prepStatement = null; - try { - - str = "UPDATE " - + "status_BoL sb JOIN (request_BoL rb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "SET sb.statusCode=? " - + "WHERE sb.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - prepStatement = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - prepStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - logWarnings(prepStatement.getWarnings()); - - prepStatement.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - logWarnings(prepStatement.getWarnings()); - - log.trace("BoL CHUNK DAO - transitExpiredSRM_SUCCESS method: {}", - prepStatement.toString()); - - int count = prepStatement.executeUpdate(); - logWarnings(prepStatement.getWarnings()); - - if (count == 0) { - log.trace("BoLChunkDAO! No chunk of BoL request was" - + " transited from SRM_SUCCESS to SRM_RELEASED."); - } else { - log.info("BoLChunkDAO! {} chunks of BoL requests were transited from " - + "SRM_SUCCESS to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("BoLChunkDAO! Unable to transit expired SRM_SUCCESS chunks of " - + "BoL requests, to SRM_RELEASED! 
", e.getMessage(), e); - rollback(con); - return new ArrayList(); - } finally { - close(prepStatement); - } - - /* - * in order to enhance performance here we can check if there is any file - * system with tape (T1D0, T1D1), if there is not any we can skip the - * following - */ - - /* Find all not expired surls from PtG */ - - HashSet pinnedSurlSet = new HashSet(); - try { - // SURLs pinned by BoLs - str = "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " - + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "WHERE sb.statusCode=" - + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS) - + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; - - ResultSet res = null; - - prepStatement = con.prepareStatement(str); - res = prepStatement.executeQuery(); - logWarnings(prepStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rb.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("BoLChunkDAO! unable to build the TSURL from {}: " - + "InvalidTSURLAttributesException ", sourceSURL, e.getMessage()); - } - } - pinnedSurlSet.add(uniqueID); - } - - close(prepStatement); - - str = "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID FROM " - + "request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "WHERE sg.statusCode=?" - + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; - - prepStatement = con.prepareStatement(str); - - prepStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - - res = prepStatement.executeQuery(); - logWarnings(prepStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rg.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("BoLChunkDAO! unable to build the TSURL from {}: " - + "InvalidTSURLAttributesException {}", sourceSURL, e.getMessage()); - } - } - pinnedSurlSet.add(uniqueID); - } - - commit(con); - - } catch (SQLException e) { - log.error("BoLChunkDAO! SQLException. {}", e.getMessage(), e); - rollback(con); - } finally { - close(prepStatement); - } - - /* Remove the Extended Attribute pinned if there is not a valid surl on it */ - ArrayList expiredSurlList = new ArrayList(); - TSURL surl; - for (Entry surlEntry : expiredSurlMap.entrySet()) { - if (!pinnedSurlSet.contains(surlEntry.getValue())) { - try { - surl = TSURL.makeFromStringValidate(surlEntry.getKey()); - } catch (InvalidTSURLAttributesException e) { - log.error("Invalid SURL, cannot release the pin " - + "(Extended Attribute): {}", surlEntry.getKey()); - continue; - } - expiredSurlList.add(surl); - StoRI stori; - try { - stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); - } catch (Throwable e) { - log.error("Invalid SURL {} cannot release the pin. 
{}: {}", - surlEntry.getKey(), e.getClass().getCanonicalName(), e.getMessage()); - continue; - } - - if (stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { - StormEA.removePinned(stori.getAbsolutePath()); - } - } - } - return expiredSurlList; - } - - /** - * Method that transits chunks in SRM_SUCCESS to SRM_ABORTED, for the given - * SURL: the overall request status of the requests containing that chunk, is - * not changed! The TURL is set to null. Beware, that the chunks may be part - * of requests that have finished, or that still have not finished because - * other chunks are still being processed. - */ - public synchronized void transitSRM_SUCCESStoSRM_ABORTED(int surlUniqueID, - String surl, String explanation) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: transitSRM_SUCCESStoSRM_ABORTED - unable to get a valid connection!"); - return; - } - String str = "UPDATE " - + "status_BoL sb JOIN request_BoL rb ON sb.request_BoLID=rb.ID " - + "SET sb.statusCode=?, sb.explanation=?, sb.transferURL=NULL " - + "WHERE sb.statusCode=? AND (rb.sourceSURL_uniqueID=? OR rb.targetSURL=?)"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - stmt.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(stmt.getWarnings()); - - stmt.setString(2, explanation); - logWarnings(stmt.getWarnings()); - - stmt.setInt(3, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - logWarnings(stmt.getWarnings()); - - stmt.setInt(4, surlUniqueID); - logWarnings(stmt.getWarnings()); - - stmt.setString(5, surl); - logWarnings(stmt.getWarnings()); - - log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_ABORTED: {}", stmt.toString()); - int count = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - if (count > 0) { - log.info("BoL CHUNK DAO! {} chunks were transited from SRM_SUCCESS " - + "to SRM_ABORTED.", count); - } else { - log.trace("BoL CHUNK DAO! No chunks were transited from SRM_SUCCESS " - + "to SRM_ABORTED."); - } - } catch (SQLException e) { - log.error("BoL CHUNK DAO! Unable to transitSRM_SUCCESStoSRM_ABORTED! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that updates all chunks in SRM_SUCCESS state, into SRM_RELEASED. An - * array of long representing the primary key of each chunk is required: only - * they get the status changed provided their current status is SRM_SUCCESS. - * This method is used during srmReleaseFiles In case of any error nothing - * happens and no exception is thrown, but proper messages get logged. - */ - public synchronized void transitSRM_SUCCESStoSRM_RELEASED(long[] ids) { - - if (!checkConnection()) { - log.error("BoL CHUNK DAO: transitSRM_SUCCESStoSRM_RELEASED - unable to get a valid connection!"); - return; - } - String str = "UPDATE status_BoL SET statusCode=? " - + "WHERE statusCode=? AND request_BoLID IN " + makeWhereString(ids); - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - stmt.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - logWarnings(stmt.getWarnings()); - - stmt.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - logWarnings(stmt.getWarnings()); - - log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_RELEASED: {}", - stmt.toString()); - int count = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("BoL CHUNK DAO! 
No chunk of BoL request " - + "was transited from SRM_SUCCESS to SRM_RELEASED."); - } else { - log.info("BoL CHUNK DAO! {} chunks of BoL requests were transited " - + "from SRM_SUCCESS to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("BoL CHUNK DAO! Unable to transit chunks from SRM_SUCCESS " - + "to SRM_RELEASED! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - public synchronized void transitSRM_SUCCESStoSRM_RELEASED(long[] ids, - TRequestToken token) { - - if (token == null) { - transitSRM_SUCCESStoSRM_RELEASED(ids); - } else { - /* - * If a request token has been specified, only the related BoL requests - * have to be released. This is done adding the r.r_token="..." clause in - * the where subquery. - */ - if (!checkConnection()) { - log.error("BoL CHUNK DAO: transitSRM_SUCCESStoSRM_RELEASED - unable to get a valid connection!"); - return; - } - String str = "UPDATE " - + "status_BoL sb JOIN (request_BoL rb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "SET sb.statusCode=? " + "WHERE sb.statusCode=? AND rq.r_token='" - + token.toString() + "' AND rb.ID IN " + makeWhereString(ids); - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - stmt.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - logWarnings(stmt.getWarnings()); - - stmt.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - logWarnings(stmt.getWarnings()); - - log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_RELEASED: {}", - stmt.toString()); - int count = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("BoL CHUNK DAO! No chunk of BoL request was " - + "transited from SRM_SUCCESS to SRM_RELEASED."); - } else { - log.info("BoL CHUNK DAO! {} chunks of BoL requests were transited " - + "from SRM_SUCCESS to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("BoL CHUNK DAO! Unable to transit chunks " - + "from SRM_SUCCESS to SRM_RELEASED! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - } - - /** - * Auxiliary method used to close a ResultSet - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("BoL CHUNK DAO! Unable to close ResultSet! Exception: " + e); - } - } - } - - /** - * Auxiliary method used to close a Statement - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("BoL CHUNK DAO! Unable to close Statement {} - Exception: {}", - stmt.toString(), e.getMessage(), e); - } - } - } - - private void commit(Connection con) { - - if (con != null) { - try { - con.commit(); - con.setAutoCommit(true); - } catch (SQLException e) { - log.error("BoL, SQL EXception {}", e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to roll back a failed transaction - */ - private void rollback(Connection con) { - - if (con != null) { - try { - con.rollback(); - con.setAutoCommit(true); - log.error("BoL CHUNK DAO: roll back successful!"); - } catch (SQLException e2) { - log.error("BoL CHUNK DAO: roll back failed! {}", e2.getMessage(), e2); - } - } - } - - /** - * Private method that returns the generated ID: it throws an exception in - * case of any problem! - */ - private int extractID(ResultSet rs) throws Exception { - - if (rs == null) { - throw new Exception("BoL CHUNK DAO! 
Null ResultSet!"); - } - if (rs.next()) { - return rs.getInt(1); - } - log.error("BoL CHUNK DAO! It was not possible to establish " - + "the assigned autoincrement primary key!"); - throw new Exception( - "BoL CHUNK DAO! It was not possible to establish the assigned autoincrement primary key!"); - } - - /** - * Auxiliary private method that logs all SQL warnings. - */ - private void logWarnings(SQLWarning w) { - - if (w != null) { - log.debug("BoL CHUNK DAO: {}", w.toString()); - while ((w = w.getNextWarning()) != null) { - log.debug("BoL CHUNK DAO: {}", w.toString()); - } - } - } - - /** - * Method that returns a String containing all IDs. - */ - private String makeWhereString(long[] rowids) { - - StringBuilder sb = new StringBuilder("("); - int n = rowids.length; - for (int i = 0; i < n; i++) { - sb.append(rowids[i]); - if (i < (n - 1)) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surl's IDs. - */ - private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { - - StringBuilder sb = new StringBuilder("("); - for (int i = 0; i < surlUniqueIDs.length; i++) { - if (i > 0) { - sb.append(","); - } - sb.append(surlUniqueIDs[i]); - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surls. - */ - private String makeSurlString(String[] surls) { - - StringBuilder sb = new StringBuilder("("); - int n = surls.length; - - for (int i = 0; i < n; i++) { - - SURL requestedSURL; - - try { - requestedSURL = SURL.makeSURLfromString(surls[i]); - } catch (NamespaceException e) { - log.error(e.getMessage()); - log.debug("Skip '{}' during query creation", surls[i]); - continue; - } - - sb.append("'"); - sb.append(requestedSURL.getNormalFormAsString()); - sb.append("','"); - sb.append(requestedSURL.getQueryFormAsString()); - sb.append("'"); - - if (i < (n - 1)) { - sb.append(","); - } - } - - sb.append(")"); - return sb.toString(); - } - - /** - * Auxiliary method that sets up the connection to the DB, as well as the - * prepared statement. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - logWarnings(con.getWarnings()); - response = con.isValid(0); - } catch (ClassNotFoundException | SQLException e) { - log.error("BoL CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. - */ - private boolean checkConnection() { - - boolean response = true; - if (reconnect) { - log.debug("BoL CHUNK DAO! Reconnecting to DB! "); - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method that tales down a connection to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - log.error("BoL CHUNK DAO! 
Exception in takeDownConnection method: {}", - e.getMessage(), e); - } - } - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || explanation == null) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken + " explanation=" - + explanation); - } - doUpdateStatusOnMatchingStatus(requestToken, null, null, - expectedStatusCode, newStatusCode, explanation, true, false, true); - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlsUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode) - throws IllegalArgumentException { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || surlsUniqueIDs == null || surls == null || surlsUniqueIDs.length == 0 - || surls.length == 0 || surlsUniqueIDs.length != surls.length) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken - + "surlsUniqueIDs=" + surlsUniqueIDs + " surls=" + surls); - } - doUpdateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, null, true, true, false); - } - - public synchronized void doUpdateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation, boolean withRequestToken, boolean withSurls, - boolean withExplanation) throws IllegalArgumentException { - - if ((withRequestToken && requestToken == null) - || (withExplanation && explanation == null) - || (withSurls && (surlUniqueIDs == null || surls == null))) { - throw new IllegalArgumentException( - "Unable to perform the doUpdateStatusOnMatchingStatus, " - + "invalid arguments: withRequestToken=" + withRequestToken - + " requestToken=" + requestToken + " withSurls=" + withSurls - + " surlUniqueIDs=" + surlUniqueIDs + " surls=" + surls - + " withExplaination=" + withExplanation + " explanation=" - + explanation); - } - if (!checkConnection()) { - log - .error("BOL CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); - return; - } - String str = "UPDATE status_BoL sb JOIN (request_BoL rb, request_queue rq) " - + "ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "SET sb.statusCode=? "; - if (withExplanation) { - str += " , " + buildExpainationSet(explanation); - } - str += " WHERE sb.statusCode=? "; - if (withRequestToken) { - str += " AND " + buildTokenWhereClause(requestToken); - } - if (withSurls) { - str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); - } - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); - logWarnings(stmt.getWarnings()); - - stmt - .setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); - logWarnings(stmt.getWarnings()); - - log.trace("BOL CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); - int count = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("BOL CHUNK DAO! 
No chunk of BOL request was updated from {} " - + "to {}.", expectedStatusCode, newStatusCode); - } else { - log.debug("BOL CHUNK DAO! {} chunks of BOL requests were updated " - + "from {} to {}.", count, expectedStatusCode, newStatusCode); - } - } catch (SQLException e) { - log.error("BOL CHUNK DAO! Unable to updated from {} to {}!", - expectedStatusCode, newStatusCode, e); - } finally { - close(stmt); - } - } - - public Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0 || dn == null) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " dn=" + dn); - } - return find(surlsUniqueIDs, surlsArray, dn, true); - } - - public Collection find(int[] surlsUniqueIDs, - String[] surlsArray) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray); - } - return find(surlsUniqueIDs, surlsArray, null, false); - } - - private synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn, boolean withDn) - throws IllegalArgumentException { - - if ((withDn && dn == null) || surlsUniqueIDs == null - || surlsUniqueIDs.length == 0 || surlsArray == null - || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " withDn=" + withDn + " dn=" + dn); - } - if (!checkConnection()) { - log.error("BoL CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - // get chunks of the request - String str = "SELECT rq.ID, rq.r_token, sb.statusCode, rq.timeStamp, rq.pinLifetime, " - + "rq.deferredStartTime, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, " - + "rb.sourceSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " - + "LEFT JOIN request_DirOption d ON rb.request_DirOptionID=d.ID " - + "WHERE ( rb.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) - + " AND rb.sourceSURL IN " - + makeSurlString(surlsArray) + " )"; - if (withDn) { - str += " AND rq.client_dn=\'" + dn + "\'"; - } - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - List list = new ArrayList(); - - log.trace("BOL CHUNK DAO - find method: {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - BoLChunkDataTO chunkDataTO = null; - while (rs.next()) { - - chunkDataTO = new BoLChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sb.statusCode")); - chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setDeferredStartTime(rs.getInt("rq.deferredStartTime")); - chunkDataTO.setRequestToken(rs.getString("rq.r_token")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); - chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); - - chunkDataTO.setNormalizedStFN(rs - .getString("rb.normalized_sourceSURL_StFN")); - 
int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); - chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); - chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("BOL CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - private String buildExpainationSet(String explanation) { - - return " sb.explanation='" + explanation + "' "; - } - - private String buildTokenWhereClause(TRequestToken requestToken) { - - return " rq.r_token='" + requestToken.toString() + "' "; - } - - private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { - - return " ( rb.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) + " AND rb.sourceSURL IN " - + makeSurlString(surls) + " ) "; - } - + private static final Logger log = LoggerFactory.getLogger(BoLChunkDAO.class); + + /** String with the name of the class for the DB driver */ + private final String driver = Configuration.getInstance().getDBDriver(); + /** String referring to the URL of the DB */ + private final String url = Configuration.getInstance().getStormDbURL(); + /** String with the password for the DB */ + private final String password = Configuration.getInstance().getDBPassword(); + /** String with the name for the DB */ + private final String name = Configuration.getInstance().getDBUserName(); + /** Connection to DB - WARNING!!! It is kept open all the time! */ + private Connection con = null; + + private static final BoLChunkDAO dao = new BoLChunkDAO(); + + /** timer thread that will run a taask to alert when reconnecting is necessary! */ + private Timer clock = null; + /** timer task that will update the boolean signaling that a reconnection is needed! */ + private TimerTask clockTask = null; + /** milliseconds that must pass before reconnecting to DB */ + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + /** initial delay in milliseconds before starting timer */ + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + /** boolean that tells whether reconnection is needed because of MySQL bug! */ + private boolean reconnect = false; + + private BoLChunkDAO() { + + setUpConnection(); + + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of the BoLChunkDAO. */ + public static BoLChunkDAO getInstance() { + + return dao; + } + + /** + * Method used to add a new record to the DB: the supplied BoLChunkDataTO gets its primaryKey + * changed to the one assigned by the DB. The supplied BoLChunkData is used to fill in only the DB + * table where file specific info gets recorded: it does _not_ add a new request! So if spurious + * data is supplied, it will just stay there because of a lack of a parent request! 
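+ *
+ * <p>A minimal usage sketch (hypothetical caller code, not part of this change; the token and the
+ * SURL are placeholders): the TO must carry the token of a request that already exists in
+ * request_queue, plus its file-specific fields.
+ *
+ * <pre>
+ *   BoLChunkDataTO to = new BoLChunkDataTO();
+ *   to.setRequestToken(existingRequestToken); // String token of a request already in request_queue
+ *   to.setFromSURL("srm://example.org/path/to/file"); // placeholder SURL
+ *   BoLChunkDAO.getInstance().addChild(to); // on success the DAO sets the generated primary key
+ * </pre>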
+ */ + public synchronized void addChild(BoLChunkDataTO to) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: addChild - unable to get a valid connection!"); + return; + } + String str = null; + PreparedStatement id = null; // statement to find out the ID associated to + // the request token + ResultSet rsid = null; // result set containing the ID of the request. + // insertion + try { + + /* WARNING!!!! We are forced to run a query to get the ID of the request, + * which should NOT be so because the corresponding request object should + * have been changed with the extra field! However, it is not possible + * at the moment to perform such change because of strict deadline and + * the change could wreak havoc the code. So we are forced to make this + * query!!! + */ + + // begin transaction + con.setAutoCommit(false); + logWarnings(con.getWarnings()); + + // find ID of request corresponding to given RequestToken + str = "SELECT rq.ID FROM request_queue rq WHERE rq.r_token=?"; + + id = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + id.setString(1, to.getRequestToken()); + logWarnings(id.getWarnings()); + + log.debug("BoL CHUNK DAO: addChild; {}", id.toString()); + rsid = id.executeQuery(); + logWarnings(id.getWarnings()); + + /* ID of request in request_process! */ + int request_id = extractID(rsid); + int id_s = fillBoLTables(to, request_id); + + // end transaction! + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + + // update primary key reading the generated key + to.setPrimaryKey(id_s); + } catch (SQLException e) { + log.error( + "BoL CHUNK DAO: unable to complete addChild! BoLChunkDataTO: {}; " + + "exception received: {}", + to, + e.getMessage(), + e); + rollback(con); + } catch (Exception e) { + log.error( + "BoL CHUNK DAO: unable to complete addChild! BoLChunkDataTO: {}; " + + "exception received: {}", + to, + e.getMessage(), + e); + rollback(con); + } finally { + close(rsid); + close(id); + } + } + + /** + * Method used to add a new record to the DB: the supplied BoLChunkDataTO gets its primaryKey + * changed to the one assigned by the DB. The client_dn must also be supplied as a String. The + * supplied BoLChunkData is used to fill in all the DB tables where file specific info gets + * recorded: it _adds_ a new request! + */ + public synchronized void addNew(BoLChunkDataTO to, String client_dn) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: addNew - unable to get a valid connection!"); + return; + } + String str = null; + /* Result set containing the ID of the inserted new request */ + ResultSet rs_new = null; + /* Insert new request into process_request */ + PreparedStatement addNew = null; + /* Insert protocols for request. */ + PreparedStatement addProtocols = null; // insert protocols for request. + try { + // begin transaction + con.setAutoCommit(false); + logWarnings(con.getWarnings()); + + // add to request_queue... 
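+ // The remainder of addNew is a single transaction: after this insert into request_queue
+ // (request type BRING_ON_LINE, status SRM_REQUEST_INPROGRESS), one row per transfer protocol
+ // is added to request_TransferProtocols, and fillBoLTables() fills request_DirOption,
+ // request_BoL and status_BoL; any failure rolls the whole transaction back.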
+ str = + "INSERT INTO request_queue (config_RequestTypeID,client_dn,pinLifetime,status,errstring,r_token,nbreqfiles,timeStamp,deferredStartTime) VALUES (?,?,?,?,?,?,?,?,?)"; + addNew = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + logWarnings(con.getWarnings()); + /* request type set to bring online */ + addNew.setString(1, RequestTypeConverter.getInstance().toDB(TRequestType.BRING_ON_LINE)); + logWarnings(addNew.getWarnings()); + + addNew.setString(2, client_dn); + logWarnings(addNew.getWarnings()); + + addNew.setInt(3, to.getLifeTime()); + logWarnings(addNew.getWarnings()); + + addNew.setInt(4, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + logWarnings(addNew.getWarnings()); + + addNew.setString(5, "New BoL Request resulting from srmCopy invocation."); + logWarnings(addNew.getWarnings()); + + addNew.setString(6, to.getRequestToken()); + logWarnings(addNew.getWarnings()); + + addNew.setInt(7, 1); // number of requested files set to 1! + logWarnings(addNew.getWarnings()); + + addNew.setTimestamp(8, new Timestamp(new Date().getTime())); + logWarnings(addNew.getWarnings()); + + addNew.setInt(9, to.getDeferredStartTime()); + logWarnings(addNew.getWarnings()); + + log.trace("BoL CHUNK DAO: addNew; {}", addNew.toString()); + addNew.execute(); + logWarnings(addNew.getWarnings()); + + rs_new = addNew.getGeneratedKeys(); + int id_new = extractID(rs_new); + + // add protocols... + str = + "INSERT INTO request_TransferProtocols (request_queueID,config_ProtocolsID) VALUES (?,?)"; + addProtocols = con.prepareStatement(str); + logWarnings(con.getWarnings()); + for (Iterator i = to.getProtocolList().iterator(); i.hasNext(); ) { + addProtocols.setInt(1, id_new); + logWarnings(addProtocols.getWarnings()); + + addProtocols.setString(2, i.next()); + logWarnings(addProtocols.getWarnings()); + + log.trace("BoL CHUNK DAO: addNew; {}", addProtocols.toString()); + addProtocols.execute(); + logWarnings(addProtocols.getWarnings()); + } + + // addChild... + int id_s = fillBoLTables(to, id_new); + + // end transaction! + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + + // update primary key reading the generated key + to.setPrimaryKey(id_s); + } catch (SQLException e) { + log.error( + "BoL CHUNK DAO: Rolling back! Unable to complete addNew! " + + "BoLChunkDataTO: {}; exception received: {}", + to, + e.getMessage(), + e); + rollback(con); + } catch (Exception e) { + log.error( + "BoL CHUNK DAO: unable to complete addNew! 
BoLChunkDataTO: {}; " + + "exception received: {}", + to, + e.getMessage(), + e); + rollback(con); + } finally { + close(rs_new); + close(addNew); + close(addProtocols); + } + } + + /** + * To be used inside a transaction + * + * @param to + * @param requestQueueID + * @return + * @throws SQLException + * @throws Exception + */ + private synchronized int fillBoLTables(BoLChunkDataTO to, int requestQueueID) + throws SQLException, Exception { + + String str = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_do = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_b = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_s = null; + /* insert TDirOption for request */ + PreparedStatement addDirOption = null; + /* insert request_Bol for request */ + PreparedStatement addBoL = null; + PreparedStatement addChild = null; + + try { + // first fill in TDirOption + str = + "INSERT INTO request_DirOption (isSourceADirectory,allLevelRecursive,numOfLevels) VALUES (?,?,?)"; + addDirOption = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + logWarnings(con.getWarnings()); + addDirOption.setBoolean(1, to.getDirOption()); + logWarnings(addDirOption.getWarnings()); + + addDirOption.setBoolean(2, to.getAllLevelRecursive()); + logWarnings(addDirOption.getWarnings()); + + addDirOption.setInt(3, to.getNumLevel()); + logWarnings(addDirOption.getWarnings()); + + log.trace("BoL CHUNK DAO: addNew; {}", addDirOption.toString()); + addDirOption.execute(); + logWarnings(addDirOption.getWarnings()); + + rs_do = addDirOption.getGeneratedKeys(); + int id_do = extractID(rs_do); + + // second fill in request_BoL... sourceSURL and TDirOption! + str = + "INSERT INTO request_BoL (request_DirOptionID,request_queueID,sourceSURL,normalized_sourceSURL_StFN,sourceSURL_uniqueID) VALUES (?,?,?,?,?)"; + addBoL = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + logWarnings(con.getWarnings()); + addBoL.setInt(1, id_do); + logWarnings(addBoL.getWarnings()); + + addBoL.setInt(2, requestQueueID); + logWarnings(addBoL.getWarnings()); + + addBoL.setString(3, to.getFromSURL()); + logWarnings(addBoL.getWarnings()); + + addBoL.setString(4, to.normalizedStFN()); + logWarnings(addBoL.getWarnings()); + + addBoL.setInt(5, to.sulrUniqueID()); + logWarnings(addBoL.getWarnings()); + + log.trace("BoL CHUNK DAO: addNew; {}", addBoL.toString()); + addBoL.execute(); + logWarnings(addBoL.getWarnings()); + + rs_b = addBoL.getGeneratedKeys(); + int id_g = extractID(rs_b); + + // third fill in status_BoL... + str = "INSERT INTO status_BoL (request_BoLID,statusCode,explanation) VALUES (?,?,?)"; + addChild = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + logWarnings(con.getWarnings()); + addChild.setInt(1, id_g); + logWarnings(addChild.getWarnings()); + + addChild.setInt(2, to.getStatus()); + logWarnings(addChild.getWarnings()); + + addChild.setString(3, to.getErrString()); + logWarnings(addChild.getWarnings()); + + log.trace("BoL CHUNK DAO: addNew; " + addChild.toString()); + addChild.execute(); + logWarnings(addChild.getWarnings()); + + return id_g; + } finally { + close(rs_do); + close(rs_b); + close(rs_s); + close(addDirOption); + close(addBoL); + close(addChild); + } + } + + /** + * Method used to save the changes made to a retrieved BoLChunkDataTO, back into the MySQL DB. + * Only the fileSize, statusCode and explanation, of status_BoL table are written to the DB. + * Likewise for the request pinLifetime. 
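+ *
+ * <p>Sketch of the expected call pattern (hypothetical caller code, not part of this change; the
+ * chunk variable stands for a previously retrieved TO):
+ *
+ * <pre>
+ *   chunk.setStatus(StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS));
+ *   BoLChunkDAO.getInstance().update(chunk); // writes the fields listed above, keyed by the chunk primary key
+ * </pre>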
In case of any error, an error message gets logged but no + * exception is thrown. + */ + public synchronized void update(BoLChunkDataTO to) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: update - unable to get a valid connection!"); + return; + } + PreparedStatement updateFileReq = null; + try { + // ready updateFileReq... + updateFileReq = + con.prepareStatement( + "UPDATE request_queue rq JOIN (status_BoL sb, request_BoL rb) ON (rq.ID=rb.request_queueID AND sb.request_BoLID=rb.ID)" + + " SET sb.fileSize=?, sb.statusCode=?, sb.explanation=?, rq.pinLifetime=?, rb.normalized_sourceSURL_StFN=?, rb.sourceSURL_uniqueID=?" + + " WHERE rb.ID=?"); + logWarnings(con.getWarnings()); + updateFileReq.setLong(1, to.getFileSize()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(2, to.getStatus()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(3, to.getErrString()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(4, to.getLifeTime()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(5, to.normalizedStFN()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(6, to.sulrUniqueID()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setLong(7, to.getPrimaryKey()); + logWarnings(updateFileReq.getWarnings()); + // execute update + log.trace("BoL CHUNK DAO: update method; {}", updateFileReq.toString()); + updateFileReq.executeUpdate(); + logWarnings(updateFileReq.getWarnings()); + } catch (SQLException e) { + log.error("BoL CHUNK DAO: Unable to complete update! {}", e.getMessage(), e); + } finally { + close(updateFileReq); + } + } + + /** + * Updates the request_Bol represented by the received ReducedBoLChunkDataTO by setting its + * normalized_sourceSURL_StFN and sourceSURL_uniqueID + * + * @param chunkTO + */ + public synchronized void updateIncomplete(ReducedBoLChunkDataTO chunkTO) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: updateIncomplete - unable to get a valid connection!"); + return; + } + String str = + "UPDATE request_BoL SET normalized_sourceSURL_StFN=?, " + + "sourceSURL_uniqueID=? WHERE ID=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + stmt.setString(1, chunkTO.normalizedStFN()); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, chunkTO.surlUniqueID()); + logWarnings(stmt.getWarnings()); + + stmt.setLong(3, chunkTO.primaryKey()); + logWarnings(stmt.getWarnings()); + + log.trace("BoL CHUNK DAO - update incomplete: {}", stmt.toString()); + stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + } catch (SQLException e) { + log.error("BoL CHUNK DAO: Unable to complete update incomplete! {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * TODO WARNING! THIS IS A WORK IN PROGRESS!!! Method used to refresh the BoLChunkDataTO + * information from the MySQL DB. In this first version, only the statusCode is reloaded from the + * DB. TODO The next version must contains all the information related to the Chunk! In case of + * any error, an error message gets logged but no exception is thrown. 
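+ *
+ * <p>Illustrative sketch of the intended use (hypothetical caller code, not part of this change;
+ * the primary key value is a placeholder):
+ *
+ * <pre>
+ *   BoLChunkDataTO fresh = BoLChunkDAO.getInstance().refresh(chunkPrimaryKey);
+ *   if (fresh != null) {
+ *     int statusCode = fresh.getStatus(); // only the status code is reloaded for now
+ *   }
+ * </pre>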
+ */ + public synchronized BoLChunkDataTO refresh(long primary_key) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: refresh - unable to get a valid connection!"); + return null; + } + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + + try { + // get chunks of the request + str = "SELECT statusCode " + "FROM status_BoL " + "WHERE request_BoLID=?"; + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + find.setLong(1, primary_key); + + logWarnings(find.getWarnings()); + log.trace("BoL CHUNK DAO: refresh status method; " + find.toString()); + + rs = find.executeQuery(); + + logWarnings(find.getWarnings()); + BoLChunkDataTO aux = null; + while (rs.next()) { + aux = new BoLChunkDataTO(); + aux.setStatus(rs.getInt("statusCode")); + } + return aux; + } catch (SQLException e) { + log.error("BoL CHUNK DAO: {}", e.getMessage(), e); + return null; + } finally { + close(rs); + close(find); + } + } + + /** + * Method that queries the MySQL DB to find all entries matching the supplied TRequestToken. The + * Collection contains the corresponding BoLChunkDataTO objects. An initial simple query + * establishes the list of protocols associated with the request. A second complex query + * establishes all chunks associated with the request, by properly joining request_queue, + * request_BoL, status_BoL and request_DirOption. The considered fields are: (1) From status_BoL: + * the ID field which becomes the TOs primary key, and statusCode. (2) From request_BoL: + * sourceSURL (3) From request_queue: pinLifetime (4) From request_DirOption: isSourceADirectory, + * alLevelRecursive, numOfLevels In case of any error, a log gets written and an empty collection + * is returned. No exception is thrown. NOTE! Chunks in SRM_ABORTED status are NOT returned! + */ + public synchronized Collection find(TRequestToken requestToken) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + String strToken = requestToken.toString(); + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + str = + "SELECT tp.config_ProtocolsID " + + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " + + "WHERE rq.r_token=?"; + + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList protocols = new ArrayList(); + find.setString(1, strToken); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO: find method; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + while (rs.next()) { + protocols.add(rs.getString("tp.config_ProtocolsID")); + } + close(rs); + close(find); + + // get chunks of the request + str = + "SELECT sb.statusCode, rq.timeStamp, rq.pinLifetime, rq.deferredStartTime, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " + + "LEFT JOIN request_DirOption d ON rb.request_DirOptionID=d.ID " + + "WHERE rq.r_token=? 
AND sb.statusCode<>?"; + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + ArrayList list = new ArrayList(); + find.setString(1, strToken); + logWarnings(find.getWarnings()); + + find.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO: find method; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + BoLChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new BoLChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sb.statusCode")); + chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setDeferredStartTime(rs.getInt("rq.deferredStartTime")); + chunkDataTO.setRequestToken(strToken); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); + chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); + + chunkDataTO.setNormalizedStFN(rs.getString("rb.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); + chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); + chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); + chunkDataTO.setProtocolList(protocols); + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("BOL CHUNK DAO: {}", e.getMessage(), e); + /* Return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedBoLChunkDataTO associated to the given TRequestToken + * expressed as String. + */ + public synchronized Collection findReduced(String reqtoken) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + // get reduced chunks + String str = + "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " + + "WHERE rq.r_token=?"; + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, reqtoken); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO! findReduced with request token; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + ReducedBoLChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new ReducedBoLChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sb.statusCode")); + chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); + chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rb.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(uniqueID); + } + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("BOL CHUNK DAO: {}", e.getMessage(), e); + /* Return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedBoLChunkDataTO associated to the given griduser, and + * whose SURLs are contained in the supplied array of Strings. 
+ */ + public synchronized Collection findReduced( + TRequestToken requestToken, int[] surlUniqueIDs, String[] surls) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + /* + * NOTE: we search also on the fromSurl because otherwise we lost all + * request_Bol that have not the uniqueID set because are not yet been + * used by anybody + */ + // get reduced chunks + String str = + "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " + + "WHERE rq.r_token=? AND ( rb.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlUniqueIDs) + + " AND rb.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, requestToken.getValue()); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO! findReduced with griduser+surlarray; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + ReducedBoLChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new ReducedBoLChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sb.statusCode")); + chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); + chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rb.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(uniqueID); + } + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("BoL CHUNK DAO: {}", e.getMessage(), e); + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedBoLChunkDataTO associated to the given griduser, and + * whose SURLs are contained in the supplied array of Strings. + */ + public synchronized Collection findReduced( + String griduser, int[] surlUniqueIDs, String[] surls) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + /* + * NOTE: we search also on the fromSurl because otherwise we lost all + * request_Bol that have not the uniqueID set because are not yet been + * used by anybody + */ + // get reduced chunks + String str = + "SELECT sb.statusCode, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, rb.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " + + "WHERE rq.client_dn=? AND ( rb.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlUniqueIDs) + + " AND rb.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, griduser); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO! 
findReduced with griduser+surlarray; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + ReducedBoLChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new ReducedBoLChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sb.statusCode")); + chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); + chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rb.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(uniqueID); + } + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("BoL CHUNK DAO: {}", e.getMessage(), e); + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns the number of BoL requests on the given SURL, that are in SRM_SUCCESS + * state. This method is intended to be used by BoLChunkCatalog in the isSRM_SUCCESS method + * invocation. In case of any error, 0 is returned. + */ + public synchronized int numberInSRM_SUCCESS(int surlUniqueID) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: numberInSRM_SUCCESS - unable to get a valid connection!"); + return 0; + } + String str = + "SELECT COUNT(rb.ID) " + + "FROM status_BoL sb JOIN request_BoL rb " + + "ON (sb.request_BoLID=rb.ID) " + + "WHERE rb.sourceSURL_uniqueID=? AND sb.statusCode=?"; + PreparedStatement find = null; + ResultSet rs = null; + try { + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + /* Prepared statement spares DB-specific String notation! */ + find.setInt(1, surlUniqueID); + logWarnings(find.getWarnings()); + + find.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + logWarnings(find.getWarnings()); + + log.trace("BoL CHUNK DAO - numberInSRM_SUCCESS method: {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + int numberFileSuccessful = 0; + if (rs.next()) { + numberFileSuccessful = rs.getInt(1); + } + return numberFileSuccessful; + } catch (SQLException e) { + log.error( + "BoL CHUNK DAO! Unable to determine numberInSRM_SUCCESS! " + "Returning 0! ", + e.getMessage(), + e); + return 0; + } finally { + close(rs); + close(find); + } + } + + /** + * Method used in extraordinary situations to signal that data retrieved from the DB was malformed + * and could not be translated into the StoRM object model. This method attempts to change the + * status of the request to SRM_FAILURE and record it in the DB. This operation could potentially + * fail because the source of the malformed problems could be a problematic DB; indeed, initially + * only log messages where recorded. Yet it soon became clear that the source of malformed data + * were the clients and/or FE recording info in the DB. In these circumstances the client would + * see its request as being in the SRM_IN_PROGRESS state for ever. Hence the pressing need to + * inform it of the encountered problems. + */ + public synchronized void signalMalformedBoLChunk(BoLChunkDataTO auxTO) { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: signalMalformedBoLChunk - unable to get a valid connection!"); + return; + } + String signalSQL = + "UPDATE status_BoL SET statusCode=" + + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) + + ", explanation=? 
WHERE request_BoLID=" + + auxTO.getPrimaryKey(); + PreparedStatement signal = null; + try { + signal = con.prepareStatement(signalSQL); + logWarnings(con.getWarnings()); + /* Prepared statement spares DB-specific String notation! */ + signal.setString(1, "Request is malformed!"); + logWarnings(signal.getWarnings()); + + log.trace("BoL CHUNK DAO: signalMalformed; {}", signal.toString()); + signal.executeUpdate(); + logWarnings(signal.getWarnings()); + } catch (SQLException e) { + log.error( + "BoLChunkDAO! Unable to signal in DB that the request was " + + "malformed! Request: {}; Exception: {}", + auxTO.toString(), + e.toString(), + e); + } finally { + close(signal); + } + } + + /** + * Method that updates all expired requests in SRM_SUCCESS state, into SRM_RELEASED. This is + * needed when the client forgets to invoke srmReleaseFiles(). + * + * @return + */ + public synchronized List transitExpiredSRM_SUCCESS() { + + if (!checkConnection()) { + log.error("BoL CHUNK DAO: transitExpiredSRM_SUCCESS - unable to get a valid connection!"); + return new ArrayList(); + } + + HashMap expiredSurlMap = new HashMap(); + String str = null; + PreparedStatement prepStatement = null; + + /* Find all expired surls */ + try { + // start transaction + con.setAutoCommit(false); + + str = + "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " + + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "WHERE sb.statusCode=?" + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + prepStatement = con.prepareStatement(str); + prepStatement.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + + ResultSet res = prepStatement.executeQuery(); + logWarnings(prepStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rb.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "BoLChunkDAO! unable to build the TSURL from {}: " + + "InvalidTSURLAttributesException ", + sourceSURL, + e.getMessage()); + } + } + expiredSurlMap.put(sourceSURL, uniqueID); + } + + if (expiredSurlMap.isEmpty()) { + commit(con); + log.trace( + "BoLChunkDAO! No chunk of BoL request was transited from " + + "SRM_SUCCESS to SRM_RELEASED."); + return new ArrayList(); + } + } catch (SQLException e) { + log.error("BoLChunkDAO! SQLException.", e.getMessage(), e); + rollback(con); + return new ArrayList(); + } finally { + close(prepStatement); + } + + /* Update status of all successful surls to SRM_RELEASED */ + + prepStatement = null; + try { + + str = + "UPDATE " + + "status_BoL sb JOIN (request_BoL rb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "SET sb.statusCode=? " + + "WHERE sb.statusCode=? 
AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + prepStatement = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + prepStatement.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + logWarnings(prepStatement.getWarnings()); + + prepStatement.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + logWarnings(prepStatement.getWarnings()); + + log.trace("BoL CHUNK DAO - transitExpiredSRM_SUCCESS method: {}", prepStatement.toString()); + + int count = prepStatement.executeUpdate(); + logWarnings(prepStatement.getWarnings()); + + if (count == 0) { + log.trace( + "BoLChunkDAO! No chunk of BoL request was" + + " transited from SRM_SUCCESS to SRM_RELEASED."); + } else { + log.info( + "BoLChunkDAO! {} chunks of BoL requests were transited from " + + "SRM_SUCCESS to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "BoLChunkDAO! Unable to transit expired SRM_SUCCESS chunks of " + + "BoL requests, to SRM_RELEASED! ", + e.getMessage(), + e); + rollback(con); + return new ArrayList(); + } finally { + close(prepStatement); + } + + /* + * in order to enhance performance here we can check if there is any file + * system with tape (T1D0, T1D1), if there is not any we can skip the + * following + */ + + /* Find all not expired surls from PtG */ + + HashSet pinnedSurlSet = new HashSet(); + try { + // SURLs pinned by BoLs + str = + "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " + + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "WHERE sb.statusCode=" + + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS) + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; + + ResultSet res = null; + + prepStatement = con.prepareStatement(str); + res = prepStatement.executeQuery(); + logWarnings(prepStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rb.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "BoLChunkDAO! unable to build the TSURL from {}: " + + "InvalidTSURLAttributesException ", + sourceSURL, + e.getMessage()); + } + } + pinnedSurlSet.add(uniqueID); + } + + close(prepStatement); + + str = + "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID FROM " + + "request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "WHERE sg.statusCode=?" + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; + + prepStatement = con.prepareStatement(str); + + prepStatement.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + + res = prepStatement.executeQuery(); + logWarnings(prepStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rg.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "BoLChunkDAO! 
unable to build the TSURL from {}: " + + "InvalidTSURLAttributesException {}", + sourceSURL, + e.getMessage()); + } + } + pinnedSurlSet.add(uniqueID); + } + + commit(con); + + } catch (SQLException e) { + log.error("BoLChunkDAO! SQLException. {}", e.getMessage(), e); + rollback(con); + } finally { + close(prepStatement); + } + + /* Remove the Extended Attribute pinned if there is not a valid surl on it */ + ArrayList expiredSurlList = new ArrayList(); + TSURL surl; + for (Entry surlEntry : expiredSurlMap.entrySet()) { + if (!pinnedSurlSet.contains(surlEntry.getValue())) { + try { + surl = TSURL.makeFromStringValidate(surlEntry.getKey()); + } catch (InvalidTSURLAttributesException e) { + log.error( + "Invalid SURL, cannot release the pin " + "(Extended Attribute): {}", + surlEntry.getKey()); + continue; + } + expiredSurlList.add(surl); + StoRI stori; + try { + stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); + } catch (Throwable e) { + log.error( + "Invalid SURL {} cannot release the pin. {}: {}", + surlEntry.getKey(), + e.getClass().getCanonicalName(), + e.getMessage()); + continue; + } + + if (stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { + StormEA.removePinned(stori.getAbsolutePath()); + } + } + } + return expiredSurlList; + } + + /** + * Method that transits chunks in SRM_SUCCESS to SRM_ABORTED, for the given SURL: the overall + * request status of the requests containing that chunk, is not changed! The TURL is set to null. + * Beware, that the chunks may be part of requests that have finished, or that still have not + * finished because other chunks are still being processed. + */ + public synchronized void transitSRM_SUCCESStoSRM_ABORTED( + int surlUniqueID, String surl, String explanation) { + + if (!checkConnection()) { + log.error( + "BoL CHUNK DAO: transitSRM_SUCCESStoSRM_ABORTED - unable to get a valid connection!"); + return; + } + String str = + "UPDATE " + + "status_BoL sb JOIN request_BoL rb ON sb.request_BoLID=rb.ID " + + "SET sb.statusCode=?, sb.explanation=?, sb.transferURL=NULL " + + "WHERE sb.statusCode=? AND (rb.sourceSURL_uniqueID=? OR rb.targetSURL=?)"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(stmt.getWarnings()); + + stmt.setString(2, explanation); + logWarnings(stmt.getWarnings()); + + stmt.setInt(3, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + logWarnings(stmt.getWarnings()); + + stmt.setInt(4, surlUniqueID); + logWarnings(stmt.getWarnings()); + + stmt.setString(5, surl); + logWarnings(stmt.getWarnings()); + + log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_ABORTED: {}", stmt.toString()); + int count = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + if (count > 0) { + log.info( + "BoL CHUNK DAO! {} chunks were transited from SRM_SUCCESS " + "to SRM_ABORTED.", count); + } else { + log.trace("BoL CHUNK DAO! No chunks were transited from SRM_SUCCESS " + "to SRM_ABORTED."); + } + } catch (SQLException e) { + log.error("BoL CHUNK DAO! Unable to transitSRM_SUCCESStoSRM_ABORTED! {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method that updates all chunks in SRM_SUCCESS state, into SRM_RELEASED. An array of long + * representing the primary key of each chunk is required: only they get the status changed + * provided their current status is SRM_SUCCESS. 
This method is used during srmReleaseFiles In + * case of any error nothing happens and no exception is thrown, but proper messages get logged. + */ + public synchronized void transitSRM_SUCCESStoSRM_RELEASED(long[] ids) { + + if (!checkConnection()) { + log.error( + "BoL CHUNK DAO: transitSRM_SUCCESStoSRM_RELEASED - unable to get a valid connection!"); + return; + } + String str = + "UPDATE status_BoL SET statusCode=? " + + "WHERE statusCode=? AND request_BoLID IN " + + makeWhereString(ids); + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + logWarnings(stmt.getWarnings()); + + log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_RELEASED: {}", stmt.toString()); + int count = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "BoL CHUNK DAO! No chunk of BoL request " + + "was transited from SRM_SUCCESS to SRM_RELEASED."); + } else { + log.info( + "BoL CHUNK DAO! {} chunks of BoL requests were transited " + + "from SRM_SUCCESS to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "BoL CHUNK DAO! Unable to transit chunks from SRM_SUCCESS " + "to SRM_RELEASED! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + public synchronized void transitSRM_SUCCESStoSRM_RELEASED(long[] ids, TRequestToken token) { + + if (token == null) { + transitSRM_SUCCESStoSRM_RELEASED(ids); + } else { + /* + * If a request token has been specified, only the related BoL requests + * have to be released. This is done adding the r.r_token="..." clause in + * the where subquery. + */ + if (!checkConnection()) { + log.error( + "BoL CHUNK DAO: transitSRM_SUCCESStoSRM_RELEASED - unable to get a valid connection!"); + return; + } + String str = + "UPDATE " + + "status_BoL sb JOIN (request_BoL rb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "SET sb.statusCode=? " + + "WHERE sb.statusCode=? AND rq.r_token='" + + token.toString() + + "' AND rb.ID IN " + + makeWhereString(ids); + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + logWarnings(stmt.getWarnings()); + + log.trace("BoL CHUNK DAO - transitSRM_SUCCESStoSRM_RELEASED: {}", stmt.toString()); + int count = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "BoL CHUNK DAO! No chunk of BoL request was " + + "transited from SRM_SUCCESS to SRM_RELEASED."); + } else { + log.info( + "BoL CHUNK DAO! {} chunks of BoL requests were transited " + + "from SRM_SUCCESS to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "BoL CHUNK DAO! Unable to transit chunks " + "from SRM_SUCCESS to SRM_RELEASED! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + } + + /** Auxiliary method used to close a ResultSet */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("BoL CHUNK DAO! Unable to close ResultSet! 
Exception: " + e); + } + } + } + + /** Auxiliary method used to close a Statement */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "BoL CHUNK DAO! Unable to close Statement {} - Exception: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + private void commit(Connection con) { + + if (con != null) { + try { + con.commit(); + con.setAutoCommit(true); + } catch (SQLException e) { + log.error("BoL, SQL EXception {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method used to roll back a failed transaction */ + private void rollback(Connection con) { + + if (con != null) { + try { + con.rollback(); + con.setAutoCommit(true); + log.error("BoL CHUNK DAO: roll back successful!"); + } catch (SQLException e2) { + log.error("BoL CHUNK DAO: roll back failed! {}", e2.getMessage(), e2); + } + } + } + + /** + * Private method that returns the generated ID: it throws an exception in case of any problem! + */ + private int extractID(ResultSet rs) throws Exception { + + if (rs == null) { + throw new Exception("BoL CHUNK DAO! Null ResultSet!"); + } + if (rs.next()) { + return rs.getInt(1); + } + log.error( + "BoL CHUNK DAO! It was not possible to establish " + + "the assigned autoincrement primary key!"); + throw new Exception( + "BoL CHUNK DAO! It was not possible to establish the assigned autoincrement primary key!"); + } + + /** Auxiliary private method that logs all SQL warnings. */ + private void logWarnings(SQLWarning w) { + + if (w != null) { + log.debug("BoL CHUNK DAO: {}", w.toString()); + while ((w = w.getNextWarning()) != null) { + log.debug("BoL CHUNK DAO: {}", w.toString()); + } + } + } + + /** Method that returns a String containing all IDs. */ + private String makeWhereString(long[] rowids) { + + StringBuilder sb = new StringBuilder("("); + int n = rowids.length; + for (int i = 0; i < n; i++) { + sb.append(rowids[i]); + if (i < (n - 1)) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surl's IDs. */ + private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { + + StringBuilder sb = new StringBuilder("("); + for (int i = 0; i < surlUniqueIDs.length; i++) { + if (i > 0) { + sb.append(","); + } + sb.append(surlUniqueIDs[i]); + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surls. */ + private String makeSurlString(String[] surls) { + + StringBuilder sb = new StringBuilder("("); + int n = surls.length; + + for (int i = 0; i < n; i++) { + + SURL requestedSURL; + + try { + requestedSURL = SURL.makeSURLfromString(surls[i]); + } catch (NamespaceException e) { + log.error(e.getMessage()); + log.debug("Skip '{}' during query creation", surls[i]); + continue; + } + + sb.append("'"); + sb.append(requestedSURL.getNormalFormAsString()); + sb.append("','"); + sb.append(requestedSURL.getQueryFormAsString()); + sb.append("'"); + + if (i < (n - 1)) { + sb.append(","); + } + } + + sb.append(")"); + return sb.toString(); + } + + /** Auxiliary method that sets up the connection to the DB, as well as the prepared statement. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + logWarnings(con.getWarnings()); + response = con.isValid(0); + } catch (ClassNotFoundException | SQLException e) { + log.error("BoL CHUNK DAO! Exception in setUpConnection! 
{}", e.getMessage(), e); + } + return response; + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private boolean checkConnection() { + + boolean response = true; + if (reconnect) { + log.debug("BoL CHUNK DAO! Reconnecting to DB! "); + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method that tales down a connection to the DB. */ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + log.error("BoL CHUNK DAO! Exception in takeDownConnection method: {}", e.getMessage(), e); + } + } + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (requestToken == null || requestToken.getValue().trim().isEmpty() || explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + " explanation=" + + explanation); + } + doUpdateStatusOnMatchingStatus( + requestToken, + null, + null, + expectedStatusCode, + newStatusCode, + explanation, + true, + false, + true); + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) + throws IllegalArgumentException { + + if (requestToken == null + || requestToken.getValue().trim().isEmpty() + || surlsUniqueIDs == null + || surls == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + "surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls); + } + doUpdateStatusOnMatchingStatus( + requestToken, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + null, + true, + true, + false); + } + + public synchronized void doUpdateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation, + boolean withRequestToken, + boolean withSurls, + boolean withExplanation) + throws IllegalArgumentException { + + if ((withRequestToken && requestToken == null) + || (withExplanation && explanation == null) + || (withSurls && (surlUniqueIDs == null || surls == null))) { + throw new IllegalArgumentException( + "Unable to perform the doUpdateStatusOnMatchingStatus, " + + "invalid arguments: withRequestToken=" + + withRequestToken + + " requestToken=" + + requestToken + + " withSurls=" + + withSurls + + " surlUniqueIDs=" + + surlUniqueIDs + + " surls=" + + surls + + " withExplaination=" + + withExplanation + + " explanation=" + + explanation); + } + if (!checkConnection()) { + log.error("BOL CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); + return; + } + String str = + "UPDATE status_BoL sb JOIN (request_BoL rb, request_queue rq) " + + "ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "SET sb.statusCode=? "; + if (withExplanation) { + str += " , " + buildExpainationSet(explanation); + } + str += " WHERE sb.statusCode=? 
"; + if (withRequestToken) { + str += " AND " + buildTokenWhereClause(requestToken); + } + if (withSurls) { + str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); + } + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); + logWarnings(stmt.getWarnings()); + + log.trace("BOL CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); + int count = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "BOL CHUNK DAO! No chunk of BOL request was updated from {} " + "to {}.", + expectedStatusCode, + newStatusCode); + } else { + log.debug( + "BOL CHUNK DAO! {} chunks of BOL requests were updated " + "from {} to {}.", + count, + expectedStatusCode, + newStatusCode); + } + } catch (SQLException e) { + log.error( + "BOL CHUNK DAO! Unable to updated from {} to {}!", expectedStatusCode, newStatusCode, e); + } finally { + close(stmt); + } + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray, String dn) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0 + || dn == null) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " dn=" + + dn); + } + return find(surlsUniqueIDs, surlsArray, dn, true); + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray); + } + return find(surlsUniqueIDs, surlsArray, null, false); + } + + private synchronized Collection find( + int[] surlsUniqueIDs, String[] surlsArray, String dn, boolean withDn) + throws IllegalArgumentException { + + if ((withDn && dn == null) + || surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " withDn=" + + withDn + + " dn=" + + dn); + } + if (!checkConnection()) { + log.error("BoL CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + // get chunks of the request + String str = + "SELECT rq.ID, rq.r_token, sb.statusCode, rq.timeStamp, rq.pinLifetime, " + + "rq.deferredStartTime, rb.ID, rb.sourceSURL, rb.normalized_sourceSURL_StFN, " + + "rb.sourceSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID=rq.ID AND sb.request_BoLID=rb.ID) " + + "LEFT JOIN request_DirOption d ON rb.request_DirOptionID=d.ID " + + "WHERE ( rb.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rb.sourceSURL IN " + + makeSurlString(surlsArray) + + " )"; + if (withDn) { + str += " AND rq.client_dn=\'" + dn + "\'"; + } + find = con.prepareStatement(str); + 
logWarnings(con.getWarnings()); + + List list = new ArrayList(); + + log.trace("BOL CHUNK DAO - find method: {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + BoLChunkDataTO chunkDataTO = null; + while (rs.next()) { + + chunkDataTO = new BoLChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sb.statusCode")); + chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setDeferredStartTime(rs.getInt("rq.deferredStartTime")); + chunkDataTO.setRequestToken(rs.getString("rq.r_token")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setPrimaryKey(rs.getLong("rb.ID")); + chunkDataTO.setFromSURL(rs.getString("rb.sourceSURL")); + + chunkDataTO.setNormalizedStFN(rs.getString("rb.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rb.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); + chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); + chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("BOL CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + private String buildExpainationSet(String explanation) { + + return " sb.explanation='" + explanation + "' "; + } + + private String buildTokenWhereClause(TRequestToken requestToken) { + + return " rq.r_token='" + requestToken.toString() + "' "; + } + + private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { + + return " ( rb.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rb.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + } } diff --git a/src/main/java/it/grid/storm/catalogs/BoLChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/BoLChunkDataTO.java index 4600758d..84bf434b 100644 --- a/src/main/java/it/grid/storm/catalogs/BoLChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/BoLChunkDataTO.java @@ -1,256 +1,243 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.namespace.model.Protocol; import it.grid.storm.srm.types.TStatusCode; - import java.sql.Timestamp; import java.util.List; /** - * Class that represents a row in the Persistence Layer: this is all raw data - * referring to the BoLChunkData proper, that is, String and primitive types. - * - * Each field is initialized with default values as per SRM 2.2 specification: - * protocolList GSIFTP dirOption false status SRM_REQUEST_QUEUED - * - * All other fields are 0 if int, or a white space if String. - * + * Class that represents a row in the Persistence Layer: this is all raw data referring to the + * BoLChunkData proper, that is, String and primitive types. + * + *
<p>
Each field is initialized with default values as per SRM 2.2 specification: protocolList + * GSIFTP dirOption false status SRM_REQUEST_QUEUED + * + *
<p>
All other fields are 0 if int, or a white space if String. + * * @author CNAF * @version 1.0 * @date Aug 2009 */ public class BoLChunkDataTO { - /* Database table request_Bol fields BEGIN */ - private long primaryKey = -1; // ID primary key of record in DB - private String fromSURL = " "; - private boolean dirOption; // initialised in constructor - private String normalizedStFN = null; - private Integer surlUniqueID = null; - /* Database table request_Get fields END */ + /* Database table request_Bol fields BEGIN */ + private long primaryKey = -1; // ID primary key of record in DB + private String fromSURL = " "; + private boolean dirOption; // initialised in constructor + private String normalizedStFN = null; + private Integer surlUniqueID = null; + /* Database table request_Get fields END */ - private String requestToken = " "; - private int lifetime = 0; - private boolean allLevelRecursive; // initialised in constructor - private int numLevel; // initialised in constructor - private List protocolList = null; // initialised in constructor - private long filesize = 0; - private int status; // initialised in constructor - private String errString = " "; - private int deferredStartTime = -1; - private Timestamp timeStamp = null; + private String requestToken = " "; + private int lifetime = 0; + private boolean allLevelRecursive; // initialised in constructor + private int numLevel; // initialised in constructor + private List protocolList = null; // initialised in constructor + private long filesize = 0; + private int status; // initialised in constructor + private String errString = " "; + private int deferredStartTime = -1; + private Timestamp timeStamp = null; - public BoLChunkDataTO() { + public BoLChunkDataTO() { - TURLPrefix protocolPreferences = new TURLPrefix(); - protocolPreferences.addProtocol(Protocol.GSIFTP); - this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); - this.status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - this.dirOption = false; - this.allLevelRecursive = false; - this.numLevel = 0; - } + TURLPrefix protocolPreferences = new TURLPrefix(); + protocolPreferences.addProtocol(Protocol.GSIFTP); + this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); + this.status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + this.dirOption = false; + this.allLevelRecursive = false; + this.numLevel = 0; + } - public boolean getAllLevelRecursive() { + public boolean getAllLevelRecursive() { - return allLevelRecursive; - } + return allLevelRecursive; + } - public int getDeferredStartTime() { + public int getDeferredStartTime() { - return deferredStartTime; - } + return deferredStartTime; + } - public boolean getDirOption() { + public boolean getDirOption() { - return dirOption; - } + return dirOption; + } - public String getErrString() { + public String getErrString() { - return errString; - } + return errString; + } - public long getFileSize() { + public long getFileSize() { - return filesize; - } + return filesize; + } - public String getFromSURL() { + public String getFromSURL() { - return fromSURL; - } + return fromSURL; + } - public int getLifeTime() { + public int getLifeTime() { - return lifetime; - } + return lifetime; + } - public int getNumLevel() { + public int getNumLevel() { - return numLevel; - } + return numLevel; + } - public long getPrimaryKey() { + public long getPrimaryKey() { - return primaryKey; - } + return primaryKey; + } - public List getProtocolList() { 
+ public List getProtocolList() { - return protocolList; - } + return protocolList; + } - public String getRequestToken() { + public String getRequestToken() { - return requestToken; - } + return requestToken; + } - public Timestamp getTimeStamp() { + public Timestamp getTimeStamp() { - return timeStamp; - } + return timeStamp; + } - public int getStatus() { + public int getStatus() { - return status; - } + return status; + } - public void setAllLevelRecursive(boolean b) { + public void setAllLevelRecursive(boolean b) { - allLevelRecursive = b; - } + allLevelRecursive = b; + } - public void setDeferredStartTime(int deferredStartTime) { + public void setDeferredStartTime(int deferredStartTime) { - this.deferredStartTime = deferredStartTime; - } + this.deferredStartTime = deferredStartTime; + } - public void setDirOption(boolean b) { + public void setDirOption(boolean b) { - dirOption = b; - } + dirOption = b; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } + errString = s; + } - public void setFileSize(long n) { + public void setFileSize(long n) { - filesize = n; - } + filesize = n; + } - public void setFromSURL(String s) { + public void setFromSURL(String s) { - fromSURL = s; - } + fromSURL = s; + } - public void setLifeTime(int n) { + public void setLifeTime(int n) { - lifetime = n; - } + lifetime = n; + } - public void setNumLevel(int n) { + public void setNumLevel(int n) { - numLevel = n; - } + numLevel = n; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public void setProtocolList(List l) { + public void setProtocolList(List l) { - if ((l != null) && (!l.isEmpty())) { - protocolList = l; - } - } + if ((l != null) && (!l.isEmpty())) { + protocolList = l; + } + } - public void setRequestToken(String s) { + public void setRequestToken(String s) { - requestToken = s; - } + requestToken = s; + } - public void setTimeStamp(Timestamp timeStamp) { + public void setTimeStamp(Timestamp timeStamp) { - this.timeStamp = timeStamp; - } + this.timeStamp = timeStamp; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - this.normalizedStFN = normalizedStFN; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + /** @return the normalizedStFN */ + public String normalizedStFN() { - return normalizedStFN; - } + return normalizedStFN; + } - /** - * @param surlUniqueID - * the sURLUniqueID to set - */ - public void setSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the sURLUniqueID to set */ + public void setSurlUniqueID(Integer surlUniqueID) { - this.surlUniqueID = surlUniqueID; - } + this.surlUniqueID = surlUniqueID; + } - /** - * @return the sURLUniqueID - */ - public Integer sulrUniqueID() { + /** @return the sURLUniqueID */ + public Integer sulrUniqueID() { - return surlUniqueID; - } + return surlUniqueID; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(requestToken); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - 
sb.append(surlUniqueID); - sb.append(" "); - sb.append(lifetime); - sb.append(" "); - sb.append(dirOption); - sb.append(" "); - sb.append(allLevelRecursive); - sb.append(" "); - sb.append(numLevel); - sb.append(" "); - sb.append(protocolList); - sb.append(" "); - sb.append(filesize); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(requestToken); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(lifetime); + sb.append(" "); + sb.append(dirOption); + sb.append(" "); + sb.append(allLevelRecursive); + sb.append(" "); + sb.append(numLevel); + sb.append(" "); + sb.append(protocolList); + sb.append(" "); + sb.append(filesize); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/BoLData.java b/src/main/java/it/grid/storm/catalogs/BoLData.java index 00ea2362..2cf9c2cf 100644 --- a/src/main/java/it/grid/storm/catalogs/BoLData.java +++ b/src/main/java/it/grid/storm/catalogs/BoLData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -13,132 +12,119 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a BringOnLineChunkData, that is part of a multifile - * BringOnLine srm request. It contains data about: the requestToken, the - * fromSURL, the requested lifeTime of pinning, the TDirOption which tells - * whether the requested SURL is a directory and if it must be recursed at all - * levels, as well as the desired number of levels to recurse, the desired - * transferProtocols in order of preference, the fileSize, and the transferURL - * for the supplied SURL. - * + * This class represents a BringOnLineChunkData, that is part of a multifile BringOnLine srm + * request. It contains data about: the requestToken, the fromSURL, the requested lifeTime of + * pinning, the TDirOption which tells whether the requested SURL is a directory and if it must be + * recursed at all levels, as well as the desired number of levels to recurse, the desired + * transferProtocols in order of preference, the fileSize, and the transferURL for the supplied + * SURL. + * * @author CNAF * @version 1.0 * @date Aug 2009 */ public class BoLData extends AnonymousFileTransferData { - private static final Logger log = LoggerFactory.getLogger(BoLData.class); - - /** - * requested lifetime of TURL: it is the pin time! 
- */ - private TLifeTimeInSeconds lifeTime; - - /** - * specifies if the request regards a directory and related info - */ - private TDirOption dirOption; - - /** - * size of file - */ - private TSizeInBytes fileSize; - - /** - * how many seconds to wait before to make the lifeTime start consuming - */ - private int deferredStartTime = 0; - - public BoLData(TSURL fromSURL, TLifeTimeInSeconds lifeTime, - TDirOption dirOption, TURLPrefix desiredProtocols, TSizeInBytes fileSize, - TReturnStatus status, TTURL transferURL, int deferredStartTime) - throws InvalidFileTransferDataAttributesException, - InvalidBoLDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(fromSURL, desiredProtocols, status, transferURL); - if (lifeTime == null || dirOption == null || fileSize == null) { - throw new InvalidBoLDataAttributesException(fromSURL, lifeTime, - dirOption, desiredProtocols, fileSize, status, transferURL); - } - this.lifeTime = lifeTime; - this.dirOption = dirOption; - this.fileSize = fileSize; - this.deferredStartTime = deferredStartTime; - } - - /** - * Method that sets the status of this request to SRM_FILE_PINNED; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_FILE_PINNED(String explanation) { - - setStatus(TStatusCode.SRM_FILE_PINNED, explanation); - } - - public int getDeferredStartTime() { - - return deferredStartTime; - } - - /** - * Method that returns the dirOption specified in the srm request. - */ - public TDirOption getDirOption() { - - return dirOption; - } - - /** - * Method that returns the file size for this chunk of the srm request. - */ - public TSizeInBytes getFileSize() { - - return fileSize; - } - - /** - * Method that returns the requested pin life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds getLifeTime() { - - return lifeTime; - } - - public void setDeferredStartTime(int deferredStartTime) { - - this.deferredStartTime = deferredStartTime; - } - - /** - * Method used to set the size of the file corresponding to the requested - * SURL. If the supplied TSizeInByte is null, then nothing gets set! - */ - public void setFileSize(TSizeInBytes size) { - - if (size != null) { - fileSize = size; - } - } - - public void setLifeTime(long lifeTimeInSeconds) { - - TLifeTimeInSeconds lifeTime; - try { - lifeTime = TLifeTimeInSeconds.make(lifeTimeInSeconds, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - return; - } - - this.lifeTime = lifeTime; - } + private static final Logger log = LoggerFactory.getLogger(BoLData.class); + + /** requested lifetime of TURL: it is the pin time! 
*/ + private TLifeTimeInSeconds lifeTime; + + /** specifies if the request regards a directory and related info */ + private TDirOption dirOption; + + /** size of file */ + private TSizeInBytes fileSize; + + /** how many seconds to wait before to make the lifeTime start consuming */ + private int deferredStartTime = 0; + + public BoLData( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL, + int deferredStartTime) + throws InvalidFileTransferDataAttributesException, InvalidBoLDataAttributesException, + InvalidSurlRequestDataAttributesException { + + super(fromSURL, desiredProtocols, status, transferURL); + if (lifeTime == null || dirOption == null || fileSize == null) { + throw new InvalidBoLDataAttributesException( + fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, transferURL); + } + this.lifeTime = lifeTime; + this.dirOption = dirOption; + this.fileSize = fileSize; + this.deferredStartTime = deferredStartTime; + } + + /** + * Method that sets the status of this request to SRM_FILE_PINNED; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. + */ + public void changeStatusSRM_FILE_PINNED(String explanation) { + + setStatus(TStatusCode.SRM_FILE_PINNED, explanation); + } + + public int getDeferredStartTime() { + + return deferredStartTime; + } + + /** Method that returns the dirOption specified in the srm request. */ + public TDirOption getDirOption() { + + return dirOption; + } + + /** Method that returns the file size for this chunk of the srm request. */ + public TSizeInBytes getFileSize() { + + return fileSize; + } + + /** Method that returns the requested pin life time for this chunk of the srm request. */ + public TLifeTimeInSeconds getLifeTime() { + + return lifeTime; + } + + public void setDeferredStartTime(int deferredStartTime) { + + this.deferredStartTime = deferredStartTime; + } + + /** + * Method used to set the size of the file corresponding to the requested SURL. If the supplied + * TSizeInByte is null, then nothing gets set! + */ + public void setFileSize(TSizeInBytes size) { + + if (size != null) { + fileSize = size; + } + } + + public void setLifeTime(long lifeTimeInSeconds) { + + TLifeTimeInSeconds lifeTime; + try { + lifeTime = TLifeTimeInSeconds.make(lifeTimeInSeconds, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + return; + } + this.lifeTime = lifeTime; + } } diff --git a/src/main/java/it/grid/storm/catalogs/BoLPersistentChunkData.java b/src/main/java/it/grid/storm/catalogs/BoLPersistentChunkData.java index 665fd94e..19e2d6e3 100644 --- a/src/main/java/it/grid/storm/catalogs/BoLPersistentChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/BoLPersistentChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -12,90 +11,91 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a BringOnLineChunkData, that is part of a multifile - * BringOnLine srm request. 
It contains data about: the requestToken, the - * fromSURL, the requested lifeTime of pinning, the TDirOption which tells - * whether the requested SURL is a directory and if it must be recursed at all - * levels, as well as the desired number of levels to recurse, the desired - * transferProtocols in order of preference, the fileSize, and the transferURL - * for the supplied SURL. - * + * This class represents a BringOnLineChunkData, that is part of a multifile BringOnLine srm + * request. It contains data about: the requestToken, the fromSURL, the requested lifeTime of + * pinning, the TDirOption which tells whether the requested SURL is a directory and if it must be + * recursed at all levels, as well as the desired number of levels to recurse, the desired + * transferProtocols in order of preference, the fileSize, and the transferURL for the supplied + * SURL. + * * @author CNAF * @version 1.0 * @date Aug 2009 */ -public class BoLPersistentChunkData extends BoLData implements - PersistentChunkData { - - private static final Logger log = LoggerFactory - .getLogger(BoLPersistentChunkData.class); - - /** - * long representing the primary key for the persistence layer, in the - * status_Put table - */ - private long primaryKey = -1; - - /** - * This is the requestToken of the multifile srm request to which this chunk - * belongs - */ - private final TRequestToken requestToken; - - public BoLPersistentChunkData(TRequestToken requestToken, TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix desiredProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL, int deferredStartTime) - throws InvalidBoLPersistentChunkDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidBoLDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, - transferURL, deferredStartTime); - if (requestToken == null) { - log.debug("BoLPersistentChunkData: requestToken is null!"); - throw new InvalidBoLPersistentChunkDataAttributesException(requestToken, - fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, - transferURL); - } - this.requestToken = requestToken; - } - - /** - * Method that returns the requestToken of the srm request to which this chunk - * belongs. - */ - public TRequestToken getRequestToken() { - - return requestToken; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - @Override - public long getPrimaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! 
- */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - @Override - public long getIdentifier() { - - return getPrimaryKey(); - } +public class BoLPersistentChunkData extends BoLData implements PersistentChunkData { + + private static final Logger log = LoggerFactory.getLogger(BoLPersistentChunkData.class); + + /** long representing the primary key for the persistence layer, in the status_Put table */ + private long primaryKey = -1; + + /** This is the requestToken of the multifile srm request to which this chunk belongs */ + private final TRequestToken requestToken; + + public BoLPersistentChunkData( + TRequestToken requestToken, + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL, + int deferredStartTime) + throws InvalidBoLPersistentChunkDataAttributesException, + InvalidFileTransferDataAttributesException, InvalidBoLDataAttributesException, + InvalidSurlRequestDataAttributesException { + + super( + fromSURL, + lifeTime, + dirOption, + desiredProtocols, + fileSize, + status, + transferURL, + deferredStartTime); + if (requestToken == null) { + log.debug("BoLPersistentChunkData: requestToken is null!"); + throw new InvalidBoLPersistentChunkDataAttributesException( + requestToken, + fromSURL, + lifeTime, + dirOption, + desiredProtocols, + fileSize, + status, + transferURL); + } + this.requestToken = requestToken; + } + + /** Method that returns the requestToken of the srm request to which this chunk belongs. */ + public TRequestToken getRequestToken() { + + return requestToken; + } + + /** Method used to get the primary key used in the persistence layer! */ + @Override + public long getPrimaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + @Override + public long getIdentifier() { + + return getPrimaryKey(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ChunkDAOUtils.java b/src/main/java/it/grid/storm/catalogs/ChunkDAOUtils.java index 985c8d63..2806e2f8 100644 --- a/src/main/java/it/grid/storm/catalogs/ChunkDAOUtils.java +++ b/src/main/java/it/grid/storm/catalogs/ChunkDAOUtils.java @@ -1,14 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import java.sql.SQLWarning; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.SQLWarning; - public class ChunkDAOUtils { private static final Logger log = LoggerFactory.getLogger(ChunkDAOUtils.class); @@ -31,9 +29,9 @@ public static void printWarnings(SQLWarning warning) { public static String buildInClauseForArray(int size) { StringBuilder b = new StringBuilder(); - for (int i=1; i<=size; i++) { + for (int i = 1; i <= size; i++) { b.append('?'); - if (i lookup( - TRequestToken rt) { - - Collection chunkDataTOs = dao.find(rt); - log.debug("COPY CHUNK CATALOG: retrieved data {}", chunkDataTOs); - return buildChunkDataList(chunkDataTOs, rt); - } - - private Collection buildChunkDataList( - Collection chunkDataTOs, TRequestToken rt) { - - ArrayList list = new ArrayList(); - CopyPersistentChunkData chunk; - for (CopyChunkDataTO chunkTO : chunkDataTOs) { - chunk = makeOne(chunkTO, rt); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(completeTO(chunkTO, chunk)); - } catch (InvalidReducedCopyChunkDataAttributesException e) { - log.warn("COPY CHUNK CATALOG! unable to add missing informations on " - + "DB to the request: {}", e.getMessage()); - } - } - log.debug("COPY CHUNK CATALOG: returning {}\n\n", list); - return list; - } - - private Collection buildChunkDataList( - Collection chunkDataTOs) { - - ArrayList list = new ArrayList(); - CopyPersistentChunkData chunk; - for (CopyChunkDataTO chunkTO : chunkDataTOs) { - chunk = makeOne(chunkTO); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(completeTO(chunkTO, chunk)); - } catch (InvalidReducedCopyChunkDataAttributesException e) { - log.warn("COPY CHUNK CATALOG! 
unable to add missing informations on DB " - + "to the request: {}", e.getMessage()); - } - } - log.debug("COPY CHUNK CATALOG: returning {}\n\n", list); - return list; - } - - public Collection lookupCopyChunkData( - TRequestToken requestToken, Collection surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOs = dao.find(requestToken, - surlsUniqueIDs, surlsArray); - return buildChunkDataList(chunkDataTOs, requestToken); - } - - public Collection lookupCopyChunkData(TSURL surl, - GridUserInterface user) { - - return lookupCopyChunkData(Arrays.asList(new TSURL[] { surl }), user); - } - - public Collection lookupCopyChunkData(TSURL surl) { - - return lookupCopyChunkData(Arrays.asList(new TSURL[] { surl })); - } - - private Collection lookupCopyChunkData( - List surls, GridUserInterface user) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOs = dao.find(surlsUniqueIDs, - surlsArray, user.getDn()); - return buildChunkDataList(chunkDataTOs); - } - - public Collection lookupCopyChunkData( - List surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOs = dao.find(surlsUniqueIDs, - surlsArray); - return buildChunkDataList(chunkDataTOs); - } - - private CopyPersistentChunkData makeOne(CopyChunkDataTO chunkTO) { - - try { - return makeOne(chunkTO, - new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); - } catch (InvalidTRequestTokenAttributesException e) { - throw new IllegalStateException( - "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " - + e); - } - } - - /** - * Generates a CopyChunkData from the received CopyChunkDataTO - * - * @param chunkDataTO - * @param rt - * @return - */ - private CopyPersistentChunkData makeOne(CopyChunkDataTO chunkDataTO, - TRequestToken rt) { - - StringBuilder errorSb = new StringBuilder(); - // fromSURL - TSURL fromSURL = null; - try { - fromSURL = TSURL.makeFromStringValidate(chunkDataTO.fromSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (chunkDataTO.normalizedSourceStFN() != null) { - fromSURL.setNormalizedStFN(chunkDataTO.normalizedSourceStFN()); - } - if (chunkDataTO.sourceSurlUniqueID() != null) { - fromSURL.setUniqueID(chunkDataTO.sourceSurlUniqueID().intValue()); - } - // toSURL - TSURL toSURL = null; - try { - toSURL = TSURL.makeFromStringValidate(chunkDataTO.toSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (chunkDataTO.normalizedTargetStFN() != null) { - toSURL.setNormalizedStFN(chunkDataTO.normalizedTargetStFN()); - } - if (chunkDataTO.targetSurlUniqueID() != null) { - toSURL.setUniqueID(chunkDataTO.targetSurlUniqueID().intValue()); - } - // lifeTime - TLifeTimeInSeconds lifeTime = null; - try { - lifeTime = TLifeTimeInSeconds.make(FileLifetimeConverter.getInstance() - .toStoRM(chunkDataTO.lifeTime()), TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - 
errorSb.append(e); - } - // fileStorageType - TFileStorageType fileStorageType = FileStorageTypeConverter.getInstance() - .toSTORM(chunkDataTO.fileStorageType()); - if (fileStorageType == TFileStorageType.EMPTY) { - log.error("\nTFileStorageType could not be translated from its String " - + "representation! String: {}", chunkDataTO.fileStorageType()); - // fail creation of PtPChunk! - fileStorageType = null; - } - // spaceToken! - // - // WARNING! Although this field is in common between StoRM and DPM, a - // converter is still used - // because DPM logic for NULL/EMPTY is not known. StoRM model does not - // allow for null, so it must - // be taken care of! - TSpaceToken spaceToken = null; - TSpaceToken emptyToken = TSpaceToken.makeEmpty(); - // convert empty string representation of DPM into StoRM representation; - String spaceTokenTranslation = SpaceTokenStringConverter.getInstance() - .toStoRM(chunkDataTO.spaceToken()); - if (emptyToken.toString().equals(spaceTokenTranslation)) { - spaceToken = emptyToken; - } else { - try { - spaceToken = TSpaceToken.make(spaceTokenTranslation); - } catch (InvalidTSpaceTokenAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - } - // overwriteOption! - TOverwriteMode globalOverwriteOption = OverwriteModeConverter.getInstance() - .toSTORM(chunkDataTO.overwriteOption()); - if (globalOverwriteOption == TOverwriteMode.EMPTY) { - errorSb.append("\nTOverwriteMode could not be " - + "translated from its String representation! String: " - + chunkDataTO.overwriteOption()); - globalOverwriteOption = null; - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - chunkDataTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + chunkDataTO.status()); - } else { - status = new TReturnStatus(code, chunkDataTO.errString()); - } - // make CopyChunkData - CopyPersistentChunkData aux = null; - try { - aux = new CopyPersistentChunkData(rt, fromSURL, toSURL, lifeTime, - fileStorageType, spaceToken, globalOverwriteOption, status); - aux.setPrimaryKey(chunkDataTO.primaryKey()); - } catch (InvalidSurlRequestDataAttributesException e) { - dao.signalMalformedCopyChunk(chunkDataTO); - log.warn("COPY CHUNK CATALOG! Retrieved malformed Copy" - + " chunk data from persistence. Dropping chunk from request: {}", rt); - log.warn(e.getMessage()); - log.warn(errorSb.toString()); - } - // end... 
- return aux; - } - - /** - * - * Adds to the received CopyChunkDataTO the normalized StFN and the SURL - * unique ID taken from the CopyChunkData - * - * @param chunkTO - * @param chunk - */ - private void completeTO(ReducedCopyChunkDataTO chunkTO, - final ReducedCopyChunkData chunk) { - - chunkTO.setNormalizedSourceStFN(chunk.fromSURL().normalizedStFN()); - chunkTO.setSourceSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); - chunkTO.setNormalizedTargetStFN(chunk.toSURL().normalizedStFN()); - chunkTO.setTargetSurlUniqueID(new Integer(chunk.toSURL().uniqueId())); - } - - /** - * - * Creates a ReducedCopyChunkDataTO from the received CopyChunkDataTO and - * completes it with the normalized StFN and the SURL unique ID taken from the - * PtGChunkData - * - * @param chunkTO - * @param chunk - * @return - * @throws InvalidReducedCopyChunkDataAttributesException - */ - private ReducedCopyChunkDataTO completeTO(CopyChunkDataTO chunkTO, - final CopyPersistentChunkData chunk) - throws InvalidReducedCopyChunkDataAttributesException { - - ReducedCopyChunkDataTO reducedChunkTO = this.reduce(chunkTO); - this.completeTO(reducedChunkTO, this.reduce(chunk)); - return reducedChunkTO; - } - - /** - * Creates a ReducedCopyChunkData from the data contained in the received - * CopyChunkData - * - * @param chunk - * @return - * @throws InvalidReducedPtGChunkDataAttributesException - */ - private ReducedCopyChunkData reduce(CopyPersistentChunkData chunk) - throws InvalidReducedCopyChunkDataAttributesException { - - ReducedCopyChunkData reducedChunk = new ReducedCopyChunkData( - chunk.getSURL(), chunk.getDestinationSURL(), chunk.getStatus()); - reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); - return reducedChunk; - } - - /** - * Creates a ReducedCopyChunkDataTO from the data contained in the received - * CopyChunkDataTO - * - * @param chunkTO - * @return - */ - private ReducedCopyChunkDataTO reduce(CopyChunkDataTO chunkTO) { - - ReducedCopyChunkDataTO reducedChunkTO = new ReducedCopyChunkDataTO(); - reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); - reducedChunkTO.setFromSURL(chunkTO.fromSURL()); - reducedChunkTO.setNormalizedSourceStFN(chunkTO.normalizedSourceStFN()); - reducedChunkTO.setSourceSurlUniqueID(chunkTO.sourceSurlUniqueID()); - reducedChunkTO.setToSURL(chunkTO.toSURL()); - reducedChunkTO.setNormalizedTargetStFN(chunkTO.normalizedTargetStFN()); - reducedChunkTO.setTargetSurlUniqueID(chunkTO.targetSurlUniqueID()); - reducedChunkTO.setStatus(chunkTO.status()); - reducedChunkTO.setErrString(chunkTO.errString()); - return reducedChunkTO; - } - - /** - * Checks if the received CopyChunkDataTO contains the fields not set by the - * front end but required - * - * @param chunkTO - * @return - */ - private boolean isComplete(CopyChunkDataTO chunkTO) { - - return (chunkTO.normalizedSourceStFN() != null) - && (chunkTO.sourceSurlUniqueID() != null && chunkTO - .normalizedTargetStFN() != null) - && (chunkTO.targetSurlUniqueID() != null); - } - - /** - * Checks if the received ReducedPtGChunkDataTO contains the fields not set by - * the front end but required - * - * @param reducedChunkTO - * @return - */ - @SuppressWarnings("unused") - private boolean isComplete(ReducedCopyChunkDataTO reducedChunkTO) { - - return (reducedChunkTO.normalizedSourceStFN() != null) - && (reducedChunkTO.sourceSurlUniqueID() != null && reducedChunkTO - .normalizedTargetStFN() != null) - && (reducedChunkTO.targetSurlUniqueID() != null); - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - TStatusCode 
expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, - newStatusCode, explanation); - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - List surlList, TStatusCode expectedStatusCode, - TStatusCode newStatusCode) { - - int[] surlsUniqueIDs = new int[surlList.size()]; - String[] surls = new String[surlList.size()]; - int index = 0; - for (TSURL tsurl : surlList) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - dao.updateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode); - } - + private static final Logger log = LoggerFactory.getLogger(CopyChunkCatalog.class); + + /* only instance of CopyChunkCatalog present in StoRM! */ + private static final CopyChunkCatalog cat = new CopyChunkCatalog(); + /* WARNING!!! TO BE MODIFIED WITH FACTORY!!! */ + private CopyChunkDAO dao = CopyChunkDAO.getInstance(); + + private CopyChunkCatalog() {} + + /** Method that returns the only instance of CopyChunkCatalog available. */ + public static CopyChunkCatalog getInstance() { + + return cat; + } + + /** + * Method used to update into Persistence a retrieved CopyChunkData. In case any error occurs, the + * operation does not proceed and no Exception is thrown. + * + *
Beware that the only fields updated into persistence are the StatusCode and the errorString. + */ + public synchronized void update(CopyPersistentChunkData cd) { + + CopyChunkDataTO to = new CopyChunkDataTO(); + /* primary key needed by DAO Object */ + to.setPrimaryKey(cd.getPrimaryKey()); + to.setLifeTime(FileLifetimeConverter.getInstance().toDB(cd.getLifetime().value())); + to.setStatus(StatusCodeConverter.getInstance().toDB(cd.getStatus().getStatusCode())); + to.setErrString(cd.getStatus().getExplanation()); + to.setFileStorageType(FileStorageTypeConverter.getInstance().toDB(cd.getFileStorageType())); + to.setOverwriteOption(OverwriteModeConverter.getInstance().toDB(cd.getOverwriteOption())); + to.setNormalizedSourceStFN(cd.getSURL().normalizedStFN()); + to.setSourceSurlUniqueID(new Integer(cd.getSURL().uniqueId())); + to.setNormalizedTargetStFN(cd.getDestinationSURL().normalizedStFN()); + to.setTargetSurlUniqueID(new Integer(cd.getDestinationSURL().uniqueId())); + + dao.update(to); + } + + /** + * Method that returns a Collection of CopyChunkData Objects matching the supplied TRequestToken. + * + *
If any of the data associated with the TRequestToken is not well formed and so does not allow + * a CopyChunkData Object to be created, then that part of the request is dropped and logged, + * and processing continues with the next part. All valid chunks get returned: the others get + * dropped. + * + *
If there are no chunks to process then an empty Collection is returned, and a messagge gets + * logged. + */ + public synchronized Collection lookup(TRequestToken rt) { + + Collection chunkDataTOs = dao.find(rt); + log.debug("COPY CHUNK CATALOG: retrieved data {}", chunkDataTOs); + return buildChunkDataList(chunkDataTOs, rt); + } + + private Collection buildChunkDataList( + Collection chunkDataTOs, TRequestToken rt) { + + ArrayList list = new ArrayList(); + CopyPersistentChunkData chunk; + for (CopyChunkDataTO chunkTO : chunkDataTOs) { + chunk = makeOne(chunkTO, rt); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(completeTO(chunkTO, chunk)); + } catch (InvalidReducedCopyChunkDataAttributesException e) { + log.warn( + "COPY CHUNK CATALOG! unable to add missing informations on " + "DB to the request: {}", + e.getMessage()); + } + } + log.debug("COPY CHUNK CATALOG: returning {}\n\n", list); + return list; + } + + private Collection buildChunkDataList( + Collection chunkDataTOs) { + + ArrayList list = new ArrayList(); + CopyPersistentChunkData chunk; + for (CopyChunkDataTO chunkTO : chunkDataTOs) { + chunk = makeOne(chunkTO); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(completeTO(chunkTO, chunk)); + } catch (InvalidReducedCopyChunkDataAttributesException e) { + log.warn( + "COPY CHUNK CATALOG! unable to add missing informations on DB " + "to the request: {}", + e.getMessage()); + } + } + log.debug("COPY CHUNK CATALOG: returning {}\n\n", list); + return list; + } + + public Collection lookupCopyChunkData( + TRequestToken requestToken, Collection surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOs = dao.find(requestToken, surlsUniqueIDs, surlsArray); + return buildChunkDataList(chunkDataTOs, requestToken); + } + + public Collection lookupCopyChunkData( + TSURL surl, GridUserInterface user) { + + return lookupCopyChunkData(Arrays.asList(new TSURL[] {surl}), user); + } + + public Collection lookupCopyChunkData(TSURL surl) { + + return lookupCopyChunkData(Arrays.asList(new TSURL[] {surl})); + } + + private Collection lookupCopyChunkData( + List surls, GridUserInterface user) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOs = dao.find(surlsUniqueIDs, surlsArray, user.getDn()); + return buildChunkDataList(chunkDataTOs); + } + + public Collection lookupCopyChunkData(List surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOs = dao.find(surlsUniqueIDs, surlsArray); + return buildChunkDataList(chunkDataTOs); + } + + private CopyPersistentChunkData makeOne(CopyChunkDataTO chunkTO) { + + try { + return makeOne(chunkTO, new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); + } catch (InvalidTRequestTokenAttributesException e) { + throw new 
IllegalStateException( + "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " + e); + } + } + + /** + * Generates a CopyChunkData from the received CopyChunkDataTO + * + * @param chunkDataTO + * @param rt + * @return + */ + private CopyPersistentChunkData makeOne(CopyChunkDataTO chunkDataTO, TRequestToken rt) { + + StringBuilder errorSb = new StringBuilder(); + // fromSURL + TSURL fromSURL = null; + try { + fromSURL = TSURL.makeFromStringValidate(chunkDataTO.fromSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (chunkDataTO.normalizedSourceStFN() != null) { + fromSURL.setNormalizedStFN(chunkDataTO.normalizedSourceStFN()); + } + if (chunkDataTO.sourceSurlUniqueID() != null) { + fromSURL.setUniqueID(chunkDataTO.sourceSurlUniqueID().intValue()); + } + // toSURL + TSURL toSURL = null; + try { + toSURL = TSURL.makeFromStringValidate(chunkDataTO.toSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (chunkDataTO.normalizedTargetStFN() != null) { + toSURL.setNormalizedStFN(chunkDataTO.normalizedTargetStFN()); + } + if (chunkDataTO.targetSurlUniqueID() != null) { + toSURL.setUniqueID(chunkDataTO.targetSurlUniqueID().intValue()); + } + // lifeTime + TLifeTimeInSeconds lifeTime = null; + try { + lifeTime = + TLifeTimeInSeconds.make( + FileLifetimeConverter.getInstance().toStoRM(chunkDataTO.lifeTime()), + TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // fileStorageType + TFileStorageType fileStorageType = + FileStorageTypeConverter.getInstance().toSTORM(chunkDataTO.fileStorageType()); + if (fileStorageType == TFileStorageType.EMPTY) { + log.error( + "\nTFileStorageType could not be translated from its String " + + "representation! String: {}", + chunkDataTO.fileStorageType()); + // fail creation of PtPChunk! + fileStorageType = null; + } + // spaceToken! + // + // WARNING! Although this field is in common between StoRM and DPM, a + // converter is still used + // because DPM logic for NULL/EMPTY is not known. StoRM model does not + // allow for null, so it must + // be taken care of! + TSpaceToken spaceToken = null; + TSpaceToken emptyToken = TSpaceToken.makeEmpty(); + // convert empty string representation of DPM into StoRM representation; + String spaceTokenTranslation = + SpaceTokenStringConverter.getInstance().toStoRM(chunkDataTO.spaceToken()); + if (emptyToken.toString().equals(spaceTokenTranslation)) { + spaceToken = emptyToken; + } else { + try { + spaceToken = TSpaceToken.make(spaceTokenTranslation); + } catch (InvalidTSpaceTokenAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + } + // overwriteOption! + TOverwriteMode globalOverwriteOption = + OverwriteModeConverter.getInstance().toSTORM(chunkDataTO.overwriteOption()); + if (globalOverwriteOption == TOverwriteMode.EMPTY) { + errorSb.append( + "\nTOverwriteMode could not be " + + "translated from its String representation! 
String: " + + chunkDataTO.overwriteOption()); + globalOverwriteOption = null; + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(chunkDataTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + chunkDataTO.status()); + } else { + status = new TReturnStatus(code, chunkDataTO.errString()); + } + // make CopyChunkData + CopyPersistentChunkData aux = null; + try { + aux = + new CopyPersistentChunkData( + rt, + fromSURL, + toSURL, + lifeTime, + fileStorageType, + spaceToken, + globalOverwriteOption, + status); + aux.setPrimaryKey(chunkDataTO.primaryKey()); + } catch (InvalidSurlRequestDataAttributesException e) { + dao.signalMalformedCopyChunk(chunkDataTO); + log.warn( + "COPY CHUNK CATALOG! Retrieved malformed Copy" + + " chunk data from persistence. Dropping chunk from request: {}", + rt); + log.warn(e.getMessage()); + log.warn(errorSb.toString()); + } + // end... + return aux; + } + + /** + * Adds to the received CopyChunkDataTO the normalized StFN and the SURL unique ID taken from the + * CopyChunkData + * + * @param chunkTO + * @param chunk + */ + private void completeTO(ReducedCopyChunkDataTO chunkTO, final ReducedCopyChunkData chunk) { + + chunkTO.setNormalizedSourceStFN(chunk.fromSURL().normalizedStFN()); + chunkTO.setSourceSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); + chunkTO.setNormalizedTargetStFN(chunk.toSURL().normalizedStFN()); + chunkTO.setTargetSurlUniqueID(new Integer(chunk.toSURL().uniqueId())); + } + + /** + * Creates a ReducedCopyChunkDataTO from the received CopyChunkDataTO and completes it with the + * normalized StFN and the SURL unique ID taken from the PtGChunkData + * + * @param chunkTO + * @param chunk + * @return + * @throws InvalidReducedCopyChunkDataAttributesException + */ + private ReducedCopyChunkDataTO completeTO( + CopyChunkDataTO chunkTO, final CopyPersistentChunkData chunk) + throws InvalidReducedCopyChunkDataAttributesException { + + ReducedCopyChunkDataTO reducedChunkTO = this.reduce(chunkTO); + this.completeTO(reducedChunkTO, this.reduce(chunk)); + return reducedChunkTO; + } + + /** + * Creates a ReducedCopyChunkData from the data contained in the received CopyChunkData + * + * @param chunk + * @return + * @throws InvalidReducedPtGChunkDataAttributesException + */ + private ReducedCopyChunkData reduce(CopyPersistentChunkData chunk) + throws InvalidReducedCopyChunkDataAttributesException { + + ReducedCopyChunkData reducedChunk = + new ReducedCopyChunkData(chunk.getSURL(), chunk.getDestinationSURL(), chunk.getStatus()); + reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); + return reducedChunk; + } + + /** + * Creates a ReducedCopyChunkDataTO from the data contained in the received CopyChunkDataTO + * + * @param chunkTO + * @return + */ + private ReducedCopyChunkDataTO reduce(CopyChunkDataTO chunkTO) { + + ReducedCopyChunkDataTO reducedChunkTO = new ReducedCopyChunkDataTO(); + reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); + reducedChunkTO.setFromSURL(chunkTO.fromSURL()); + reducedChunkTO.setNormalizedSourceStFN(chunkTO.normalizedSourceStFN()); + reducedChunkTO.setSourceSurlUniqueID(chunkTO.sourceSurlUniqueID()); + reducedChunkTO.setToSURL(chunkTO.toSURL()); + reducedChunkTO.setNormalizedTargetStFN(chunkTO.normalizedTargetStFN()); + reducedChunkTO.setTargetSurlUniqueID(chunkTO.targetSurlUniqueID()); + reducedChunkTO.setStatus(chunkTO.status()); + reducedChunkTO.setErrString(chunkTO.errString()); + return reducedChunkTO; + } + 
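For context, a minimal sketch of how the catalog above is typically driven (illustrative only, not part of this patch: the variable token stands for an already-built TRequestToken and the usual it.grid.storm imports are assumed; getInstance, lookup and update are the methods of the class in this hunk):

    CopyChunkCatalog catalog = CopyChunkCatalog.getInstance();
    // lookup() drops and logs malformed rows, so only well-formed chunks come back
    for (CopyPersistentChunkData chunk : catalog.lookup(token)) {
      // ... carry out the copy and set the chunk's TReturnStatus ...
      catalog.update(chunk); // persists the chunk's new status back to the DB
    }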
+ /** + * Checks if the received CopyChunkDataTO contains the fields not set by the front end but + * required + * + * @param chunkTO + * @return + */ + private boolean isComplete(CopyChunkDataTO chunkTO) { + + return (chunkTO.normalizedSourceStFN() != null) + && (chunkTO.sourceSurlUniqueID() != null && chunkTO.normalizedTargetStFN() != null) + && (chunkTO.targetSurlUniqueID() != null); + } + + /** + * Checks if the received ReducedPtGChunkDataTO contains the fields not set by the front end but + * required + * + * @param reducedChunkTO + * @return + */ + @SuppressWarnings("unused") + private boolean isComplete(ReducedCopyChunkDataTO reducedChunkTO) { + + return (reducedChunkTO.normalizedSourceStFN() != null) + && (reducedChunkTO.sourceSurlUniqueID() != null + && reducedChunkTO.normalizedTargetStFN() != null) + && (reducedChunkTO.targetSurlUniqueID() != null); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, newStatusCode, explanation); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + List surlList, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) { + + int[] surlsUniqueIDs = new int[surlList.size()]; + String[] surls = new String[surlList.size()]; + int index = 0; + for (TSURL tsurl : surlList) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + dao.updateStatusOnMatchingStatus( + requestToken, surlsUniqueIDs, surls, expectedStatusCode, newStatusCode); + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopyChunkDAO.java b/src/main/java/it/grid/storm/catalogs/CopyChunkDAO.java index 912acb9d..65c28be9 100644 --- a/src/main/java/it/grid/storm/catalogs/CopyChunkDAO.java +++ b/src/main/java/it/grid/storm/catalogs/CopyChunkDAO.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.config.Configuration; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TStatusCode; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -20,754 +18,801 @@ import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * DAO class for PtPChunkCatalog. This DAO is specifically designed to connect - * to a MySQL DB. The raw data found in those tables is pre-treated in order to - * turn it into the Object Model of StoRM. See Method comments for further info. - * - * BEWARE! DAO Adjusts for extra fields in the DB that are not present in the - * object model. - * + * DAO class for PtPChunkCatalog. This DAO is specifically designed to connect to a MySQL DB. The + * raw data found in those tables is pre-treated in order to turn it into the Object Model of StoRM. + * See Method comments for further info. + * + *
BEWARE! DAO Adjusts for extra fields in the DB that are not present in the object model. + * * @author EGRID - ICTP Trieste * @version 2.0 * @date September 2005 */ public class CopyChunkDAO { - private static final Logger log = LoggerFactory.getLogger(CopyChunkDAO.class); - - /* String with the name of the class for the DB driver */ - private final String driver = Configuration.getInstance().getDBDriver(); - /* String referring to the URL of the DB */ - private final String url = Configuration.getInstance().getStormDbURL(); - /* String with the password for the DB */ - private final String password = Configuration.getInstance().getDBPassword(); - /* String with the name for the DB */ - private final String name = Configuration.getInstance().getDBUserName(); - - /* Connection to DB - WARNING!!! It is kept open all the time! */ - private Connection con = null; - /* boolean that tells whether reconnection is needed because of MySQL bug! */ - private boolean reconnect = false; - - /* Singleton instance */ - private final static CopyChunkDAO dao = new CopyChunkDAO(); - - /* timer thread that will run a task to alert when reconnecting is necessary! */ - private Timer clock = null; - /* - * timer task that will update the boolean signaling that a reconnection is - * needed! - */ - private TimerTask clockTask = null; - /* milliseconds that must pass before reconnecting to DB */ - private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; - /* initial delay in milliseconds before starting timer */ - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - - private CopyChunkDAO() { - - setUpConnection(); - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of the CopyChunkDAO. - */ - public static CopyChunkDAO getInstance() { - - return dao; - } - - /** - * Method used to save the changes made to a retrieved CopyChunkDataTO, back - * into the MySQL DB. - * - * Only statusCode and explanation, of status_Copy table get written to the - * DB. Likewise for fileLifetime of request_queue table. - * - * In case of any error, an error messagge gets logged but no exception is - * thrown. - */ - public synchronized void update(CopyChunkDataTO to) { - - if (!checkConnection()) { - log.error("COPY CHUNK DAO: update - unable to get a valid connection!"); - return; - } - PreparedStatement updateFileReq = null; - try { - // ready updateFileReq... - updateFileReq = con - .prepareStatement("UPDATE request_queue rq JOIN (status_Copy sc, request_Copy rc) " - + "ON (rq.ID=rc.request_queueID AND sc.request_CopyID=rc.ID) " - + "SET sc.statusCode=?, sc.explanation=?, rq.fileLifetime=?, rq.config_FileStorageTypeID=?, rq.config_OverwriteID=?, " - + "rc.normalized_sourceSURL_StFN=?, rc.sourceSURL_uniqueID=?, rc.normalized_targetSURL_StFN=?, rc.targetSURL_uniqueID=? 
" - + "WHERE rc.ID=?"); - logWarnings(con.getWarnings()); - - updateFileReq.setInt(1, to.status()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(2, to.errString()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(3, to.lifeTime()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(4, to.fileStorageType()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(5, to.overwriteOption()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(6, to.normalizedSourceStFN()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(7, to.sourceSurlUniqueID()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(8, to.normalizedTargetStFN()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(9, to.targetSurlUniqueID()); - logWarnings(updateFileReq.getWarnings()); - - updateFileReq.setLong(10, to.primaryKey()); - logWarnings(updateFileReq.getWarnings()); - - // run updateFileReq - updateFileReq.executeUpdate(); - logWarnings(updateFileReq.getWarnings()); - } catch (SQLException e) { - log.error("COPY CHUNK DAO: Unable to complete update! {}", - e.getMessage(), e); - } finally { - close(updateFileReq); - } - } - - /** - * Updates the request_Get represented by the received ReducedPtGChunkDataTO - * by setting its normalized_sourceSURL_StFN and sourceSURL_uniqueID - * - * @param chunkTO - */ - public synchronized void updateIncomplete(ReducedCopyChunkDataTO chunkTO) { - - if (!checkConnection()) { - log - .error("COPY CHUNK DAO: updateIncomplete - unable to get a valid connection!"); - return; - } - String str = "UPDATE request_Copy SET normalized_sourceSURL_StFN=?, sourceSURL_uniqueID=?, normalized_targetSURL_StFN=?, targetSURL_uniqueID=? " - + "WHERE ID=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - stmt.setString(1, chunkTO.normalizedSourceStFN()); - logWarnings(stmt.getWarnings()); - - stmt.setInt(2, chunkTO.sourceSurlUniqueID()); - logWarnings(stmt.getWarnings()); - - stmt.setString(3, chunkTO.normalizedTargetStFN()); - logWarnings(stmt.getWarnings()); - - stmt.setInt(4, chunkTO.targetSurlUniqueID()); - logWarnings(stmt.getWarnings()); - - stmt.setLong(5, chunkTO.primaryKey()); - logWarnings(stmt.getWarnings()); - - log.trace("COPY CHUNK DAO - update incomplete: {}", stmt.toString()); - stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - } catch (SQLException e) { - log.error("COPY CHUNK DAO: Unable to complete update incomplete! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that queries the MySQL DB to find all entries matching the supplied - * TRequestToken. The Collection contains the corresponding CopyChunkDataTO - * objects. - * - * A complex query establishes all chunks associated with the request token, - * by properly joining request_queue, request_Copy and status_Copy. The - * considered fields are: - * - * (1) From status_Copy: the ID field which becomes the TOs primary key, and - * statusCode. - * - * (2) From request_Copy: targetSURL and sourceSURL. - * - * (3) From request_queue: fileLifetime, config_FileStorageTypeID, s_token, - * config_OverwriteID. - * - * In case of any error, a log gets written and an empty collection is - * returned. No exception is returned. - * - * NOTE! Chunks in SRM_ABORTED status are NOT returned! 
- */ - public synchronized Collection find( - TRequestToken requestToken) { - - if (!checkConnection()) { - log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - String strToken = requestToken.toString(); - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - /* get chunks of the request */ - str = "SELECT rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " - + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " - + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " - + "WHERE rq.r_token=? AND sc.statusCode<>?"; - - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, strToken); - logWarnings(find.getWarnings()); - - find.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(find.getWarnings()); - - log.debug("COPY CHUNK DAO: find method; " + find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - CopyChunkDataTO chunkDataTO; - while (rs.next()) { - chunkDataTO = new CopyChunkDataTO(); - chunkDataTO.setRequestToken(strToken); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); - chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); - chunkDataTO.setNormalizedSourceStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); - chunkDataTO.setNormalizedTargetStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); - } - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("COPY CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! 
*/ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - - } - - public synchronized Collection find( - TRequestToken requestToken, int[] surlUniqueIDs, String[] surls) { - - if (!checkConnection()) { - log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - String strToken = requestToken.toString(); - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - /* get chunks of the request */ - str = "SELECT rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " - + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " - + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " - + "WHERE rq.r_token=? AND ( rc.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlUniqueIDs) - + " AND rc.sourceSURL IN " - + makeSurlString(surls) + " ) "; - - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, strToken); - logWarnings(find.getWarnings()); - - log.debug("COPY CHUNK DAO: find method; {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - - CopyChunkDataTO chunkDataTO; - while (rs.next()) { - chunkDataTO = new CopyChunkDataTO(); - chunkDataTO.setRequestToken(strToken); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); - chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); - chunkDataTO.setNormalizedSourceStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); - chunkDataTO.setNormalizedTargetStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); - } - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("COPY CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - - } - - /** - * Method used in extraordinary situations to signal that data retrieved from - * the DB was malformed and could not be translated into the StoRM object - * model. - * - * This method attempts to change the status of the request to SRM_FAILURE and - * record it in the DB. - * - * This operation could potentially fail because the source of the malformed - * problems could be a problematic DB; indeed, initially only log messagges - * where recorded. - * - * Yet it soon became clear that the source of malformed data were the clients - * and/or FE recording info in the DB. In these circumstances the client would - * its request as being in the SRM_IN_PROGRESS state for ever. Hence the - * pressing need to inform it of the encountered problems. 
- */ - public synchronized void signalMalformedCopyChunk(CopyChunkDataTO auxTO) { - - if (!checkConnection()) { - log - .error("COPY CHUNK DAO: signalMalformedCopyChunk - unable to get a valid connection!"); - return; - } - String signalSQL = "UPDATE status_Copy SET statusCode=" - + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) - + ", explanation=? WHERE request_CopyID=" + auxTO.primaryKey(); - - PreparedStatement signal = null; - try { - /* update storm_put_filereq */ - signal = con.prepareStatement(signalSQL); - logWarnings(con.getWarnings()); - - /* Prepared statement spares DB-specific String notation! */ - signal.setString(1, "Request is malformed!"); - logWarnings(signal.getWarnings()); - - signal.executeUpdate(); - logWarnings(signal.getWarnings()); - } catch (SQLException e) { - log.error("CopyChunkDAO! Unable to signal in DB that the request was " - + "malformed! Request: {}; Error: {}", auxTO.toString(), - e.getMessage(), e); - } finally { - close(signal); - } - } - - /** - * Auxiliary method used to close a Statement - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("COPY CHUNK DAO! Unable to close Statement {} - Error: {}", - stmt.toString(), e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to close a ResultSet - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("COPY CHUNK DAO! Unable to close ResultSet! Error: {}", - e.getMessage(), e); - } - } - } - - /** - * Auxiliary private method that logs all SQL warnings. - */ - private void logWarnings(SQLWarning w) { - - if (w != null) { - log.debug("COPY CHUNK DAO: {}", w.toString()); - while ((w = w.getNextWarning()) != null) { - log.debug("COPY CHUNK DAO: {}", w.toString()); - } - } - } - - /** - * Auxiliary method that sets up the conenction to the DB. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - logWarnings(con.getWarnings()); - response = con.isValid(0); - } catch (SQLException | ClassNotFoundException e) { - log.error("COPY CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. - */ - private synchronized boolean checkConnection() { - - boolean response = true; - if (reconnect) { - log.debug("COPY CHUNK DAO! Reconnecting to DB! "); - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method that takes down a conenctin to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - log.error("COPY CHUNK DAO! 
Exception in takeDownConnection method: {}", - e.getMessage(), e); - } - } - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || explanation == null) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken + " explanation=" - + explanation); - } - doUpdateStatusOnMatchingStatus(requestToken, null, null, - expectedStatusCode, newStatusCode, explanation, true, false, true); - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlsUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode) - throws IllegalArgumentException { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || surlsUniqueIDs == null || surls == null || surlsUniqueIDs.length == 0 - || surls.length == 0 || surlsUniqueIDs.length != surls.length) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken - + "surlsUniqueIDs=" + surlsUniqueIDs + " surls=" + surls); - } - doUpdateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, null, true, true, false); - } - - public synchronized void doUpdateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation, boolean withRequestToken, boolean withSurls, - boolean withExplanation) throws IllegalArgumentException { - - if ((withRequestToken && requestToken == null) - || (withExplanation && explanation == null) - || (withSurls && (surlUniqueIDs == null || surls == null))) { - throw new IllegalArgumentException( - "Unable to perform the doUpdateStatusOnMatchingStatus, " - + "invalid arguments: withRequestToken=" + withRequestToken - + " requestToken=" + requestToken + " withSurls=" + withSurls - + " surlUniqueIDs=" + surlUniqueIDs + " surls=" + surls - + " withExplaination=" + withExplanation + " explanation=" - + explanation); - } - if (!checkConnection()) { - log - .error("COPY CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); - return; - } - String str = "UPDATE request_queue rq JOIN (status_Copy sc, request_Copy rc) " - + "ON (rq.ID=rc.request_queueID AND sc.request_CopyID=rc.ID) " - + "SET sc.statusCode=? "; - if (withExplanation) { - str += " , " + buildExpainationSet(explanation); - } - str += " WHERE sc.statusCode=? "; - if (withRequestToken) { - str += " AND " + buildTokenWhereClause(requestToken); - } - if (withSurls) { - str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); - } - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - logWarnings(con.getWarnings()); - stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); - logWarnings(stmt.getWarnings()); - - stmt - .setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); - logWarnings(stmt.getWarnings()); - - log.trace("COPY CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); - int count = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("COPY CHUNK DAO! 
No chunk of COPY request was updated " - + "from {} to {}.", expectedStatusCode, newStatusCode); - } else { - log.debug("COPY CHUNK DAO! {} chunks of COPY requests were updated " - + "from {} to {}.", count, expectedStatusCode, newStatusCode); - } - } catch (SQLException e) { - log.error("COPY CHUNK DAO! Unable to updated from {} to {}! {}", - expectedStatusCode, newStatusCode, e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that returns a String containing all Surl's IDs. - */ - private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { - - StringBuilder sb = new StringBuilder("("); - for (int i = 0; i < surlUniqueIDs.length; i++) { - if (i > 0) { - sb.append(","); - } - sb.append(surlUniqueIDs[i]); - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surls. - */ - private String makeSurlString(String[] surls) { - - StringBuilder sb = new StringBuilder("("); - int n = surls.length; - for (int i = 0; i < n; i++) { - sb.append("'"); - sb.append(surls[i]); - sb.append("'"); - if (i < (n - 1)) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - public synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0 || dn == null) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " dn=" + dn); - } - return find(surlsUniqueIDs, surlsArray, dn, true); - } - - public synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray); - } - return find(surlsUniqueIDs, surlsArray, null, false); - } - - private synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn, boolean withDn) - throws IllegalArgumentException { - - if ((withDn && dn == null) || surlsUniqueIDs == null - || surlsUniqueIDs.length == 0 || surlsArray == null - || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " withDn=" + withDn + " dn=" + dn); - } - if (!checkConnection()) { - log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - String str = "SELECT rq.r_token, rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, " - + "rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, " - + "rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, " - + "d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " - + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " - + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " - + "WHERE ( rc.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) - + " AND rc.sourceSURL IN " - + makeSurlString(surlsArray) + " )"; - if (withDn) { - str += " AND rq.client_dn=\'" + dn + "\'"; 
- } - find = con.prepareStatement(str); - logWarnings(con.getWarnings()); - - List list = new ArrayList(); - - log.trace("COPY CHUNK DAO - find method: {}", find.toString()); - rs = find.executeQuery(); - logWarnings(find.getWarnings()); - CopyChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new CopyChunkDataTO(); - chunkDataTO.setRequestToken(rs.getString("rq.r_token")); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); - chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); - chunkDataTO.setNormalizedSourceStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); - chunkDataTO.setNormalizedTargetStFN(rs - .getString("rc.normalized_sourceSURL_StFN")); - uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); - } - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("COPY CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - private String buildExpainationSet(String explanation) { - - return " sc.explanation='" + explanation + "' "; - } - - private String buildTokenWhereClause(TRequestToken requestToken) { - - return " rq.r_token='" + requestToken.toString() + "' "; - } - - private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { - - return " ( rc.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) + " AND rc.sourceSURL IN " - + makeSurlString(surls) + " ) "; - } - + private static final Logger log = LoggerFactory.getLogger(CopyChunkDAO.class); + + /* String with the name of the class for the DB driver */ + private final String driver = Configuration.getInstance().getDBDriver(); + /* String referring to the URL of the DB */ + private final String url = Configuration.getInstance().getStormDbURL(); + /* String with the password for the DB */ + private final String password = Configuration.getInstance().getDBPassword(); + /* String with the name for the DB */ + private final String name = Configuration.getInstance().getDBUserName(); + + /* Connection to DB - WARNING!!! It is kept open all the time! */ + private Connection con = null; + /* boolean that tells whether reconnection is needed because of MySQL bug! */ + private boolean reconnect = false; + + /* Singleton instance */ + private static final CopyChunkDAO dao = new CopyChunkDAO(); + + /* timer thread that will run a task to alert when reconnecting is necessary! */ + private Timer clock = null; + /* + * timer task that will update the boolean signaling that a reconnection is + * needed! 
+ */ + private TimerTask clockTask = null; + /* milliseconds that must pass before reconnecting to DB */ + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + /* initial delay in milliseconds before starting timer */ + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + + private CopyChunkDAO() { + + setUpConnection(); + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of the CopyChunkDAO. */ + public static CopyChunkDAO getInstance() { + + return dao; + } + + /** + * Method used to save the changes made to a retrieved CopyChunkDataTO, back into the MySQL DB. + * + *
Only statusCode and explanation of the status_Copy table get written to the DB. Likewise for + * fileLifetime of the request_queue table. + * + *
In case of any error, an error messagge gets logged but no exception is thrown. + */ + public synchronized void update(CopyChunkDataTO to) { + + if (!checkConnection()) { + log.error("COPY CHUNK DAO: update - unable to get a valid connection!"); + return; + } + PreparedStatement updateFileReq = null; + try { + // ready updateFileReq... + updateFileReq = + con.prepareStatement( + "UPDATE request_queue rq JOIN (status_Copy sc, request_Copy rc) " + + "ON (rq.ID=rc.request_queueID AND sc.request_CopyID=rc.ID) " + + "SET sc.statusCode=?, sc.explanation=?, rq.fileLifetime=?, rq.config_FileStorageTypeID=?, rq.config_OverwriteID=?, " + + "rc.normalized_sourceSURL_StFN=?, rc.sourceSURL_uniqueID=?, rc.normalized_targetSURL_StFN=?, rc.targetSURL_uniqueID=? " + + "WHERE rc.ID=?"); + logWarnings(con.getWarnings()); + + updateFileReq.setInt(1, to.status()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(2, to.errString()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(3, to.lifeTime()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(4, to.fileStorageType()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(5, to.overwriteOption()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(6, to.normalizedSourceStFN()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(7, to.sourceSurlUniqueID()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(8, to.normalizedTargetStFN()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(9, to.targetSurlUniqueID()); + logWarnings(updateFileReq.getWarnings()); + + updateFileReq.setLong(10, to.primaryKey()); + logWarnings(updateFileReq.getWarnings()); + + // run updateFileReq + updateFileReq.executeUpdate(); + logWarnings(updateFileReq.getWarnings()); + } catch (SQLException e) { + log.error("COPY CHUNK DAO: Unable to complete update! {}", e.getMessage(), e); + } finally { + close(updateFileReq); + } + } + + /** + * Updates the request_Get represented by the received ReducedPtGChunkDataTO by setting its + * normalized_sourceSURL_StFN and sourceSURL_uniqueID + * + * @param chunkTO + */ + public synchronized void updateIncomplete(ReducedCopyChunkDataTO chunkTO) { + + if (!checkConnection()) { + log.error("COPY CHUNK DAO: updateIncomplete - unable to get a valid connection!"); + return; + } + String str = + "UPDATE request_Copy SET normalized_sourceSURL_StFN=?, sourceSURL_uniqueID=?, normalized_targetSURL_StFN=?, targetSURL_uniqueID=? " + + "WHERE ID=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + stmt.setString(1, chunkTO.normalizedSourceStFN()); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, chunkTO.sourceSurlUniqueID()); + logWarnings(stmt.getWarnings()); + + stmt.setString(3, chunkTO.normalizedTargetStFN()); + logWarnings(stmt.getWarnings()); + + stmt.setInt(4, chunkTO.targetSurlUniqueID()); + logWarnings(stmt.getWarnings()); + + stmt.setLong(5, chunkTO.primaryKey()); + logWarnings(stmt.getWarnings()); + + log.trace("COPY CHUNK DAO - update incomplete: {}", stmt.toString()); + stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + } catch (SQLException e) { + log.error("COPY CHUNK DAO: Unable to complete update incomplete! {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method that queries the MySQL DB to find all entries matching the supplied TRequestToken. 
The + * Collection contains the corresponding CopyChunkDataTO objects. + * + *
A complex query establishes all chunks associated with the request token by properly + * joining request_queue, request_Copy and status_Copy. The considered fields are: + * + *
(1) From status_Copy: the ID field which becomes the TOs primary key, and statusCode. + * + *
(2) From request_Copy: targetSURL and sourceSURL. + * + *
(3) From request_queue: fileLifetime, config_FileStorageTypeID, s_token, config_OverwriteID. + * + *
In case of any error, a log gets written and an empty collection is returned. No exception + * is thrown. + * + *
NOTE! Chunks in SRM_ABORTED status are NOT returned! + */ + public synchronized Collection find(TRequestToken requestToken) { + + if (!checkConnection()) { + log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + String strToken = requestToken.toString(); + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + /* get chunks of the request */ + str = + "SELECT rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " + + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " + + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " + + "WHERE rq.r_token=? AND sc.statusCode<>?"; + + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, strToken); + logWarnings(find.getWarnings()); + + find.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(find.getWarnings()); + + log.debug("COPY CHUNK DAO: find method; " + find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + CopyChunkDataTO chunkDataTO; + while (rs.next()) { + chunkDataTO = new CopyChunkDataTO(); + chunkDataTO.setRequestToken(strToken); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); + chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); + chunkDataTO.setNormalizedSourceStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); + chunkDataTO.setNormalizedTargetStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); + } + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("COPY CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! 
*/ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + public synchronized Collection find( + TRequestToken requestToken, int[] surlUniqueIDs, String[] surls) { + + if (!checkConnection()) { + log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + String strToken = requestToken.toString(); + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + /* get chunks of the request */ + str = + "SELECT rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " + + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " + + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " + + "WHERE rq.r_token=? AND ( rc.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlUniqueIDs) + + " AND rc.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, strToken); + logWarnings(find.getWarnings()); + + log.debug("COPY CHUNK DAO: find method; {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + + CopyChunkDataTO chunkDataTO; + while (rs.next()) { + chunkDataTO = new CopyChunkDataTO(); + chunkDataTO.setRequestToken(strToken); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); + chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); + chunkDataTO.setNormalizedSourceStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); + chunkDataTO.setNormalizedTargetStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); + } + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("COPY CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method used in extraordinary situations to signal that data retrieved from the DB was malformed + * and could not be translated into the StoRM object model. + * + *
This method attempts to change the status of the request to SRM_FAILURE and record it in the + * DB. + * + *
This operation could potentially fail because the source of the malformed problems could be + * a problematic DB; indeed, initially only log messages were recorded. + * + *
Yet it soon became clear that the source of malformed data were the clients and/or FE + * recording info in the DB. In these circumstances the client would its request as being in the + * SRM_IN_PROGRESS state for ever. Hence the pressing need to inform it of the encountered + * problems. + */ + public synchronized void signalMalformedCopyChunk(CopyChunkDataTO auxTO) { + + if (!checkConnection()) { + log.error("COPY CHUNK DAO: signalMalformedCopyChunk - unable to get a valid connection!"); + return; + } + String signalSQL = + "UPDATE status_Copy SET statusCode=" + + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) + + ", explanation=? WHERE request_CopyID=" + + auxTO.primaryKey(); + + PreparedStatement signal = null; + try { + /* update storm_put_filereq */ + signal = con.prepareStatement(signalSQL); + logWarnings(con.getWarnings()); + + /* Prepared statement spares DB-specific String notation! */ + signal.setString(1, "Request is malformed!"); + logWarnings(signal.getWarnings()); + + signal.executeUpdate(); + logWarnings(signal.getWarnings()); + } catch (SQLException e) { + log.error( + "CopyChunkDAO! Unable to signal in DB that the request was " + + "malformed! Request: {}; Error: {}", + auxTO.toString(), + e.getMessage(), + e); + } finally { + close(signal); + } + } + + /** Auxiliary method used to close a Statement */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "COPY CHUNK DAO! Unable to close Statement {} - Error: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + /** Auxiliary method used to close a ResultSet */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("COPY CHUNK DAO! Unable to close ResultSet! Error: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary private method that logs all SQL warnings. */ + private void logWarnings(SQLWarning w) { + + if (w != null) { + log.debug("COPY CHUNK DAO: {}", w.toString()); + while ((w = w.getNextWarning()) != null) { + log.debug("COPY CHUNK DAO: {}", w.toString()); + } + } + } + + /** Auxiliary method that sets up the conenction to the DB. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + logWarnings(con.getWarnings()); + response = con.isValid(0); + } catch (SQLException | ClassNotFoundException e) { + log.error("COPY CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); + } + return response; + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private synchronized boolean checkConnection() { + + boolean response = true; + if (reconnect) { + log.debug("COPY CHUNK DAO! Reconnecting to DB! "); + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method that takes down a conenctin to the DB. */ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + log.error("COPY CHUNK DAO! 
Exception in takeDownConnection method: {}", e.getMessage(), e); + } + } + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (requestToken == null || requestToken.getValue().trim().isEmpty() || explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + " explanation=" + + explanation); + } + doUpdateStatusOnMatchingStatus( + requestToken, + null, + null, + expectedStatusCode, + newStatusCode, + explanation, + true, + false, + true); + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) + throws IllegalArgumentException { + + if (requestToken == null + || requestToken.getValue().trim().isEmpty() + || surlsUniqueIDs == null + || surls == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + "surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls); + } + doUpdateStatusOnMatchingStatus( + requestToken, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + null, + true, + true, + false); + } + + public synchronized void doUpdateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation, + boolean withRequestToken, + boolean withSurls, + boolean withExplanation) + throws IllegalArgumentException { + + if ((withRequestToken && requestToken == null) + || (withExplanation && explanation == null) + || (withSurls && (surlUniqueIDs == null || surls == null))) { + throw new IllegalArgumentException( + "Unable to perform the doUpdateStatusOnMatchingStatus, " + + "invalid arguments: withRequestToken=" + + withRequestToken + + " requestToken=" + + requestToken + + " withSurls=" + + withSurls + + " surlUniqueIDs=" + + surlUniqueIDs + + " surls=" + + surls + + " withExplaination=" + + withExplanation + + " explanation=" + + explanation); + } + if (!checkConnection()) { + log.error("COPY CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); + return; + } + String str = + "UPDATE request_queue rq JOIN (status_Copy sc, request_Copy rc) " + + "ON (rq.ID=rc.request_queueID AND sc.request_CopyID=rc.ID) " + + "SET sc.statusCode=? "; + if (withExplanation) { + str += " , " + buildExpainationSet(explanation); + } + str += " WHERE sc.statusCode=? "; + if (withRequestToken) { + str += " AND " + buildTokenWhereClause(requestToken); + } + if (withSurls) { + str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); + } + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + logWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); + logWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); + logWarnings(stmt.getWarnings()); + + log.trace("COPY CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); + int count = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "COPY CHUNK DAO! 
No chunk of COPY request was updated " + "from {} to {}.", + expectedStatusCode, + newStatusCode); + } else { + log.debug( + "COPY CHUNK DAO! {} chunks of COPY requests were updated " + "from {} to {}.", + count, + expectedStatusCode, + newStatusCode); + } + } catch (SQLException e) { + log.error( + "COPY CHUNK DAO! Unable to updated from {} to {}! {}", + expectedStatusCode, + newStatusCode, + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + /** Method that returns a String containing all Surl's IDs. */ + private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { + + StringBuilder sb = new StringBuilder("("); + for (int i = 0; i < surlUniqueIDs.length; i++) { + if (i > 0) { + sb.append(","); + } + sb.append(surlUniqueIDs[i]); + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surls. */ + private String makeSurlString(String[] surls) { + + StringBuilder sb = new StringBuilder("("); + int n = surls.length; + for (int i = 0; i < n; i++) { + sb.append("'"); + sb.append(surls[i]); + sb.append("'"); + if (i < (n - 1)) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + public synchronized Collection find( + int[] surlsUniqueIDs, String[] surlsArray, String dn) throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0 + || dn == null) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " dn=" + + dn); + } + return find(surlsUniqueIDs, surlsArray, dn, true); + } + + public synchronized Collection find(int[] surlsUniqueIDs, String[] surlsArray) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray); + } + return find(surlsUniqueIDs, surlsArray, null, false); + } + + private synchronized Collection find( + int[] surlsUniqueIDs, String[] surlsArray, String dn, boolean withDn) + throws IllegalArgumentException { + + if ((withDn && dn == null) + || surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " withDn=" + + withDn + + " dn=" + + dn); + } + if (!checkConnection()) { + log.error("COPY CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + String str = + "SELECT rq.r_token, rq.s_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, " + + "rq.fileLifetime, rc.ID, rc.sourceSURL, rc.targetSURL, rc.normalized_sourceSURL_StFN, " + + "rc.sourceSURL_uniqueID, rc.normalized_targetSURL_StFN, rc.targetSURL_uniqueID, d.isSourceADirectory, " + + "d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_Copy rc, status_Copy sc) " + + "ON (rc.request_queueID=rq.ID AND sc.request_CopyID=rc.ID) " + + "LEFT JOIN request_DirOption d ON rc.request_DirOptionID=d.ID " + + "WHERE ( rc.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rc.sourceSURL IN " + + makeSurlString(surlsArray) + + " )"; + 
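      /*
       * Illustrative note from the editor (not part of this patch): the two helper
       * methods defined above build literal SQL IN-lists for the WHERE clause just
       * assembled. With hypothetical inputs
       *   int[] ids      = {101, 102};
       *   String[] surls = {"srm://host/path/a", "srm://host/path/b"};
       * they would return
       *   makeSURLUniqueIDWhere(ids) -> "(101,102)"
       *   makeSurlString(surls)      -> "('srm://host/path/a','srm://host/path/b')"
       * so the query is restricted to exactly those source SURLs.
       */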
if (withDn) { + str += " AND rq.client_dn=\'" + dn + "\'"; + } + find = con.prepareStatement(str); + logWarnings(con.getWarnings()); + + List list = new ArrayList(); + + log.trace("COPY CHUNK DAO - find method: {}", find.toString()); + rs = find.executeQuery(); + logWarnings(find.getWarnings()); + CopyChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new CopyChunkDataTO(); + chunkDataTO.setRequestToken(rs.getString("rq.r_token")); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setLifeTime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setPrimaryKey(rs.getLong("rc.ID")); + chunkDataTO.setFromSURL(rs.getString("rc.sourceSURL")); + chunkDataTO.setNormalizedSourceStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSourceSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setToSURL(rs.getString("rc.targetSURL")); + chunkDataTO.setNormalizedTargetStFN(rs.getString("rc.normalized_sourceSURL_StFN")); + uniqueID = rs.getInt("rc.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setTargetSurlUniqueID(new Integer(uniqueID)); + } + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("COPY CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + private String buildExpainationSet(String explanation) { + + return " sc.explanation='" + explanation + "' "; + } + + private String buildTokenWhereClause(TRequestToken requestToken) { + + return " rq.r_token='" + requestToken.toString() + "' "; + } + + private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { + + return " ( rc.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rc.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopyChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/CopyChunkDataTO.java index 1b455ac7..a989e994 100644 --- a/src/main/java/it/grid/storm/catalogs/CopyChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/CopyChunkDataTO.java @@ -1,277 +1,251 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.sql.Timestamp; -import it.grid.storm.srm.types.TOverwriteMode; import it.grid.storm.srm.types.TFileStorageType; +import it.grid.storm.srm.types.TOverwriteMode; import it.grid.storm.srm.types.TStatusCode; +import java.sql.Timestamp; /** - * Class that represents a row in the Persistence Layer: this is all raw data - * referring to the CopyChunkData proper, that is, String and primitive types. - * - * Each field is initialized with default values as per SRM 2.2 specification: - * fileStorageType VOLATILE overwriteMode NEVER status SRM_REQUEST_QUEUED - * - * All other fields are 0 if int, or a white space if String. - * + * Class that represents a row in the Persistence Layer: this is all raw data referring to the + * CopyChunkData proper, that is, String and primitive types. + * + *

Each field is initialized with default values as per SRM 2.2 specification: fileStorageType + * VOLATILE overwriteMode NEVER status SRM_REQUEST_QUEUED + * + *
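 /*
  * Editorial sketch (not part of this patch): the defaults listed above are applied
  * by the no-argument constructor further below, roughly:
  *
  *   CopyChunkDataTO to = new CopyChunkDataTO();
  *   to.fileStorageType(); // FileStorageTypeConverter.getInstance().toDB(TFileStorageType.VOLATILE), i.e. "V"
  *   to.overwriteOption(); // OverwriteModeConverter.getInstance().toDB(TOverwriteMode.NEVER)
  *   to.status();          // StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)
  */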

All other fields are 0 if int, or a white space if String. + * * @author EGRID ICTP * @version 2.0 * @date Semptember 2005 */ public class CopyChunkDataTO { - /* Database table request_Get fields BEGIN */ - private long primaryKey = -1; // ID primary key of record in DB - private String fromSURL = " "; - private String toSURL = " "; - private String normalizedSourceStFN = null; - private Integer sourceSurlUniqueID = null; - private String normalizedTargetStFN = null; - private Integer targetSurlUniqueID = null; - /* Database table request_Get fields END */ + /* Database table request_Get fields BEGIN */ + private long primaryKey = -1; // ID primary key of record in DB + private String fromSURL = " "; + private String toSURL = " "; + private String normalizedSourceStFN = null; + private Integer sourceSurlUniqueID = null; + private String normalizedTargetStFN = null; + private Integer targetSurlUniqueID = null; + /* Database table request_Get fields END */ - private String requestToken = " "; - private int lifetime = 0; - private String fileStorageType = null; // initialised in constructor - private String spaceToken = " "; - private String overwriteOption = null; // initialised in constructor - private int status; // initialised in constructor - private String errString = " "; - private Timestamp timeStamp = null; + private String requestToken = " "; + private int lifetime = 0; + private String fileStorageType = null; // initialised in constructor + private String spaceToken = " "; + private String overwriteOption = null; // initialised in constructor + private int status; // initialised in constructor + private String errString = " "; + private Timestamp timeStamp = null; - public CopyChunkDataTO() { + public CopyChunkDataTO() { - fileStorageType = FileStorageTypeConverter.getInstance().toDB( - TFileStorageType.VOLATILE); - overwriteOption = OverwriteModeConverter.getInstance().toDB( - TOverwriteMode.NEVER); - status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - } + fileStorageType = FileStorageTypeConverter.getInstance().toDB(TFileStorageType.VOLATILE); + overwriteOption = OverwriteModeConverter.getInstance().toDB(TOverwriteMode.NEVER); + status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + } - public long primaryKey() { + public long primaryKey() { - return primaryKey; - } + return primaryKey; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public String requestToken() { + public String requestToken() { - return requestToken; - } + return requestToken; + } - public void setRequestToken(String s) { + public void setRequestToken(String s) { - requestToken = s; - } + requestToken = s; + } - public Timestamp timeStamp() { + public Timestamp timeStamp() { - return timeStamp; - } + return timeStamp; + } - public void setTimeStamp(Timestamp timeStamp) { + public void setTimeStamp(Timestamp timeStamp) { - this.timeStamp = timeStamp; - } + this.timeStamp = timeStamp; + } - public String fromSURL() { + public String fromSURL() { - return fromSURL; - } + return fromSURL; + } - public void setFromSURL(String s) { + public void setFromSURL(String s) { - fromSURL = s; - } + fromSURL = s; + } - /** - * @return the normalizedStFN - */ - public String normalizedSourceStFN() { + /** @return the normalizedStFN */ + public String normalizedSourceStFN() { - return normalizedSourceStFN; - } + return normalizedSourceStFN; + } - /** - * @param normalizedStFN - * the 
normalizedStFN to set - */ - public void setNormalizedSourceStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedSourceStFN(String normalizedStFN) { - this.normalizedSourceStFN = normalizedStFN; - } + this.normalizedSourceStFN = normalizedStFN; + } - /** - * @return the surlUniqueID - */ - public Integer sourceSurlUniqueID() { + /** @return the surlUniqueID */ + public Integer sourceSurlUniqueID() { - return sourceSurlUniqueID; - } + return sourceSurlUniqueID; + } - /** - * @param surlUniqueID - * the surlUniqueID to set - */ - public void setSourceSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the surlUniqueID to set */ + public void setSourceSurlUniqueID(Integer surlUniqueID) { - this.sourceSurlUniqueID = surlUniqueID; - } + this.sourceSurlUniqueID = surlUniqueID; + } - /** - * @return the normalizedStFN - */ - public String normalizedTargetStFN() { + /** @return the normalizedStFN */ + public String normalizedTargetStFN() { - return normalizedTargetStFN; - } + return normalizedTargetStFN; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedTargetStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedTargetStFN(String normalizedStFN) { - this.normalizedTargetStFN = normalizedStFN; - } + this.normalizedTargetStFN = normalizedStFN; + } - /** - * @return the surlUniqueID - */ - public Integer targetSurlUniqueID() { + /** @return the surlUniqueID */ + public Integer targetSurlUniqueID() { - return targetSurlUniqueID; - } + return targetSurlUniqueID; + } - /** - * @param surlUniqueID - * the surlUniqueID to set - */ - public void setTargetSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the surlUniqueID to set */ + public void setTargetSurlUniqueID(Integer surlUniqueID) { - this.targetSurlUniqueID = surlUniqueID; - } + this.targetSurlUniqueID = surlUniqueID; + } - public String toSURL() { + public String toSURL() { - return toSURL; - } + return toSURL; + } - public void setToSURL(String s) { + public void setToSURL(String s) { - toSURL = s; - } + toSURL = s; + } - public int lifeTime() { + public int lifeTime() { - return lifetime; - } + return lifetime; + } - public void setLifeTime(int n) { + public void setLifeTime(int n) { - lifetime = n; - } + lifetime = n; + } - public String fileStorageType() { + public String fileStorageType() { - return fileStorageType; - } + return fileStorageType; + } - /** - * Method used to set the FileStorageType: if s is null nothing gets set; the - * internal default String is the one relative to Volatile FileStorageType. - */ - public void setFileStorageType(String s) { + /** + * Method used to set the FileStorageType: if s is null nothing gets set; the internal default + * String is the one relative to Volatile FileStorageType. 
+ */ + public void setFileStorageType(String s) { - if (s != null) - fileStorageType = s; - } + if (s != null) fileStorageType = s; + } - public String spaceToken() { + public String spaceToken() { - return spaceToken; - } + return spaceToken; + } - public void setSpaceToken(String s) { + public void setSpaceToken(String s) { - spaceToken = s; - } + spaceToken = s; + } - public String overwriteOption() { + public String overwriteOption() { - return overwriteOption; - } + return overwriteOption; + } - /** - * Method used to set the OverwriteMode: if s is null nothing gets set; the - * internal default String is the one relative to Never OverwriteMode. - */ - public void setOverwriteOption(String s) { + /** + * Method used to set the OverwriteMode: if s is null nothing gets set; the internal default + * String is the one relative to Never OverwriteMode. + */ + public void setOverwriteOption(String s) { - if (s != null) - overwriteOption = s; - } + if (s != null) overwriteOption = s; + } - public int status() { + public int status() { - return status; - } + return status; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - public String errString() { + public String errString() { - return errString; - } + return errString; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } + errString = s; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(requestToken); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedSourceStFN); - sb.append(" "); - sb.append(sourceSurlUniqueID); - sb.append(" "); - sb.append(toSURL); - sb.append(" "); - sb.append(normalizedTargetStFN); - sb.append(" "); - sb.append(targetSurlUniqueID); - sb.append(" "); - sb.append(lifetime); - sb.append(" "); - sb.append(fileStorageType); - sb.append(" "); - sb.append(spaceToken); - sb.append(" "); - sb.append(overwriteOption); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(requestToken); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedSourceStFN); + sb.append(" "); + sb.append(sourceSurlUniqueID); + sb.append(" "); + sb.append(toSURL); + sb.append(" "); + sb.append(normalizedTargetStFN); + sb.append(" "); + sb.append(targetSurlUniqueID); + sb.append(" "); + sb.append(lifetime); + sb.append(" "); + sb.append(fileStorageType); + sb.append(" "); + sb.append(spaceToken); + sb.append(" "); + sb.append(overwriteOption); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopyData.java b/src/main/java/it/grid/storm/catalogs/CopyData.java index 103cdaf9..e46a7e1c 100644 --- a/src/main/java/it/grid/storm/catalogs/CopyData.java +++ b/src/main/java/it/grid/storm/catalogs/CopyData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -11,133 +10,123 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TStatusCode; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a CopyChunkData, that is part of a multifile Copy srm - * request. It contains data about: the requestToken, the fromSURL, the toSURL, - * the target fileLifeTime, the target fileStorageType and any available target - * spaceToken, the target overwriteOption to be applied in case the file already - * exists, the fileSize of the existing file if any, return status of the file + * This class represents a CopyChunkData, that is part of a multifile Copy srm request. It contains + * data about: the requestToken, the fromSURL, the toSURL, the target fileLifeTime, the target + * fileStorageType and any available target spaceToken, the target overwriteOption to be applied in + * case the file already exists, the fileSize of the existing file if any, return status of the file * together with its error string. - * + * * @author EGRID - ICTP Trieste * @date September, 2005 * @version 2.0 */ public class CopyData extends SurlMultyOperationRequestData { - private static final Logger log = LoggerFactory.getLogger(CopyData.class); - - /** - * SURL to which the srmCopy will put the file - */ - protected TSURL destinationSURL; - - /** - * requested lifetime - BEWARE!!! It is the fileLifetime at destination in - * case of Volatile files! - */ - protected TLifeTimeInSeconds lifetime; - - /** - * TFileStorageType at destination - */ - protected TFileStorageType fileStorageType; - - /** - * SpaceToken to use for toSURL - */ - protected TSpaceToken spaceToken; - - /** - * specifies the behaviour in case of existing files for Put part of the copy - * (could be local or remote!) - */ - protected TOverwriteMode overwriteOption; - - public CopyData(TSURL fromSURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, TReturnStatus status) - throws InvalidCopyDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(fromSURL, status); - if (destinationSURL == null || lifetime == null || fileStorageType == null - || spaceToken == null || overwriteOption == null) { - throw new InvalidCopyDataAttributesException(fromSURL, destinationSURL, - lifetime, fileStorageType, spaceToken, overwriteOption, status); - } - this.destinationSURL = destinationSURL; - this.lifetime = lifetime; - this.fileStorageType = fileStorageType; - this.spaceToken = spaceToken; - this.overwriteOption = overwriteOption; - } - - /** - * Method that returns the toSURL of the srm request to which this chunk - * belongs. - */ - public TSURL getDestinationSURL() { - - return destinationSURL; - } - - /** - * Method that returns the requested pin life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds getLifetime() { - - return lifetime; - } - - /** - * Method that returns the fileStorageType for this chunk of the srm request. - */ - public TFileStorageType getFileStorageType() { - - return fileStorageType; - } - - /** - * Method that returns the space token supplied for this chunk of the srm - * request. - */ - public TSpaceToken getSpaceToken() { - - return spaceToken; - } - - /** - * Method that returns the overwriteOption specified in the srm request. 
- */ - public TOverwriteMode getOverwriteOption() { - - return overwriteOption; - } - - /** - * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_DUPLICATION_ERROR(String explanation) { - - setStatus(TStatusCode.SRM_DUPLICATION_ERROR, explanation); - } - - /** - * Method that sets the status of this request to SRM_FATAL_INTERNAL_ERROR; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_FATAL_INTERNAL_ERROR(String explanation) { - - setStatus(TStatusCode.SRM_FATAL_INTERNAL_ERROR, explanation); - } - + private static final Logger log = LoggerFactory.getLogger(CopyData.class); + + /** SURL to which the srmCopy will put the file */ + protected TSURL destinationSURL; + + /** + * requested lifetime - BEWARE!!! It is the fileLifetime at destination in case of Volatile files! + */ + protected TLifeTimeInSeconds lifetime; + + /** TFileStorageType at destination */ + protected TFileStorageType fileStorageType; + + /** SpaceToken to use for toSURL */ + protected TSpaceToken spaceToken; + + /** + * specifies the behaviour in case of existing files for Put part of the copy (could be local or + * remote!) + */ + protected TOverwriteMode overwriteOption; + + public CopyData( + TSURL fromSURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status) + throws InvalidCopyDataAttributesException, InvalidSurlRequestDataAttributesException { + + super(fromSURL, status); + if (destinationSURL == null + || lifetime == null + || fileStorageType == null + || spaceToken == null + || overwriteOption == null) { + throw new InvalidCopyDataAttributesException( + fromSURL, + destinationSURL, + lifetime, + fileStorageType, + spaceToken, + overwriteOption, + status); + } + this.destinationSURL = destinationSURL; + this.lifetime = lifetime; + this.fileStorageType = fileStorageType; + this.spaceToken = spaceToken; + this.overwriteOption = overwriteOption; + } + + /** Method that returns the toSURL of the srm request to which this chunk belongs. */ + public TSURL getDestinationSURL() { + + return destinationSURL; + } + + /** Method that returns the requested pin life time for this chunk of the srm request. */ + public TLifeTimeInSeconds getLifetime() { + + return lifetime; + } + + /** Method that returns the fileStorageType for this chunk of the srm request. */ + public TFileStorageType getFileStorageType() { + + return fileStorageType; + } + + /** Method that returns the space token supplied for this chunk of the srm request. */ + public TSpaceToken getSpaceToken() { + + return spaceToken; + } + + /** Method that returns the overwriteOption specified in the srm request. */ + public TOverwriteMode getOverwriteOption() { + + return overwriteOption; + } + + /** + * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. 
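 /*
  * Editorial usage sketch for the method defined just below (not part of this patch;
  * the variable name is hypothetical):
  *
  *   CopyData copy = ...;                          // obtained from the catalog
  *   copy.changeStatusSRM_DUPLICATION_ERROR(null);
  *   // per the javadoc above, the chunk status becomes SRM_DUPLICATION_ERROR
  *   // with an empty explanation string
  */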
+ */ + public void changeStatusSRM_DUPLICATION_ERROR(String explanation) { + + setStatus(TStatusCode.SRM_DUPLICATION_ERROR, explanation); + } + + /** + * Method that sets the status of this request to SRM_FATAL_INTERNAL_ERROR; it needs the + * explanation String which describes the situation in greater detail; if a null is passed, then + * an empty String is used as explanation. + */ + public void changeStatusSRM_FATAL_INTERNAL_ERROR(String explanation) { + + setStatus(TStatusCode.SRM_FATAL_INTERNAL_ERROR, explanation); + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopyGlobalFlagConverter.java b/src/main/java/it/grid/storm/catalogs/CopyGlobalFlagConverter.java index a64729a1..251c4727 100644 --- a/src/main/java/it/grid/storm/catalogs/CopyGlobalFlagConverter.java +++ b/src/main/java/it/grid/storm/catalogs/CopyGlobalFlagConverter.java @@ -1,98 +1,88 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.util.Map; +import it.grid.storm.srm.types.TOverwriteMode; import java.util.HashMap; import java.util.Iterator; -import it.grid.storm.srm.types.TOverwriteMode; +import java.util.Map; /** - * Package private auxiliary class used to convert between DPM and StoRM - * representation of Copy TOverwriteMode+RemoveSourceFiles global information - * for the whole request, and Flags in storm_req. - * + * Package private auxiliary class used to convert between DPM and StoRM representation of Copy + * TOverwriteMode+RemoveSourceFiles global information for the whole request, and Flags in + * storm_req. + * * @author: EGRID - ICTP Trieste * @version: 1.0 * @date: September 2005 */ class CopyGlobalFlagConverter { - private Map DPMtoSTORM = new HashMap(); - private Map STORMtoDPM = new HashMap(); + private Map DPMtoSTORM = new HashMap(); + private Map STORMtoDPM = new HashMap(); - private static CopyGlobalFlagConverter c = new CopyGlobalFlagConverter(); + private static CopyGlobalFlagConverter c = new CopyGlobalFlagConverter(); - /** - * Private constructor that fills in the conversion table; in particular, DPM - * uses int values to represent the pair of values: - * - * 0 NEVER + DO NOT RemoveSourceFiles 1 ALWAYS + DO NOT RemoveSourceFiles 2 - * WHENFILESAREDIFFERENT + DO NOT RemoveSourceFiles 4 NEVER + - * RemoveSourceFiles 5 ALWAYS + RemoveSourceFiles 6 WHENFILESAREDIFFERENT + - * RemoveSourceFiles - */ - private CopyGlobalFlagConverter() { + /** + * Private constructor that fills in the conversion table; in particular, DPM uses int values to + * represent the pair of values: + * + *

0 NEVER + DO NOT RemoveSourceFiles 1 ALWAYS + DO NOT RemoveSourceFiles 2 + * WHENFILESAREDIFFERENT + DO NOT RemoveSourceFiles 4 NEVER + RemoveSourceFiles 5 ALWAYS + + * RemoveSourceFiles 6 WHENFILESAREDIFFERENT + RemoveSourceFiles + */ + private CopyGlobalFlagConverter() { - DPMtoSTORM.put(new Integer(0), new Object[] { TOverwriteMode.NEVER, - new Boolean(false) }); - DPMtoSTORM.put(new Integer(1), new Object[] { TOverwriteMode.ALWAYS, - new Boolean(false) }); - DPMtoSTORM.put(new Integer(2), new Object[] { - TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(false) }); - DPMtoSTORM.put(new Integer(4), new Object[] { TOverwriteMode.NEVER, - new Boolean(true) }); - DPMtoSTORM.put(new Integer(5), new Object[] { TOverwriteMode.ALWAYS, - new Boolean(true) }); - DPMtoSTORM.put(new Integer(6), new Object[] { - TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(true) }); - Object aux; - for (Iterator i = DPMtoSTORM.keySet().iterator(); i.hasNext();) { - aux = i.next(); - STORMtoDPM.put(DPMtoSTORM.get(aux), aux); - } - } + DPMtoSTORM.put(new Integer(0), new Object[] {TOverwriteMode.NEVER, new Boolean(false)}); + DPMtoSTORM.put(new Integer(1), new Object[] {TOverwriteMode.ALWAYS, new Boolean(false)}); + DPMtoSTORM.put( + new Integer(2), new Object[] {TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(false)}); + DPMtoSTORM.put(new Integer(4), new Object[] {TOverwriteMode.NEVER, new Boolean(true)}); + DPMtoSTORM.put(new Integer(5), new Object[] {TOverwriteMode.ALWAYS, new Boolean(true)}); + DPMtoSTORM.put( + new Integer(6), new Object[] {TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(true)}); + Object aux; + for (Iterator i = DPMtoSTORM.keySet().iterator(); i.hasNext(); ) { + aux = i.next(); + STORMtoDPM.put(DPMtoSTORM.get(aux), aux); + } + } - /** - * Method that returns the only instance of OverwriteModeConverter. - */ - public static CopyGlobalFlagConverter getInstance() { + /** Method that returns the only instance of OverwriteModeConverter. */ + public static CopyGlobalFlagConverter getInstance() { - return c; - } + return c; + } - /** - * Method that returns the int used by DPM to represent the given - * TOverwriteMode and removeSourceFiles boolean. -1 is returned if no match is - * found. - */ - public int toDPM(TOverwriteMode om, boolean removeSourceFiles) { + /** + * Method that returns the int used by DPM to represent the given TOverwriteMode and + * removeSourceFiles boolean. -1 is returned if no match is found. + */ + public int toDPM(TOverwriteMode om, boolean removeSourceFiles) { - Integer aux = (Integer) STORMtoDPM.get(new Object[] { om, - new Boolean(removeSourceFiles) }); - if (aux == null) - return -1; - return aux.intValue(); - } + Integer aux = (Integer) STORMtoDPM.get(new Object[] {om, new Boolean(removeSourceFiles)}); + if (aux == null) return -1; + return aux.intValue(); + } - /** - * Method that returns an Object[] containing the TOverwriteMode and the - * boolean used by StoRM to represent the supplied int representation of DPM. - * An empty Object[] is returned if no StoRM type is found. - */ - public Object[] toSTORM(int n) { + /** + * Method that returns an Object[] containing the TOverwriteMode and the boolean used by StoRM to + * represent the supplied int representation of DPM. An empty Object[] is returned if no StoRM + * type is found. 
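 /*
  * Editorial worked example (not part of this patch), reading the table filled in
  * by the private constructor above:
  *
  *   toSTORM(0) -> { TOverwriteMode.NEVER,  Boolean.FALSE }  // never overwrite, keep source files
  *   toSTORM(5) -> { TOverwriteMode.ALWAYS, Boolean.TRUE  }  // always overwrite, remove source files
  *   toSTORM(3) -> { }                                       // 3 is not a defined DPM flag
  *
  * Note: the reverse map is keyed on Object[] instances, whose equals()/hashCode()
  * are identity based, so a toDPM() lookup built from a fresh array may not find a match.
  */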
+ */ + public Object[] toSTORM(int n) { - Object[] aux = (Object[]) DPMtoSTORM.get(new Integer(n)); - if (aux == null) - return new Object[] {}; - return aux; - } + Object[] aux = (Object[]) DPMtoSTORM.get(new Integer(n)); + if (aux == null) return new Object[] {}; + return aux; + } - public String toString() { + public String toString() { - return "OverWriteModeConverter.\nDPMtoSTORM map:" + DPMtoSTORM - + "\nSTORMtoDPM map:" + STORMtoDPM; - } + return "OverWriteModeConverter.\nDPMtoSTORM map:" + + DPMtoSTORM + + "\nSTORMtoDPM map:" + + STORMtoDPM; + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopyPersistentChunkData.java b/src/main/java/it/grid/storm/catalogs/CopyPersistentChunkData.java index 419ff151..3cea5c62 100644 --- a/src/main/java/it/grid/storm/catalogs/CopyPersistentChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/CopyPersistentChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -11,87 +10,80 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSpaceToken; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a CopyChunkData, that is part of a multifile Copy srm - * request. It contains data about: the requestToken, the fromSURL, the toSURL, - * the target fileLifeTime, the target fileStorageType and any available target - * spaceToken, the target overwriteOption to be applied in case the file already - * exists, the fileSize of the existing file if any, return status of the file + * This class represents a CopyChunkData, that is part of a multifile Copy srm request. It contains + * data about: the requestToken, the fromSURL, the toSURL, the target fileLifeTime, the target + * fileStorageType and any available target spaceToken, the target overwriteOption to be applied in + * case the file already exists, the fileSize of the existing file if any, return status of the file * together with its error string. - * + * * @author EGRID - ICTP Trieste * @date September, 2005 * @version 2.0 */ -public class CopyPersistentChunkData extends CopyData implements - PersistentChunkData { - - private static final Logger log = LoggerFactory - .getLogger(CopyPersistentChunkData.class); - - /** - * long representing the primary key for the persistence layer! 
- */ - private long primaryKey = -1; - - /** - * This is the requestToken of the multifile srm request to which this chunk - * belongs - */ - private TRequestToken requestToken; - - public CopyPersistentChunkData(TRequestToken requestToken, TSURL fromSURL, - TSURL destinationSURL, TLifeTimeInSeconds lifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TOverwriteMode overwriteOption, TReturnStatus status) - throws InvalidCopyPersistentChunkDataAttributesException, - InvalidCopyDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(fromSURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status); - if (requestToken == null) { - log.debug("CopyPersistentChunkData: requestToken is null!"); - throw new InvalidCopyPersistentChunkDataAttributesException(requestToken, - fromSURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status); - } - this.requestToken = requestToken; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - public long getPrimaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the requestToken of the srm request to which this chunk - * belongs. - */ - public TRequestToken getRequestToken() { - - return requestToken; - } - - @Override - public long getIdentifier() { - - return getPrimaryKey(); - } - +public class CopyPersistentChunkData extends CopyData implements PersistentChunkData { + + private static final Logger log = LoggerFactory.getLogger(CopyPersistentChunkData.class); + + /** long representing the primary key for the persistence layer! */ + private long primaryKey = -1; + + /** This is the requestToken of the multifile srm request to which this chunk belongs */ + private TRequestToken requestToken; + + public CopyPersistentChunkData( + TRequestToken requestToken, + TSURL fromSURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status) + throws InvalidCopyPersistentChunkDataAttributesException, InvalidCopyDataAttributesException, + InvalidSurlRequestDataAttributesException { + + super( + fromSURL, destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption, status); + if (requestToken == null) { + log.debug("CopyPersistentChunkData: requestToken is null!"); + throw new InvalidCopyPersistentChunkDataAttributesException( + requestToken, + fromSURL, + destinationSURL, + lifetime, + fileStorageType, + spaceToken, + overwriteOption, + status); + } + this.requestToken = requestToken; + } + + /** Method used to get the primary key used in the persistence layer! */ + public long getPrimaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the requestToken of the srm request to which this chunk belongs. 
*/ + public TRequestToken getRequestToken() { + + return requestToken; + } + + @Override + public long getIdentifier() { + + return getPrimaryKey(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/CopySpecificFlagConverter.java b/src/main/java/it/grid/storm/catalogs/CopySpecificFlagConverter.java index bd269f40..b37400e7 100644 --- a/src/main/java/it/grid/storm/catalogs/CopySpecificFlagConverter.java +++ b/src/main/java/it/grid/storm/catalogs/CopySpecificFlagConverter.java @@ -1,99 +1,87 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.util.Map; +import it.grid.storm.srm.types.TOverwriteMode; import java.util.HashMap; import java.util.Iterator; -import it.grid.storm.srm.types.TOverwriteMode; +import java.util.Map; /** - * Package private auxiliary class used to convert between DPM and StoRM - * representation of Copy TOverwriteMode+TDirOption request specific - * information, and Flags in storm_copy_filereq. - * + * Package private auxiliary class used to convert between DPM and StoRM representation of Copy + * TOverwriteMode+TDirOption request specific information, and Flags in storm_copy_filereq. + * * @author: EGRID - ICTP Trieste * @version: 1.0 * @date: September 2005 */ class CopySpecificFlagConverter { - private Map DPMtoSTORM = new HashMap(); - private Map STORMtoDPM = new HashMap(); - - private static CopySpecificFlagConverter c = new CopySpecificFlagConverter(); + private Map DPMtoSTORM = new HashMap(); + private Map STORMtoDPM = new HashMap(); - /** - * Private constructor that fills in the conversion table; in particular, DPM - * uses int values to represent the pair of values: - * - * 0 NEVER + source NOT directory 1 ALWAYS + source NOT directory 2 - * WHENFILESAREDIFFERENT + source NOT directory 4 NEVER + source is directory - * 5 ALWAYS + source is directory 6 WHENFILESAREDIFFERENT + source is - * directory - */ - private CopySpecificFlagConverter() { + private static CopySpecificFlagConverter c = new CopySpecificFlagConverter(); - DPMtoSTORM.put(new Integer(0), new Object[] { TOverwriteMode.NEVER, - new Boolean(false) }); - DPMtoSTORM.put(new Integer(1), new Object[] { TOverwriteMode.ALWAYS, - new Boolean(false) }); - DPMtoSTORM.put(new Integer(2), new Object[] { - TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(false) }); - DPMtoSTORM.put(new Integer(4), new Object[] { TOverwriteMode.NEVER, - new Boolean(true) }); - DPMtoSTORM.put(new Integer(5), new Object[] { TOverwriteMode.ALWAYS, - new Boolean(true) }); - DPMtoSTORM.put(new Integer(6), new Object[] { - TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(true) }); - Object aux; - for (Iterator i = DPMtoSTORM.keySet().iterator(); i.hasNext();) { - aux = i.next(); - STORMtoDPM.put(DPMtoSTORM.get(aux), aux); - } - } + /** + * Private constructor that fills in the conversion table; in particular, DPM uses int values to + * represent the pair of values: + * + *

0 NEVER + source NOT directory 1 ALWAYS + source NOT directory 2 WHENFILESAREDIFFERENT + + * source NOT directory 4 NEVER + source is directory 5 ALWAYS + source is directory 6 + * WHENFILESAREDIFFERENT + source is directory + */ + private CopySpecificFlagConverter() { - /** - * Method that returns the only instance of CopySpecificFlagConverter. - */ - public static CopySpecificFlagConverter getInstance() { + DPMtoSTORM.put(new Integer(0), new Object[] {TOverwriteMode.NEVER, new Boolean(false)}); + DPMtoSTORM.put(new Integer(1), new Object[] {TOverwriteMode.ALWAYS, new Boolean(false)}); + DPMtoSTORM.put( + new Integer(2), new Object[] {TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(false)}); + DPMtoSTORM.put(new Integer(4), new Object[] {TOverwriteMode.NEVER, new Boolean(true)}); + DPMtoSTORM.put(new Integer(5), new Object[] {TOverwriteMode.ALWAYS, new Boolean(true)}); + DPMtoSTORM.put( + new Integer(6), new Object[] {TOverwriteMode.WHENFILESAREDIFFERENT, new Boolean(true)}); + Object aux; + for (Iterator i = DPMtoSTORM.keySet().iterator(); i.hasNext(); ) { + aux = i.next(); + STORMtoDPM.put(DPMtoSTORM.get(aux), aux); + } + } - return c; - } + /** Method that returns the only instance of CopySpecificFlagConverter. */ + public static CopySpecificFlagConverter getInstance() { - /** - * Method that returns the int used by DPM to represent the given - * TOverwriteMode and isSourceADirectory boolean. -1 is returned if no match - * is found. - */ - public int toDPM(TOverwriteMode om, boolean isSourceADirectory) { + return c; + } - Integer aux = (Integer) STORMtoDPM.get(new Object[] { om, - new Boolean(isSourceADirectory) }); - if (aux == null) - return -1; - return aux.intValue(); - } + /** + * Method that returns the int used by DPM to represent the given TOverwriteMode and + * isSourceADirectory boolean. -1 is returned if no match is found. + */ + public int toDPM(TOverwriteMode om, boolean isSourceADirectory) { - /** - * Method that returns an Object[] containing the TOverwriteMode and the - * Boolean used by StoRM to represent the supplied int representation of DPM. - * An empty Object[] is returned if no StoRM type is found. - */ - public Object[] toSTORM(int n) { + Integer aux = (Integer) STORMtoDPM.get(new Object[] {om, new Boolean(isSourceADirectory)}); + if (aux == null) return -1; + return aux.intValue(); + } - Object[] aux = (Object[]) DPMtoSTORM.get(new Integer(n)); - if (aux == null) - return new Object[] {}; - return aux; - } + /** + * Method that returns an Object[] containing the TOverwriteMode and the Boolean used by StoRM to + * represent the supplied int representation of DPM. An empty Object[] is returned if no StoRM + * type is found. + */ + public Object[] toSTORM(int n) { - public String toString() { + Object[] aux = (Object[]) DPMtoSTORM.get(new Integer(n)); + if (aux == null) return new Object[] {}; + return aux; + } - return "OverWriteModeConverter.\nDPMtoSTORM map:" + DPMtoSTORM - + "\nSTORMtoDPM map:" + STORMtoDPM; - } + public String toString() { + return "OverWriteModeConverter.\nDPMtoSTORM map:" + + DPMtoSTORM + + "\nSTORMtoDPM map:" + + STORMtoDPM; + } } diff --git a/src/main/java/it/grid/storm/catalogs/DirOptionConverter.java b/src/main/java/it/grid/storm/catalogs/DirOptionConverter.java index 4229ed7b..c183fd4f 100644 --- a/src/main/java/it/grid/storm/catalogs/DirOptionConverter.java +++ b/src/main/java/it/grid/storm/catalogs/DirOptionConverter.java @@ -1,58 +1,52 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; /** - * Package private class that translates between DPM flag for TDirOption and - * StoRM TDirOption proper. - * - * In particular DPM uses the int 1 to denote a recursive call, yet it fails to - * distinguish between a chosen recursion level; in other words there is no way - * that DPM specifies the number of levels to recurse: so either you recurse - * till the end or nothing. - * + * Package private class that translates between DPM flag for TDirOption and StoRM TDirOption + * proper. + * + *

In particular DPM uses the int 1 to denote a recursive call, yet it fails to distinguish + * between a chosen recursion level; in other words there is no way that DPM specifies the number of + * levels to recurse: so either you recurse till the end or nothing. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date August, 2005 */ class DirOptionConverter { - static private DirOptionConverter converter = null; + private static DirOptionConverter converter = null; - private DirOptionConverter() { + private DirOptionConverter() {} - } + public static DirOptionConverter getInstance() { - static public DirOptionConverter getInstance() { + if (converter == null) converter = new DirOptionConverter(); + return converter; + } - if (converter == null) - converter = new DirOptionConverter(); - return converter; - } + /** + * Method that translates the int used by DPM as flag for TDirOption, into a boolean for + * isDirOption. + * + *
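 /*
  * Editorial summary of the mapping implemented below (not part of this patch):
  *
  *   toSTORM(1)   -> true    // DPM flag 1 = recurse
  *   toSTORM(0)   -> false   // any value other than 1
  *   toDPM(true)  -> 1
  *   toDPM(false) -> 0
  */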

1 causes true to be returned; any other value returns 0. + */ + public boolean toSTORM(int n) { - /** - * Method that translates the int used by DPM as flag for TDirOption, into a - * boolean for isDirOption. - * - * 1 causes true to be returned; any other value returns 0. - */ - public boolean toSTORM(int n) { + return (n == 1); + } - return (n == 1); - } + /** + * Method used to translate the boolean isDirOption into an int used by DPM to express the same + * thing. + * + *

true gets translated into 1; false into 0. + */ + public int toDPM(boolean isDirOption) { - /** - * Method used to translate the boolean isDirOption into an int used by DPM to - * express the same thing. - * - * true gets translated into 1; false into 0. - */ - public int toDPM(boolean isDirOption) { - - if (isDirOption) - return 1; - return 0; - } + if (isDirOption) return 1; + return 0; + } } diff --git a/src/main/java/it/grid/storm/catalogs/FileLifetimeConverter.java b/src/main/java/it/grid/storm/catalogs/FileLifetimeConverter.java index 3627d68c..c10b0990 100644 --- a/src/main/java/it/grid/storm/catalogs/FileLifetimeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/FileLifetimeConverter.java @@ -1,62 +1,53 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.config.Configuration; +import it.grid.storm.srm.types.TLifeTimeInSeconds; /** - * Class that handles DB representation of a pinLifetime as expressed by a - * TLifetimeInSeconds objects; in particular it takes care of protocol - * specification: - * - * 0/null/negative are translated as default StoRM configurable values. StoRMs - * Empty TLifeTimeInSeconds is translated as 0. - * + * Class that handles DB representation of a pinLifetime as expressed by a TLifetimeInSeconds + * objects; in particular it takes care of protocol specification: + * + *

0/null/negative are translated as default StoRM configurable values. StoRMs Empty + * TLifeTimeInSeconds is translated as 0. + * * @author EGRID ICTP * @version 1.0 * @date March 2007 */ public class FileLifetimeConverter { - private static FileLifetimeConverter stc = new FileLifetimeConverter(); // only - // instance - - private FileLifetimeConverter() { + private static FileLifetimeConverter stc = new FileLifetimeConverter(); // only + // instance - } + private FileLifetimeConverter() {} - /** - * Method that returns the only instance of SizeInBytesIntConverter - */ - public static FileLifetimeConverter getInstance() { + /** Method that returns the only instance of SizeInBytesIntConverter */ + public static FileLifetimeConverter getInstance() { - return stc; - } + return stc; + } - /** - * Method that translates the Empty TLifeTimeInSeconds into the empty - * representation of DB which is 0. Any other value is left as is. - */ - public int toDB(long l) { + /** + * Method that translates the Empty TLifeTimeInSeconds into the empty representation of DB which + * is 0. Any other value is left as is. + */ + public int toDB(long l) { - if (l == TLifeTimeInSeconds.makeEmpty().value()) - return 0; - return new Long(l).intValue(); - } + if (l == TLifeTimeInSeconds.makeEmpty().value()) return 0; + return new Long(l).intValue(); + } - /** - * Method that returns the long corresponding to the int value in the DB, - * except if it is 0, NULL or negative; a configurable default value is - * returned instead, corresponding to the getFileLifetimeDefault() - * Configuration class method. - */ - public long toStoRM(int s) { + /** + * Method that returns the long corresponding to the int value in the DB, except if it is 0, NULL + * or negative; a configurable default value is returned instead, corresponding to the + * getFileLifetimeDefault() Configuration class method. + */ + public long toStoRM(int s) { - if (s <= 0) - return Configuration.getInstance().getFileLifetimeDefault(); - return new Integer(s).longValue(); - } + if (s <= 0) return Configuration.getInstance().getFileLifetimeDefault(); + return new Integer(s).longValue(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/FileStorageTypeConverter.java b/src/main/java/it/grid/storm/catalogs/FileStorageTypeConverter.java index 0f8f8171..6d265e2e 100644 --- a/src/main/java/it/grid/storm/catalogs/FileStorageTypeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/FileStorageTypeConverter.java @@ -1,91 +1,83 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.util.Map; +import it.grid.storm.config.Configuration; +import it.grid.storm.srm.types.TFileStorageType; import java.util.HashMap; import java.util.Iterator; -import it.grid.storm.srm.types.TFileStorageType; -import it.grid.storm.config.Configuration; +import java.util.Map; /** - * Package private auxiliary class used to convert between DB raw data and StoRM - * object model representation of TFileStorageType. - * + * Package private auxiliary class used to convert between DB raw data and StoRM object model + * representation of TFileStorageType. 
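 /*
  * Editorial sketch of the conversion implemented by this class (not part of this patch):
  *
  *   toDB(TFileStorageType.VOLATILE) -> "V"
  *   toSTORM("P")                    -> TFileStorageType.PERMANENT
  *   toSTORM("X")                    -> the configured default TFileStorageType,
  *                                      or TFileStorageType.EMPTY if that default is misconfigured
  */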
+ * * @author: EGRID ICTP * @version: 2.0 * @date: June 2005 */ class FileStorageTypeConverter { - private Map DBtoSTORM = new HashMap(); - private Map STORMtoDB = new HashMap(); - - private static FileStorageTypeConverter c = new FileStorageTypeConverter(); + private Map DBtoSTORM = new HashMap(); + private Map STORMtoDB = new HashMap(); - /** - * Private constructor that fills in the conversion tables; - * - * V - VOLATILE P - PERMANENT D - DURABLE - */ - private FileStorageTypeConverter() { + private static FileStorageTypeConverter c = new FileStorageTypeConverter(); - DBtoSTORM.put("V", TFileStorageType.VOLATILE); - DBtoSTORM.put("P", TFileStorageType.PERMANENT); - DBtoSTORM.put("D", TFileStorageType.DURABLE); - String aux; - for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext();) { - aux = i.next(); - STORMtoDB.put(DBtoSTORM.get(aux), aux); - } - } + /** + * Private constructor that fills in the conversion tables; + * + *

V - VOLATILE P - PERMANENT D - DURABLE + */ + private FileStorageTypeConverter() { - /** - * Method that returns the only instance of FileStorageTypeConverter. - */ - public static FileStorageTypeConverter getInstance() { + DBtoSTORM.put("V", TFileStorageType.VOLATILE); + DBtoSTORM.put("P", TFileStorageType.PERMANENT); + DBtoSTORM.put("D", TFileStorageType.DURABLE); + String aux; + for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext(); ) { + aux = i.next(); + STORMtoDB.put(DBtoSTORM.get(aux), aux); + } + } - return c; - } + /** Method that returns the only instance of FileStorageTypeConverter. */ + public static FileStorageTypeConverter getInstance() { - /** - * Method that returns the String used in the DB to represent the given - * TFileStorageType. The empty String "" is returned if no match is found. - */ - public String toDB(TFileStorageType fst) { + return c; + } - String aux = (String) STORMtoDB.get(fst); - if (aux == null) - return ""; - return aux; - } + /** + * Method that returns the String used in the DB to represent the given TFileStorageType. The + * empty String "" is returned if no match is found. + */ + public String toDB(TFileStorageType fst) { - /** - * Method that returns the TFileStorageType used by StoRM to represent the - * supplied String representation in the DB. A configured default - * TFileStorageType is returned in case no corresponding StoRM type is found. - * TFileStorageType.EMPTY is returned if there are configuration errors. - */ - public TFileStorageType toSTORM(String s) { + String aux = (String) STORMtoDB.get(fst); + if (aux == null) return ""; + return aux; + } - TFileStorageType aux = DBtoSTORM.get(s); - if (aux == null) - // This case is that the String s is different from V,P or D. - aux = DBtoSTORM.get(Configuration.getInstance() - .getDefaultFileStorageType()); - if (aux == null) - // This case should never happen, but in case we prefer ponder PERMANENT. - return TFileStorageType.EMPTY; - else - return aux; - } + /** + * Method that returns the TFileStorageType used by StoRM to represent the supplied String + * representation in the DB. A configured default TFileStorageType is returned in case no + * corresponding StoRM type is found. TFileStorageType.EMPTY is returned if there are + * configuration errors. + */ + public TFileStorageType toSTORM(String s) { - public String toString() { + TFileStorageType aux = DBtoSTORM.get(s); + if (aux == null) + // This case is that the String s is different from V,P or D. + aux = DBtoSTORM.get(Configuration.getInstance().getDefaultFileStorageType()); + if (aux == null) + // This case should never happen, but in case we prefer ponder PERMANENT. + return TFileStorageType.EMPTY; + else return aux; + } - return "FileStorageTypeConverter.\nDBtoSTORM map:" + DBtoSTORM - + "\nSTORMtoDB map:" + STORMtoDB; - } + public String toString() { + return "FileStorageTypeConverter.\nDBtoSTORM map:" + DBtoSTORM + "\nSTORMtoDB map:" + STORMtoDB; + } } diff --git a/src/main/java/it/grid/storm/catalogs/FileTransferData.java b/src/main/java/it/grid/storm/catalogs/FileTransferData.java index 352d6c52..f2add2b6 100644 --- a/src/main/java/it/grid/storm/catalogs/FileTransferData.java +++ b/src/main/java/it/grid/storm/catalogs/FileTransferData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -9,21 +8,18 @@ public interface FileTransferData extends SynchMultyOperationRequestData { - /** - * Method that returns a TURLPrefix containing the transfer protocols desired - * for this chunk of the srm request. - */ - public TURLPrefix getTransferProtocols(); + /** + * Method that returns a TURLPrefix containing the transfer protocols desired for this chunk of + * the srm request. + */ + public TURLPrefix getTransferProtocols(); - /** - * Method that returns the TURL for this chunk of the srm request. - */ - public TTURL getTransferURL(); - - /** - * Method used to set the transferURL associated to the SURL of this chunk. If - * TTURL is null, then nothing gets set! - */ - public void setTransferURL(final TTURL turl); + /** Method that returns the TURL for this chunk of the srm request. */ + public TTURL getTransferURL(); + /** + * Method used to set the transferURL associated to the SURL of this chunk. If TTURL is null, then + * nothing gets set! + */ + public void setTransferURL(final TTURL turl); } diff --git a/src/main/java/it/grid/storm/catalogs/IdentityPtGData.java b/src/main/java/it/grid/storm/catalogs/IdentityPtGData.java index f740039d..82c03878 100644 --- a/src/main/java/it/grid/storm/catalogs/IdentityPtGData.java +++ b/src/main/java/it/grid/storm/catalogs/IdentityPtGData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -14,47 +13,50 @@ import it.grid.storm.srm.types.TTURL; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityPtGData extends AnonymousPtGData implements - IdentityInputData { - - private final GridUserInterface auth; - - /** - * @param requestToken - * @param fromSURL - * @param lifeTime - * @param dirOption - * @param desiredProtocols - * @param fileSize - * @param status - * @param transferURL - * @throws InvalidPtGDataAttributesException - */ - public IdentityPtGData(GridUserInterface auth, TSURL SURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix desiredProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL) throws InvalidPtGDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException, IllegalArgumentException { - - super(SURL, lifeTime, dirOption, desiredProtocols, fileSize, status, - transferURL); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } - - @Override - public GridUserInterface getUser() { - - return auth; - } - - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } +public class IdentityPtGData extends AnonymousPtGData implements IdentityInputData { + + private final GridUserInterface auth; + + /** + * @param requestToken + * @param fromSURL + * @param lifeTime + * @param dirOption + * @param desiredProtocols + * @param fileSize + * @param status + * @param transferURL + * @throws InvalidPtGDataAttributesException + */ + public IdentityPtGData( + GridUserInterface auth, + TSURL SURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtGDataAttributesException, 
InvalidFileTransferDataAttributesException, + InvalidSurlRequestDataAttributesException, IllegalArgumentException { + + super(SURL, lifeTime, dirOption, desiredProtocols, fileSize, status, transferURL); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } + + @Override + public GridUserInterface getUser() { + + return auth; + } + + @Override + public String getPrincipal() { + + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/IdentityPtPData.java b/src/main/java/it/grid/storm/catalogs/IdentityPtPData.java index 75840ad7..5b2a22d1 100644 --- a/src/main/java/it/grid/storm/catalogs/IdentityPtPData.java +++ b/src/main/java/it/grid/storm/catalogs/IdentityPtPData.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.catalogs; import it.grid.storm.common.types.TURLPrefix; @@ -19,54 +16,64 @@ import it.grid.storm.srm.types.TTURL; import it.grid.storm.synchcall.data.IdentityInputData; -/** - * @author Michele Dibenedetto - * - */ -public class IdentityPtPData extends AnonymousPtPData implements - IdentityInputData { - - private final GridUserInterface auth; +/** @author Michele Dibenedetto */ +public class IdentityPtPData extends AnonymousPtPData implements IdentityInputData { - /** - * @param requestToken - * @param fromSURL - * @param lifeTime - * @param dirOption - * @param desiredProtocols - * @param fileSize - * @param status - * @param transferURL - * @throws InvalidPtGDataAttributesException - */ - public IdentityPtPData(GridUserInterface auth, TSURL SURL, - TLifeTimeInSeconds pinLifetime, TLifeTimeInSeconds fileLifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TSizeInBytes expectedFileSize, TURLPrefix transferProtocols, - TOverwriteMode overwriteOption, TReturnStatus status, TTURL transferURL) - throws InvalidPtPDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException, IllegalArgumentException { + private final GridUserInterface auth; - super(SURL, pinLifetime, fileLifetime, fileStorageType, spaceToken, - expectedFileSize, transferProtocols, overwriteOption, status, transferURL); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + /** + * @param requestToken + * @param fromSURL + * @param lifeTime + * @param dirOption + * @param desiredProtocols + * @param fileSize + * @param status + * @param transferURL + * @throws InvalidPtGDataAttributesException + */ + public IdentityPtPData( + GridUserInterface auth, + TSURL SURL, + TLifeTimeInSeconds pinLifetime, + TLifeTimeInSeconds fileLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes expectedFileSize, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtPDataAttributesException, InvalidFileTransferDataAttributesException, + InvalidSurlRequestDataAttributesException, IllegalArgumentException { - @Override - public GridUserInterface getUser() { + super( + SURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + 
transferURL); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - return auth; - } + @Override + public GridUserInterface getUser() { - @Override - public String getPrincipal() { + return auth; + } - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidBoLChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidBoLChunkDataAttributesException.java index 6046d423..2b152323 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidBoLChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidBoLChunkDataAttributesException.java @@ -1,81 +1,81 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import it.grid.storm.common.types.TURLPrefix; +import it.grid.storm.srm.types.TDirOption; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; -import it.grid.storm.srm.types.TDirOption; -import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TSizeInBytes; -import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of BoLChunkData are invalid, that is if any of the following is - * _null_: requestToken, fromSURL, lifeTime, numOfLevels, transferProtocols, - * fileSize, status, transferURL. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * BoLChunkData are invalid, that is if any of the following is _null_: requestToken, fromSURL, + * lifeTime, numOfLevels, transferProtocols, fileSize, status, transferURL. + * * @author CNAF * @date Aug 2009 * @version 1.0 */ public class InvalidBoLChunkDataAttributesException extends Exception { - private static final long serialVersionUID = 5657310881067434280L; + private static final long serialVersionUID = 5657310881067434280L; - // booleans that indicate whether the corresponding variable is null - private boolean nullRequestToken; - private boolean nullFromSURL; - private boolean nullLifeTime; - private boolean nullDirOption; - private boolean nullTransferProtocols; - private boolean nullFileSize; - private boolean nullStatus; - private boolean nullTransferURL; + // booleans that indicate whether the corresponding variable is null + private boolean nullRequestToken; + private boolean nullFromSURL; + private boolean nullLifeTime; + private boolean nullDirOption; + private boolean nullTransferProtocols; + private boolean nullFileSize; + private boolean nullStatus; + private boolean nullTransferURL; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidBoLChunkDataAttributesException(TRequestToken requestToken, - TSURL fromSURL, TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL) { + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidBoLChunkDataAttributesException( + TRequestToken requestToken, + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) { - nullRequestToken = requestToken == null; - nullFromSURL = fromSURL == null; - nullLifeTime = lifeTime == null; - nullDirOption = dirOption == null; - nullTransferProtocols = transferProtocols == null; - nullFileSize = fileSize == null; - nullStatus = status == null; - nullTransferURL = transferURL == null; - } + nullRequestToken = requestToken == null; + nullFromSURL = fromSURL == null; + nullLifeTime = lifeTime == null; + nullDirOption = dirOption == null; + nullTransferProtocols = transferProtocols == null; + nullFileSize = fileSize == null; + nullStatus = status == null; + nullTransferURL = transferURL == null; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Invalid BoLChunkData attributes: null-requestToken="); - sb.append(nullRequestToken); - sb.append("; nul-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-lifeTime="); - sb.append(nullLifeTime); - sb.append("; null-dirOption="); - sb.append(nullDirOption); - sb.append("; null-transferProtocols="); - sb.append(nullTransferProtocols); - sb.append("; null-fileSize="); - sb.append(nullFileSize); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("; null-transferURL="); - sb.append(nullTransferURL); - sb.append("."); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("Invalid BoLChunkData attributes: null-requestToken="); + sb.append(nullRequestToken); + sb.append("; null-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-lifeTime="); + sb.append(nullLifeTime); + sb.append("; null-dirOption="); + sb.append(nullDirOption); + sb.append("; null-transferProtocols="); + sb.append(nullTransferProtocols); + sb.append("; null-fileSize="); + sb.append(nullFileSize); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("; null-transferURL="); + sb.append(nullTransferURL); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidBoLDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidBoLDataAttributesException.java index 408d6e23..0f6d42f3 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidBoLDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidBoLDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -12,87 +11,102 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TTURL; -/** - * @author Michele Dibenedetto - * - */ -public class InvalidBoLDataAttributesException extends - InvalidFileTransferDataAttributesException { +/** @author Michele Dibenedetto */ +public class InvalidBoLDataAttributesException extends InvalidFileTransferDataAttributesException { - private static final long serialVersionUID = 8113403994527678088L; - // booleans that indicate whether the corresponding variable is null - protected boolean nullLifeTime; - protected boolean nullDirOption; - protected boolean nullFileSize; + private static final long serialVersionUID = 8113403994527678088L; + // booleans that indicate whether the corresponding variable is null + protected boolean nullLifeTime; + protected boolean nullDirOption; + protected boolean nullFileSize; - public InvalidBoLDataAttributesException(TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL) { + public InvalidBoLDataAttributesException( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) { - super(fromSURL, transferProtocols, status, transferURL); - init(lifeTime, dirOption, fileSize); - } + super(fromSURL, transferProtocols, status, transferURL); + init(lifeTime, dirOption, fileSize); + } - public InvalidBoLDataAttributesException(TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL, String message) { + public InvalidBoLDataAttributesException( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL, + String message) { - super(fromSURL, transferProtocols, status, transferURL, message); - init(lifeTime, dirOption, fileSize); - } + super(fromSURL, transferProtocols, status, transferURL, message); + init(lifeTime, dirOption, fileSize); + } - public InvalidBoLDataAttributesException(TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL, Throwable cause) { + public InvalidBoLDataAttributesException( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL, + Throwable cause) { - super(fromSURL, transferProtocols, status, transferURL, cause); - init(lifeTime, dirOption, fileSize); - } + super(fromSURL, transferProtocols, status, transferURL, cause); + init(lifeTime, dirOption, fileSize); + } - public InvalidBoLDataAttributesException(TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL, String message, Throwable cause) { + public InvalidBoLDataAttributesException( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL, + String message, + Throwable cause) { - super(fromSURL, transferProtocols, status, transferURL, message, cause); - init(lifeTime, 
dirOption, fileSize); - } + super(fromSURL, transferProtocols, status, transferURL, message, cause); + init(lifeTime, dirOption, fileSize); + } - private void init(TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TSizeInBytes fileSize) { + private void init(TLifeTimeInSeconds lifeTime, TDirOption dirOption, TSizeInBytes fileSize) { - nullLifeTime = lifeTime == null; - nullDirOption = dirOption == null; - nullFileSize = fileSize == null; - } + nullLifeTime = lifeTime == null; + nullDirOption = dirOption == null; + nullFileSize = fileSize == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("InvalidBoLDataAttributesException [nullLifeTime="); - builder.append(nullLifeTime); - builder.append(", nullDirOption="); - builder.append(nullDirOption); - builder.append(", nullFileSize="); - builder.append(nullFileSize); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidBoLDataAttributesException [nullLifeTime="); + builder.append(nullLifeTime); + builder.append(", nullDirOption="); + builder.append(nullDirOption); + builder.append(", nullFileSize="); + builder.append(nullFileSize); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidBoLPersistentChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidBoLPersistentChunkDataAttributesException.java index a552df2f..4107be9d 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidBoLPersistentChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidBoLPersistentChunkDataAttributesException.java @@ -1,80 +1,75 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TDirOption; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; -import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TSizeInBytes; -import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of PtPChunkData are invalid, that is if any of the following is - * _null_: requestToken, toSURL, lifetime, fileStorageType, spaceToken, - * knownSizeOfThisFile, TURLPrefix transferProtocols, overwriteOption, fileSize, - * status, transferURL. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * PtPChunkData are invalid, that is if any of the following is _null_: requestToken, toSURL, + * lifetime, fileStorageType, spaceToken, knownSizeOfThisFile, TURLPrefix transferProtocols, + * overwriteOption, fileSize, status, transferURL. + * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 2.0 */ -public class InvalidBoLPersistentChunkDataAttributesException extends - InvalidBoLDataAttributesException { - - private static final long serialVersionUID = -5117535717125685975L; - /** - * booleans that indicate whether the corresponding variable is null - */ - boolean nullRequestToken; +public class InvalidBoLPersistentChunkDataAttributesException + extends InvalidBoLDataAttributesException { - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidBoLPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL fromSURL, TLifeTimeInSeconds lifeTime, - TDirOption dirOption, TURLPrefix desiredProtocols, TSizeInBytes fileSize, - TReturnStatus status, TTURL transferURL) { + private static final long serialVersionUID = -5117535717125685975L; + /** booleans that indicate whether the corresponding variable is null */ + boolean nullRequestToken; - super(fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, - transferURL); - nullRequestToken = requestToken == null; - } + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidBoLPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) { - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + super(fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, transferURL); + nullRequestToken = requestToken == null; + } - StringBuilder builder = new StringBuilder(); - builder - .append("InvalidBoLPersistentChunkDataAttributesException [nullRequestToken="); - builder.append(nullRequestToken); - builder.append(", nullLifeTime="); - builder.append(nullLifeTime); - builder.append(", nullDirOption="); - builder.append(nullDirOption); - builder.append(", nullFileSize="); - builder.append(nullFileSize); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("InvalidBoLPersistentChunkDataAttributesException [nullRequestToken="); + builder.append(nullRequestToken); + builder.append(", nullLifeTime="); + builder.append(nullLifeTime); + builder.append(", nullDirOption="); + builder.append(nullDirOption); + builder.append(", nullFileSize="); + builder.append(nullFileSize); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidCopyChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidCopyChunkDataAttributesException.java index 86d657c7..be90cce8 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidCopyChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidCopyChunkDataAttributesException.java @@ -1,80 +1,80 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TRequestToken; -import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TFileStorageType; -import it.grid.storm.srm.types.TSpaceToken; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TOverwriteMode; +import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; +import it.grid.storm.srm.types.TSpaceToken; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of CopyChunkData are invalid, that is if any of the following is - * _null_: requestToken, fromsURL, toSURL, lifetime, fileStorageType, - * spaceToken, overwriteOption, status. 
- * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * CopyChunkData are invalid, that is if any of the following is _null_: requestToken, fromsURL, + * toSURL, lifetime, fileStorageType, spaceToken, overwriteOption, status. + * * @author EGRID - ICTP Trieste * @date September, 2005 * @version 2.0 */ public class InvalidCopyChunkDataAttributesException extends Exception { - private static final long serialVersionUID = 6786154038995023512L; + private static final long serialVersionUID = 6786154038995023512L; - // booleans that indicate whether the corresponding variable is null - private boolean nullRequestToken; - private boolean nullFromSURL; - private boolean nullToSURL; - private boolean nullLifetime; - private boolean nullFileStorageType; - private boolean nullSpaceToken; - private boolean nullOverwriteOption; - private boolean nullStatus; + // booleans that indicate whether the corresponding variable is null + private boolean nullRequestToken; + private boolean nullFromSURL; + private boolean nullToSURL; + private boolean nullLifetime; + private boolean nullFileStorageType; + private boolean nullSpaceToken; + private boolean nullOverwriteOption; + private boolean nullStatus; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidCopyChunkDataAttributesException(TRequestToken requestToken, - TSURL fromSURL, TSURL toSURL, TLifeTimeInSeconds lifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TOverwriteMode overwriteOption, TReturnStatus status) { + /** Constructor that requires the attributes that caused the exception to be thrown. */ + public InvalidCopyChunkDataAttributesException( + TRequestToken requestToken, + TSURL fromSURL, + TSURL toSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status) { - nullRequestToken = requestToken == null; - nullFromSURL = fromSURL == null; - nullToSURL = toSURL == null; - nullLifetime = lifetime == null; - nullFileStorageType = fileStorageType == null; - nullSpaceToken = spaceToken == null; - nullOverwriteOption = overwriteOption == null; - nullStatus = status == null; - } + nullRequestToken = requestToken == null; + nullFromSURL = fromSURL == null; + nullToSURL = toSURL == null; + nullLifetime = lifetime == null; + nullFileStorageType = fileStorageType == null; + nullSpaceToken = spaceToken == null; + nullOverwriteOption = overwriteOption == null; + nullStatus = status == null; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Invalid CopyChunkData attributes: null-requestToken="); - sb.append(nullRequestToken); - sb.append("; null-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-toSURL="); - sb.append(nullToSURL); - sb.append("; null-lifetime="); - sb.append(nullLifetime); - sb.append("; null-filestorageType="); - sb.append(nullFileStorageType); - sb.append("; null-spaceToken="); - sb.append(nullSpaceToken); - sb.append("; null-overwriteOption="); - sb.append(nullOverwriteOption); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("."); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("Invalid CopyChunkData attributes: null-requestToken="); + sb.append(nullRequestToken); + sb.append("; null-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-toSURL="); + sb.append(nullToSURL); + 
sb.append("; null-lifetime="); + sb.append(nullLifetime); + sb.append("; null-filestorageType="); + sb.append(nullFileStorageType); + sb.append("; null-spaceToken="); + sb.append(nullSpaceToken); + sb.append("; null-overwriteOption="); + sb.append(nullOverwriteOption); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidCopyDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidCopyDataAttributesException.java index c31ed841..d70c0ce8 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidCopyDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidCopyDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -11,94 +10,110 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSpaceToken; -/** - * @author Michele Dibenedetto - * - */ -public class InvalidCopyDataAttributesException extends - InvalidSurlRequestDataAttributesException { +/** @author Michele Dibenedetto */ +public class InvalidCopyDataAttributesException extends InvalidSurlRequestDataAttributesException { - private static final long serialVersionUID = -1217486426437414490L; - protected boolean nullDestinationSURL; - protected boolean nullLifetime; - protected boolean nullFileStorageType; - protected boolean nullSpaceToken; - protected boolean nullOverwriteOption; + private static final long serialVersionUID = -1217486426437414490L; + protected boolean nullDestinationSURL; + protected boolean nullLifetime; + protected boolean nullFileStorageType; + protected boolean nullSpaceToken; + protected boolean nullOverwriteOption; - public InvalidCopyDataAttributesException(TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, TReturnStatus status) { + public InvalidCopyDataAttributesException( + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status) { - super(SURL, status); - init(destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption); - } + super(SURL, status); + init(destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption); + } - public InvalidCopyDataAttributesException(TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus status, String message) { + public InvalidCopyDataAttributesException( + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + String message) { - super(SURL, status, message); - init(destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption); - } + super(SURL, status, message); + init(destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption); + } - public InvalidCopyDataAttributesException(TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus 
status, Throwable cause) { + public InvalidCopyDataAttributesException( + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + Throwable cause) { - super(SURL, status, cause); - init(destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption); - } + super(SURL, status, cause); + init(destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption); + } - public InvalidCopyDataAttributesException(TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus status, String message, Throwable cause) { + public InvalidCopyDataAttributesException( + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + String message, + Throwable cause) { - super(SURL, status, message, cause); - init(destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption); - } + super(SURL, status, message, cause); + init(destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption); + } - private void init(TSURL destinationSURL, TLifeTimeInSeconds lifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TOverwriteMode overwriteOption) { + private void init( + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption) { - nullDestinationSURL = destinationSURL == null; - nullLifetime = lifetime == null; - nullFileStorageType = fileStorageType == null; - nullSpaceToken = spaceToken == null; - nullOverwriteOption = overwriteOption == null; - } + nullDestinationSURL = destinationSURL == null; + nullLifetime = lifetime == null; + nullFileStorageType = fileStorageType == null; + nullSpaceToken = spaceToken == null; + nullOverwriteOption = overwriteOption == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("InvalidCopyDataAttributesException [nullDestinationSURL="); - builder.append(nullDestinationSURL); - builder.append(", nullLifetime="); - builder.append(nullLifetime); - builder.append(", nullFileStorageType="); - builder.append(nullFileStorageType); - builder.append(", nullSpaceToken="); - builder.append(nullSpaceToken); - builder.append(", nullOverwriteOption="); - builder.append(nullOverwriteOption); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidCopyDataAttributesException [nullDestinationSURL="); + builder.append(nullDestinationSURL); + builder.append(", nullLifetime="); + builder.append(nullLifetime); + builder.append(", nullFileStorageType="); + builder.append(nullFileStorageType); + builder.append(", nullSpaceToken="); + builder.append(nullSpaceToken); + builder.append(", nullOverwriteOption="); + builder.append(nullOverwriteOption); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullStatus="); + 
builder.append(nullStatus); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidCopyPersistentChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidCopyPersistentChunkDataAttributesException.java index 4259b4db..f5c09348 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidCopyPersistentChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidCopyPersistentChunkDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -12,94 +11,131 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSpaceToken; -/** - * @author Michele Dibenedetto - * - */ -public class InvalidCopyPersistentChunkDataAttributesException extends - InvalidCopyDataAttributesException { - - /** - * - */ - private static final long serialVersionUID = 1266996505954208061L; - private boolean nullRequestToken; - - public InvalidCopyPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, TReturnStatus status) { - - super(SURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status); - init(requestToken); - } - - public InvalidCopyPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus status, String message) { - - super(SURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status, message); - init(requestToken); - } - - public InvalidCopyPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus status, Throwable cause) { - - super(SURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status, cause); - init(requestToken); - } - - public InvalidCopyPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL SURL, TSURL destinationSURL, - TLifeTimeInSeconds lifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TOverwriteMode overwriteOption, - TReturnStatus status, String message, Throwable cause) { - - super(SURL, destinationSURL, lifetime, fileStorageType, spaceToken, - overwriteOption, status, message, cause); - init(requestToken); - } - - private void init(TRequestToken requestToken) { - - nullRequestToken = requestToken == null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder - .append("InvalidCopyPersistentChunkDataAttributesException [nullRequestToken="); - builder.append(nullRequestToken); - builder.append(", nullDestinationSURL="); - builder.append(nullDestinationSURL); - builder.append(", nullLifetime="); - builder.append(nullLifetime); - builder.append(", nullFileStorageType="); - builder.append(nullFileStorageType); - builder.append(", nullSpaceToken="); - 
builder.append(nullSpaceToken); - builder.append(", nullOverwriteOption="); - builder.append(nullOverwriteOption); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append("]"); - return builder.toString(); - } +/** @author Michele Dibenedetto */ +public class InvalidCopyPersistentChunkDataAttributesException + extends InvalidCopyDataAttributesException { + + /** */ + private static final long serialVersionUID = 1266996505954208061L; + + private boolean nullRequestToken; + + public InvalidCopyPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status) { + + super(SURL, destinationSURL, lifetime, fileStorageType, spaceToken, overwriteOption, status); + init(requestToken); + } + + public InvalidCopyPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + String message) { + + super( + SURL, + destinationSURL, + lifetime, + fileStorageType, + spaceToken, + overwriteOption, + status, + message); + init(requestToken); + } + + public InvalidCopyPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + Throwable cause) { + + super( + SURL, + destinationSURL, + lifetime, + fileStorageType, + spaceToken, + overwriteOption, + status, + cause); + init(requestToken); + } + + public InvalidCopyPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL SURL, + TSURL destinationSURL, + TLifeTimeInSeconds lifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TOverwriteMode overwriteOption, + TReturnStatus status, + String message, + Throwable cause) { + + super( + SURL, + destinationSURL, + lifetime, + fileStorageType, + spaceToken, + overwriteOption, + status, + message, + cause); + init(requestToken); + } + + private void init(TRequestToken requestToken) { + + nullRequestToken = requestToken == null; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("InvalidCopyPersistentChunkDataAttributesException [nullRequestToken="); + builder.append(nullRequestToken); + builder.append(", nullDestinationSURL="); + builder.append(nullDestinationSURL); + builder.append(", nullLifetime="); + builder.append(nullLifetime); + builder.append(", nullFileStorageType="); + builder.append(nullFileStorageType); + builder.append(", nullSpaceToken="); + builder.append(nullSpaceToken); + builder.append(", nullOverwriteOption="); + builder.append(nullOverwriteOption); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidFileTransferDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidFileTransferDataAttributesException.java index 0acc4dfb..408635e6 100644 --- 
a/src/main/java/it/grid/storm/catalogs/InvalidFileTransferDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidFileTransferDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -9,72 +8,79 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TTURL; -/** - * @author Michele Dibenedetto - */ -public class InvalidFileTransferDataAttributesException extends - InvalidSurlRequestDataAttributesException { +/** @author Michele Dibenedetto */ +public class InvalidFileTransferDataAttributesException + extends InvalidSurlRequestDataAttributesException { - private static final long serialVersionUID = 4416318501544415810L; - protected boolean nullTransferProtocols; - protected boolean nullTransferURL; + private static final long serialVersionUID = 4416318501544415810L; + protected boolean nullTransferProtocols; + protected boolean nullTransferURL; - public InvalidFileTransferDataAttributesException(TSURL SURL, - TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL) { + public InvalidFileTransferDataAttributesException( + TSURL SURL, TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL) { - super(SURL, status); - init(transferProtocols, transferURL); - } + super(SURL, status); + init(transferProtocols, transferURL); + } - public InvalidFileTransferDataAttributesException(TSURL SURL, - TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL, - String message) { + public InvalidFileTransferDataAttributesException( + TSURL SURL, + TURLPrefix transferProtocols, + TReturnStatus status, + TTURL transferURL, + String message) { - super(SURL, status, message); - init(transferProtocols, transferURL); - } + super(SURL, status, message); + init(transferProtocols, transferURL); + } - public InvalidFileTransferDataAttributesException(TSURL SURL, - TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL, - Throwable cause) { + public InvalidFileTransferDataAttributesException( + TSURL SURL, + TURLPrefix transferProtocols, + TReturnStatus status, + TTURL transferURL, + Throwable cause) { - super(SURL, status, cause); - init(transferProtocols, transferURL); - } + super(SURL, status, cause); + init(transferProtocols, transferURL); + } - public InvalidFileTransferDataAttributesException(TSURL SURL, - TURLPrefix transferProtocols, TReturnStatus status, TTURL transferURL, - String message, Throwable cause) { + public InvalidFileTransferDataAttributesException( + TSURL SURL, + TURLPrefix transferProtocols, + TReturnStatus status, + TTURL transferURL, + String message, + Throwable cause) { - super(SURL, status, message, cause); - init(transferProtocols, transferURL); - } + super(SURL, status, message, cause); + init(transferProtocols, transferURL); + } - private void init(TURLPrefix transferProtocols, TTURL transferURL) { + private void init(TURLPrefix transferProtocols, TTURL transferURL) { - nullTransferProtocols = transferProtocols == null; - nullTransferURL = transferURL == null; - } + nullTransferProtocols = transferProtocols == null; + nullTransferURL = transferURL == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String 
toString() { - StringBuilder builder = new StringBuilder(); - builder - .append("InvalidFileTransferDataAttributesException [nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidFileTransferDataAttributesException [nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidPtGDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidPtGDataAttributesException.java index 232f2e61..e4b01b6b 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidPtGDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidPtGDataAttributesException.java @@ -1,75 +1,73 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; -import it.grid.storm.srm.types.TDirOption; import it.grid.storm.common.types.TURLPrefix; -import it.grid.storm.srm.types.TSizeInBytes; +import it.grid.storm.srm.types.TDirOption; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; +import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of PtGChunkData are invalid, that is if any of the following is - * _null_: requestToken, fromSURL, lifeTime, numOfLevels, transferProtocols, - * fileSize, status, transferURL. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * PtGChunkData are invalid, that is if any of the following is _null_: requestToken, fromSURL, + * lifeTime, numOfLevels, transferProtocols, fileSize, status, transferURL. + * * @author EGRID - ICTP Trieste * @date March 23rd, 2005 * @version 3.0 */ -public class InvalidPtGDataAttributesException extends - InvalidFileTransferDataAttributesException { +public class InvalidPtGDataAttributesException extends InvalidFileTransferDataAttributesException { - private static final long serialVersionUID = -3484929474636108262L; - // booleans that indicate whether the corresponding variable is null - protected boolean nullLifeTime; - protected boolean nullDirOption; - protected boolean nullFileSize; + private static final long serialVersionUID = -3484929474636108262L; + // booleans that indicate whether the corresponding variable is null + protected boolean nullLifeTime; + protected boolean nullDirOption; + protected boolean nullFileSize; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. 
- */ - public InvalidPtGDataAttributesException(TSURL fromSURL, - TLifeTimeInSeconds lifeTime, TDirOption dirOption, - TURLPrefix transferProtocols, TSizeInBytes fileSize, TReturnStatus status, - TTURL transferURL) { + /** Constructor that requires the attributes that caused the exception to be thrown. */ + public InvalidPtGDataAttributesException( + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) { - super(fromSURL, transferProtocols, status, transferURL); - nullLifeTime = lifeTime == null; - nullDirOption = dirOption == null; - nullFileSize = fileSize == null; - } + super(fromSURL, transferProtocols, status, transferURL); + nullLifeTime = lifeTime == null; + nullDirOption = dirOption == null; + nullFileSize = fileSize == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("InvalidPtGChunkDataAttributesException [nullLifeTime="); - builder.append(nullLifeTime); - builder.append(", nullDirOption="); - builder.append(nullDirOption); - builder.append(", nullFileSize="); - builder.append(nullFileSize); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidPtGChunkDataAttributesException [nullLifeTime="); + builder.append(nullLifeTime); + builder.append(", nullDirOption="); + builder.append(nullDirOption); + builder.append(", nullFileSize="); + builder.append(nullFileSize); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidPtGPersistentChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidPtGPersistentChunkDataAttributesException.java index 4b37da94..c4b701e1 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidPtGPersistentChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidPtGPersistentChunkDataAttributesException.java @@ -1,79 +1,75 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TDirOption; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; -import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TSizeInBytes; -import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of PtPChunkData are invalid, that is if any of the following is - * _null_: requestToken, toSURL, lifetime, fileStorageType, spaceToken, - * knownSizeOfThisFile, TURLPrefix transferProtocols, overwriteOption, fileSize, - * status, transferURL. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * PtPChunkData are invalid, that is if any of the following is _null_: requestToken, toSURL, + * lifetime, fileStorageType, spaceToken, knownSizeOfThisFile, TURLPrefix transferProtocols, + * overwriteOption, fileSize, status, transferURL. + * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 2.0 */ -public class InvalidPtGPersistentChunkDataAttributesException extends - InvalidPtGDataAttributesException { +public class InvalidPtGPersistentChunkDataAttributesException + extends InvalidPtGDataAttributesException { - private static final long serialVersionUID = -5117535717125685975L; - /** - * booleans that indicate whether the corresponding variable is null - */ - boolean nullRequestToken; + private static final long serialVersionUID = -5117535717125685975L; + /** booleans that indicate whether the corresponding variable is null */ + boolean nullRequestToken; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidPtGPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL fromSURL, TLifeTimeInSeconds lifeTime, - TDirOption dirOption, TURLPrefix transferProtocols, TSizeInBytes fileSize, - TReturnStatus status, TTURL transferURL) { + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidPtGPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) { - super(fromSURL, lifeTime, dirOption, transferProtocols, fileSize, status, - transferURL); - nullRequestToken = requestToken == null; - } + super(fromSURL, lifeTime, dirOption, transferProtocols, fileSize, status, transferURL); + nullRequestToken = requestToken == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { - StringBuilder builder = new StringBuilder(); - builder - .append("InvalidPtGPersistentChunkDataAttributesException [nullRequestToken="); - builder.append(nullRequestToken); - builder.append(", nullLifeTime="); - builder.append(nullLifeTime); - builder.append(", nullDirOption="); - builder.append(nullDirOption); - builder.append(", nullFileSize="); - builder.append(nullFileSize); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidPtGPersistentChunkDataAttributesException [nullRequestToken="); + builder.append(nullRequestToken); + builder.append(", nullLifeTime="); + builder.append(nullLifeTime); + builder.append(", nullDirOption="); + builder.append(nullDirOption); + builder.append(", nullFileSize="); + builder.append(nullFileSize); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidPtPDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidPtPDataAttributesException.java index 0edbc8ce..59e16eaf 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidPtPDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidPtPDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -14,112 +13,157 @@ import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TTURL; -/** - * @author Michele Dibenedetto - */ -public class InvalidPtPDataAttributesException extends - InvalidFileTransferDataAttributesException { - - /** - * - */ - private static final long serialVersionUID = 1051060981188652979L; - protected boolean nullSpaceToken; - protected boolean nullPinLifetime; - protected boolean nullFileLifetime; - protected boolean nullFileStorageType; - protected boolean nullKnownSizeOfThisFile; - protected boolean nullOverwriteOption; - - public InvalidPtPDataAttributesException(TSURL toSURL, - TLifeTimeInSeconds fileLifetime, TLifeTimeInSeconds pinLifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TSizeInBytes knownSizeOfThisFile, TURLPrefix transferProtocols, - TOverwriteMode overwriteOption, TReturnStatus status, TTURL transferURL) { - - super(toSURL, transferProtocols, status, transferURL); - init(spaceToken, fileLifetime, pinLifetime, fileStorageType, - knownSizeOfThisFile, overwriteOption); - } - - public InvalidPtPDataAttributesException(TSURL toSURL, - TLifeTimeInSeconds fileLifetime, TLifeTimeInSeconds pinLifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TSizeInBytes knownSizeOfThisFile, TURLPrefix transferProtocols, - TOverwriteMode overwriteOption, TReturnStatus status, TTURL transferURL, - String message) { - - super(toSURL, transferProtocols, status, transferURL, message); - init(spaceToken, fileLifetime, pinLifetime, fileStorageType, - knownSizeOfThisFile, overwriteOption); - } - - public InvalidPtPDataAttributesException(TSURL toSURL, - TLifeTimeInSeconds fileLifetime, TLifeTimeInSeconds pinLifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TSizeInBytes knownSizeOfThisFile, TURLPrefix transferProtocols, - TOverwriteMode overwriteOption, TReturnStatus status, TTURL transferURL, - Throwable cause) { - - super(toSURL, transferProtocols, status, transferURL, cause); - init(spaceToken, fileLifetime, pinLifetime, fileStorageType, - knownSizeOfThisFile, overwriteOption); - } - - public InvalidPtPDataAttributesException(TSURL toSURL, - TLifeTimeInSeconds fileLifetime, TLifeTimeInSeconds pinLifetime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TSizeInBytes knownSizeOfThisFile, TURLPrefix transferProtocols, - TOverwriteMode overwriteOption, TReturnStatus status, TTURL transferURL, - String message, Throwable cause) { - - super(toSURL, transferProtocols, status, transferURL, message, cause); - init(spaceToken, fileLifetime, pinLifetime, fileStorageType, - knownSizeOfThisFile, overwriteOption); - } - - private void init(TSpaceToken spaceToken, TLifeTimeInSeconds fileLifetime, - TLifeTimeInSeconds pinLifetime, TFileStorageType fileStorageType, - TSizeInBytes knownSizeOfThisFile, TOverwriteMode overwriteOption) { - - nullSpaceToken = spaceToken == null; - nullPinLifetime = pinLifetime == null; - nullFileLifetime = fileLifetime == null; - nullFileStorageType = fileStorageType == null; - nullKnownSizeOfThisFile = knownSizeOfThisFile == null; - nullOverwriteOption = overwriteOption == null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("InvalidPtPDataAttributesException [nullSpaceToken="); - builder.append(nullSpaceToken); - builder.append(", nullPinLifetime="); - 
builder.append(nullPinLifetime); - builder.append(", nullFileLifetime="); - builder.append(nullFileLifetime); - builder.append(", nullFileStorageType="); - builder.append(nullFileStorageType); - builder.append(", nullKnownSizeOfThisFile="); - builder.append(nullKnownSizeOfThisFile); - builder.append(", nullOverwriteOption="); - builder.append(nullOverwriteOption); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } +/** @author Michele Dibenedetto */ +public class InvalidPtPDataAttributesException extends InvalidFileTransferDataAttributesException { + + /** */ + private static final long serialVersionUID = 1051060981188652979L; + + protected boolean nullSpaceToken; + protected boolean nullPinLifetime; + protected boolean nullFileLifetime; + protected boolean nullFileStorageType; + protected boolean nullKnownSizeOfThisFile; + protected boolean nullOverwriteOption; + + public InvalidPtPDataAttributesException( + TSURL toSURL, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes knownSizeOfThisFile, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL) { + + super(toSURL, transferProtocols, status, transferURL); + init( + spaceToken, + fileLifetime, + pinLifetime, + fileStorageType, + knownSizeOfThisFile, + overwriteOption); + } + + public InvalidPtPDataAttributesException( + TSURL toSURL, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes knownSizeOfThisFile, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL, + String message) { + + super(toSURL, transferProtocols, status, transferURL, message); + init( + spaceToken, + fileLifetime, + pinLifetime, + fileStorageType, + knownSizeOfThisFile, + overwriteOption); + } + + public InvalidPtPDataAttributesException( + TSURL toSURL, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes knownSizeOfThisFile, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL, + Throwable cause) { + + super(toSURL, transferProtocols, status, transferURL, cause); + init( + spaceToken, + fileLifetime, + pinLifetime, + fileStorageType, + knownSizeOfThisFile, + overwriteOption); + } + + public InvalidPtPDataAttributesException( + TSURL toSURL, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes knownSizeOfThisFile, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL, + String message, + Throwable cause) { + + super(toSURL, transferProtocols, status, transferURL, message, cause); + init( + spaceToken, + fileLifetime, + pinLifetime, + fileStorageType, + knownSizeOfThisFile, + overwriteOption); + } + + private void init( + TSpaceToken spaceToken, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSizeInBytes knownSizeOfThisFile, + 
TOverwriteMode overwriteOption) { + + nullSpaceToken = spaceToken == null; + nullPinLifetime = pinLifetime == null; + nullFileLifetime = fileLifetime == null; + nullFileStorageType = fileStorageType == null; + nullKnownSizeOfThisFile = knownSizeOfThisFile == null; + nullOverwriteOption = overwriteOption == null; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("InvalidPtPDataAttributesException [nullSpaceToken="); + builder.append(nullSpaceToken); + builder.append(", nullPinLifetime="); + builder.append(nullPinLifetime); + builder.append(", nullFileLifetime="); + builder.append(nullFileLifetime); + builder.append(", nullFileStorageType="); + builder.append(nullFileStorageType); + builder.append(", nullKnownSizeOfThisFile="); + builder.append(nullKnownSizeOfThisFile); + builder.append(", nullOverwriteOption="); + builder.append(nullOverwriteOption); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidPtPPersistentChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidPtPPersistentChunkDataAttributesException.java index 2d37dda1..a7f8f729 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidPtPPersistentChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidPtPPersistentChunkDataAttributesException.java @@ -1,90 +1,96 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TRequestToken; -import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TLifeTimeInSeconds; +import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TFileStorageType; -import it.grid.storm.srm.types.TSpaceToken; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TOverwriteMode; -import it.grid.storm.common.types.TURLPrefix; -import it.grid.storm.srm.types.TSizeInBytes; +import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; +import it.grid.storm.srm.types.TSizeInBytes; +import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of PtPChunkData are invalid, that is if any of the following is - * _null_: requestToken, toSURL, lifetime, fileStorageType, spaceToken, - * knownSizeOfThisFile, TURLPrefix transferProtocols, overwriteOption, fileSize, - * status, transferURL. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * PtPChunkData are invalid, that is if any of the following is _null_: requestToken, toSURL, + * lifetime, fileStorageType, spaceToken, knownSizeOfThisFile, TURLPrefix transferProtocols, + * overwriteOption, fileSize, status, transferURL. 
+ * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 2.0 */ -public class InvalidPtPPersistentChunkDataAttributesException extends - InvalidPtPDataAttributesException { +public class InvalidPtPPersistentChunkDataAttributesException + extends InvalidPtPDataAttributesException { - private static final long serialVersionUID = -5117535717125685975L; - /** - * booleans that indicate whether the corresponding variable is null - */ - boolean nullRequestToken; + private static final long serialVersionUID = -5117535717125685975L; + /** booleans that indicate whether the corresponding variable is null */ + boolean nullRequestToken; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidPtPPersistentChunkDataAttributesException( - TRequestToken requestToken, TSURL toSURL, TLifeTimeInSeconds fileLifetime, - TLifeTimeInSeconds pinLifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TSizeInBytes knownSizeOfThisFile, - TURLPrefix transferProtocols, TOverwriteMode overwriteOption, - TReturnStatus status, TTURL transferURL) { + /** Constructor that requires the attributes that caused the exception to be thrown. */ + public InvalidPtPPersistentChunkDataAttributesException( + TRequestToken requestToken, + TSURL toSURL, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes knownSizeOfThisFile, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL) { - super(toSURL, fileLifetime, pinLifetime, fileStorageType, spaceToken, - knownSizeOfThisFile, transferProtocols, overwriteOption, status, - transferURL); - nullRequestToken = requestToken == null; - } + super( + toSURL, + fileLifetime, + pinLifetime, + fileStorageType, + spaceToken, + knownSizeOfThisFile, + transferProtocols, + overwriteOption, + status, + transferURL); + nullRequestToken = requestToken == null; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { - StringBuilder builder = new StringBuilder(); - builder - .append("InvalidPtPPersistentChunkDataAttributesException [nullRequestToken="); - builder.append(nullRequestToken); - builder.append(", nullSpaceToken="); - builder.append(nullSpaceToken); - builder.append(", nullPinLifetime="); - builder.append(nullPinLifetime); - builder.append(", nullFileLifetime="); - builder.append(nullFileLifetime); - builder.append(", nullFileStorageType="); - builder.append(nullFileStorageType); - builder.append(", nullKnownSizeOfThisFile="); - builder.append(nullKnownSizeOfThisFile); - builder.append(", nullOverwriteOption="); - builder.append(nullOverwriteOption); - builder.append(", nullSURL="); - builder.append(nullSURL); - builder.append(", nullTransferProtocols="); - builder.append(nullTransferProtocols); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append(", nullTransferURL="); - builder.append(nullTransferURL); - builder.append("]"); - return builder.toString(); - } + StringBuilder builder = new StringBuilder(); + builder.append("InvalidPtPPersistentChunkDataAttributesException [nullRequestToken="); + builder.append(nullRequestToken); + builder.append(", nullSpaceToken="); + builder.append(nullSpaceToken); + builder.append(", nullPinLifetime="); + builder.append(nullPinLifetime); + 
builder.append(", nullFileLifetime="); + builder.append(nullFileLifetime); + builder.append(", nullFileStorageType="); + builder.append(nullFileStorageType); + builder.append(", nullKnownSizeOfThisFile="); + builder.append(nullKnownSizeOfThisFile); + builder.append(", nullOverwriteOption="); + builder.append(nullOverwriteOption); + builder.append(", nullSURL="); + builder.append(nullSURL); + builder.append(", nullTransferProtocols="); + builder.append(nullTransferProtocols); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append(", nullTransferURL="); + builder.append(nullTransferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidReducedBoLChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidReducedBoLChunkDataAttributesException.java index dc3725ab..3dc2ad03 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidReducedBoLChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidReducedBoLChunkDataAttributesException.java @@ -1,48 +1,43 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of ReducedBoLChunkData are invalid, that is if any is _null_. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * ReducedBoLChunkData are invalid, that is if any is _null_. + * * @author EGRID - ICTP Trieste * @date November, 2006 * @version 1.0 */ public class InvalidReducedBoLChunkDataAttributesException extends Exception { - private static final long serialVersionUID = -8145580437017768234L; - - // booleans that indicate whether the corresponding variable is null - private boolean nullFromSURL; - private boolean nullStatus; - - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidReducedBoLChunkDataAttributesException(TSURL fromSURL, - TReturnStatus status) { - - nullFromSURL = fromSURL == null; - nullStatus = status == null; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Invalid BoLChunkData attributes: null-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("."); - return sb.toString(); - } + private static final long serialVersionUID = -8145580437017768234L; + + // booleans that indicate whether the corresponding variable is null + private boolean nullFromSURL; + private boolean nullStatus; + + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidReducedBoLChunkDataAttributesException(TSURL fromSURL, TReturnStatus status) { + + nullFromSURL = fromSURL == null; + nullStatus = status == null; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Invalid BoLChunkData attributes: null-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidReducedCopyChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidReducedCopyChunkDataAttributesException.java index 65235db5..b8948851 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidReducedCopyChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidReducedCopyChunkDataAttributesException.java @@ -1,51 +1,47 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; /** - * This class represents an exception thrown when the attributes supplied to the - * constructor of ReducedCopyChunkData are invalid, that is if any of the - * following is _null_: fromsURL, toSURL, status. - * + * This class represents an exception thrown when the attributes supplied to the constructor of + * ReducedCopyChunkData are invalid, that is if any of the following is _null_: fromsURL, toSURL, + * status. + * * @author Michele Dibenedetto */ @SuppressWarnings("serial") public class InvalidReducedCopyChunkDataAttributesException extends Exception { - // booleans that indicate whether the corresponding variable is null - private boolean nullFromSURL; - private boolean nullToSURL; - private boolean nullStatus; + // booleans that indicate whether the corresponding variable is null + private boolean nullFromSURL; + private boolean nullToSURL; + private boolean nullStatus; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidReducedCopyChunkDataAttributesException(TSURL fromSURL, - TSURL toSURL, TReturnStatus status) { + /** Constructor that requires the attributes that caused the exception to be thrown. 
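/*
 * Illustrative usage sketch, based only on the declarations above: the reduced
 * chunk-data exceptions simply record which constructor argument was null and
 * expose that through toString(). The wrapper class and method names below are
 * hypothetical; the exception type and its message format come from this file.
 */
package it.grid.storm.catalogs;

import it.grid.storm.srm.types.TReturnStatus;
import it.grid.storm.srm.types.TSURL;

class ReducedBoLExceptionSketch {

  static String describeMissingAttributes() {
    TSURL fromSURL = null;       // deliberately missing attribute
    TReturnStatus status = null; // deliberately missing attribute
    InvalidReducedBoLChunkDataAttributesException e =
        new InvalidReducedBoLChunkDataAttributesException(fromSURL, status);
    // yields "Invalid BoLChunkData attributes: null-fromSURL=true; null-status=true."
    return e.toString();
  }
}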
*/ + public InvalidReducedCopyChunkDataAttributesException( + TSURL fromSURL, TSURL toSURL, TReturnStatus status) { - nullFromSURL = fromSURL == null; - nullToSURL = toSURL == null; - nullStatus = status == null; - } + nullFromSURL = fromSURL == null; + nullToSURL = toSURL == null; + nullStatus = status == null; + } - @Override - public String toString() { + @Override + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Invalid CopyChunkData attributes: null-requestToken="); - sb.append("; null-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-toSURL="); - sb.append(nullToSURL); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("."); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("Invalid CopyChunkData attributes: null-requestToken="); + sb.append("; null-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-toSURL="); + sb.append(nullToSURL); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidReducedPtGChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidReducedPtGChunkDataAttributesException.java index 6c96fe12..8440edb7 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidReducedPtGChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidReducedPtGChunkDataAttributesException.java @@ -1,48 +1,43 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of ReducedPtGChunkData are invalid, that is if any is _null_. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * ReducedPtGChunkData are invalid, that is if any is _null_. + * * @author EGRID - ICTP Trieste * @date November, 2006 * @version 1.0 */ public class InvalidReducedPtGChunkDataAttributesException extends Exception { - private static final long serialVersionUID = -7943458526292568164L; - - // booleans that indicate whether the corresponding variable is null - private boolean nullFromSURL; - private boolean nullStatus; - - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidReducedPtGChunkDataAttributesException(TSURL fromSURL, - TReturnStatus status) { - - nullFromSURL = fromSURL == null; - nullStatus = status == null; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Invalid PtGChunkData attributes: null-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("."); - return sb.toString(); - } + private static final long serialVersionUID = -7943458526292568164L; + + // booleans that indicate whether the corresponding variable is null + private boolean nullFromSURL; + private boolean nullStatus; + + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidReducedPtGChunkDataAttributesException(TSURL fromSURL, TReturnStatus status) { + + nullFromSURL = fromSURL == null; + nullStatus = status == null; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Invalid PtGChunkData attributes: null-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidReducedPtPChunkDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidReducedPtPChunkDataAttributesException.java index 638f7661..2c773ef0 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidReducedPtPChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidReducedPtPChunkDataAttributesException.java @@ -1,59 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TFileStorageType; import it.grid.storm.srm.types.TLifeTimeInSeconds; +import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of ReducedPtPChunkData are invalid, that is if any is _null_. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * ReducedPtPChunkData are invalid, that is if any is _null_. + * * @author EGRID - ICTP Trieste * @date January, 2007 * @version 1.0 */ public class InvalidReducedPtPChunkDataAttributesException extends Exception { - private static final long serialVersionUID = 4945626188325362854L; - - // booleans that indicate whether the corresponding variable is null - private boolean nullToSURL; - private boolean nullStatus; - private boolean nullFileStorageType; - private boolean nullFileLifetime; - - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidReducedPtPChunkDataAttributesException(TSURL toSURL, - TReturnStatus status, TFileStorageType fileStorageType, - TLifeTimeInSeconds fileLifetime) { - - nullFileStorageType = fileStorageType == null; - nullToSURL = toSURL == null; - nullStatus = status == null; - nullFileLifetime = fileLifetime == null; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Invalid PtPChunkData attributes: null-toSURL="); - sb.append(nullToSURL); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("; null-fileStorageType="); - sb.append(nullFileStorageType); - sb.append("; null-fileLifetime="); - sb.append(nullFileLifetime); - sb.append("."); - return sb.toString(); - } + private static final long serialVersionUID = 4945626188325362854L; + + // booleans that indicate whether the corresponding variable is null + private boolean nullToSURL; + private boolean nullStatus; + private boolean nullFileStorageType; + private boolean nullFileLifetime; + + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidReducedPtPChunkDataAttributesException( + TSURL toSURL, + TReturnStatus status, + TFileStorageType fileStorageType, + TLifeTimeInSeconds fileLifetime) { + + nullFileStorageType = fileStorageType == null; + nullToSURL = toSURL == null; + nullStatus = status == null; + nullFileLifetime = fileLifetime == null; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Invalid PtPChunkData attributes: null-toSURL="); + sb.append(nullToSURL); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("; null-fileStorageType="); + sb.append(nullFileStorageType); + sb.append("; null-fileLifetime="); + sb.append(nullFileLifetime); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidRequestSummaryDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidRequestSummaryDataAttributesException.java index 4608e2bf..3d9d9b28 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidRequestSummaryDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidRequestSummaryDataAttributesException.java @@ -1,54 +1,49 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TRequestType; -import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.griduser.GridUserInterface; +import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.srm.types.TRequestType; /** - * This class represents an Exception thrown when a RequestSummaryData object is - * created with any invalid attributes: null TRequestType, null TRequestToken, - * null VomsGridUser. - * + * This class represents an Exception thrown when a RequestSummaryData object is created with any + * invalid attributes: null TRequestType, null TRequestToken, null VomsGridUser. + * * @author EGRID - ICTP Trieste * @date March 18th, 2005 * @version 3.0 */ public class InvalidRequestSummaryDataAttributesException extends Exception { - private static final long serialVersionUID = -7729349713696058669L; - - // booleans true if the corresponding variablesare null or negative - private boolean nullRequestType = true; - private boolean nullRequestToken = true; - private boolean nullVomsGridUser = true; - - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. 
- */ - public InvalidRequestSummaryDataAttributesException(TRequestType requestType, - TRequestToken requestToken, GridUserInterface gu) { - - nullRequestType = (requestType == null); - nullRequestToken = (requestToken == null); - nullVomsGridUser = (gu == null); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Invalid RequestSummaryData attributes exception: "); - sb.append("nullRequestType="); - sb.append(nullRequestType); - sb.append("; nullRequestToken="); - sb.append(nullRequestToken); - sb.append("; nullVomsGridUser="); - sb.append(nullVomsGridUser); - return sb.toString(); - } + private static final long serialVersionUID = -7729349713696058669L; + + // booleans true if the corresponding variablesare null or negative + private boolean nullRequestType = true; + private boolean nullRequestToken = true; + private boolean nullVomsGridUser = true; + + /** Constructor that requires the attributes that caused the exception to be thrown. */ + public InvalidRequestSummaryDataAttributesException( + TRequestType requestType, TRequestToken requestToken, GridUserInterface gu) { + + nullRequestType = (requestType == null); + nullRequestToken = (requestToken == null); + nullVomsGridUser = (gu == null); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Invalid RequestSummaryData attributes exception: "); + sb.append("nullRequestType="); + sb.append(nullRequestType); + sb.append("; nullRequestToken="); + sb.append(nullRequestToken); + sb.append("; nullVomsGridUser="); + sb.append(nullVomsGridUser); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidRetrievedDataException.java b/src/main/java/it/grid/storm/catalogs/InvalidRetrievedDataException.java index ddce2846..8f0997bc 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidRetrievedDataException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidRetrievedDataException.java @@ -1,53 +1,63 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; /** - * Class that represents an Exception thrown by the ReservedSpaceCatalog when it - * is asked to retrieve info from the persistence but the raw data is invalid - * and does not allow a well-formed domain obejcts to be created. - * + * Class that represents an Exception thrown by the ReservedSpaceCatalog when it is asked to + * retrieve info from the persistence but the raw data is invalid and does not allow a well-formed + * domain obejcts to be created. + * * @author: EGRID ICTP * @version: 1.0 * @date: June 2005 */ public class InvalidRetrievedDataException extends Exception { - private static final long serialVersionUID = -3645913441787012438L; - - private String requestToken; - private String requestType; - private int totalFilesInThisRequest; - private int numOfQueuedRequests; - private int numOfProgressing; - private int numFinished; - private boolean isSuspended; - - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. 
- */ - public InvalidRetrievedDataException(String requestToken, String requestType, - int totalFilesInThisRequest, int numOfQueuedRequests, - int numOfProgressingRequests, int numFinished, boolean isSuspended) { - - this.requestToken = requestToken; - this.requestType = requestType; - this.totalFilesInThisRequest = totalFilesInThisRequest; - this.numOfQueuedRequests = numOfQueuedRequests; - this.numOfProgressing = numOfProgressingRequests; - this.numFinished = numFinished; - this.isSuspended = isSuspended; - } - - public String toString() { - - return "InvalidRetrievedDataException: token=" + requestToken + " type=" - + requestType + " total-files=" + totalFilesInThisRequest + " queued=" - + numOfQueuedRequests + " progressing=" + numOfProgressing + " finished=" - + numFinished + " isSusp=" + isSuspended; - } - + private static final long serialVersionUID = -3645913441787012438L; + + private String requestToken; + private String requestType; + private int totalFilesInThisRequest; + private int numOfQueuedRequests; + private int numOfProgressing; + private int numFinished; + private boolean isSuspended; + + /** Constructor that requires the attributes that caused the exception to be thrown. */ + public InvalidRetrievedDataException( + String requestToken, + String requestType, + int totalFilesInThisRequest, + int numOfQueuedRequests, + int numOfProgressingRequests, + int numFinished, + boolean isSuspended) { + + this.requestToken = requestToken; + this.requestType = requestType; + this.totalFilesInThisRequest = totalFilesInThisRequest; + this.numOfQueuedRequests = numOfQueuedRequests; + this.numOfProgressing = numOfProgressingRequests; + this.numFinished = numFinished; + this.isSuspended = isSuspended; + } + + public String toString() { + + return "InvalidRetrievedDataException: token=" + + requestToken + + " type=" + + requestType + + " total-files=" + + totalFilesInThisRequest + + " queued=" + + numOfQueuedRequests + + " progressing=" + + numOfProgressing + + " finished=" + + numFinished + + " isSusp=" + + isSuspended; + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidSpaceDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidSpaceDataAttributesException.java index 8c3fc0c1..9acdbd47 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidSpaceDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidSpaceDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -8,35 +7,32 @@ import it.grid.storm.srm.types.TSpaceToken; /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidSpaceDataAttributesException extends Exception { - private static final long serialVersionUID = -5317879266114702669L; - - private boolean nullAuth = true; - private boolean nullToken = true; + private static final long serialVersionUID = -5317879266114702669L; - public InvalidSpaceDataAttributesException(GridUserInterface guser) { + private boolean nullAuth = true; + private boolean nullToken = true; - nullAuth = (guser == null); - } + public InvalidSpaceDataAttributesException(GridUserInterface guser) { - public InvalidSpaceDataAttributesException(TSpaceToken token) { + nullAuth = (guser == null); + } - nullToken = (token == null); - } + public InvalidSpaceDataAttributesException(TSpaceToken token) { - public String toString() { + nullToken = (token == null); + } - return "null-Auth=" + nullAuth + "nullToken=" + nullToken; - } + public String toString() { + return "null-Auth=" + nullAuth + "nullToken=" + nullToken; + } } diff --git a/src/main/java/it/grid/storm/catalogs/InvalidSurlRequestDataAttributesException.java b/src/main/java/it/grid/storm/catalogs/InvalidSurlRequestDataAttributesException.java index b6d5a217..4c3a4dd4 100644 --- a/src/main/java/it/grid/storm/catalogs/InvalidSurlRequestDataAttributesException.java +++ b/src/main/java/it/grid/storm/catalogs/InvalidSurlRequestDataAttributesException.java @@ -1,71 +1,65 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class InvalidSurlRequestDataAttributesException extends Exception { - private static final long serialVersionUID = -8636768167720753989L; - protected boolean nullSURL; - protected boolean nullStatus; - - public InvalidSurlRequestDataAttributesException(TSURL SURL, - TReturnStatus status) { + private static final long serialVersionUID = -8636768167720753989L; + protected boolean nullSURL; + protected boolean nullStatus; - super(); - init(SURL, status); - } + public InvalidSurlRequestDataAttributesException(TSURL SURL, TReturnStatus status) { - public InvalidSurlRequestDataAttributesException(TSURL SURL, - TReturnStatus status, String message) { + super(); + init(SURL, status); + } - super(message); - init(SURL, status); - } + public InvalidSurlRequestDataAttributesException( + TSURL SURL, TReturnStatus status, String message) { - public InvalidSurlRequestDataAttributesException(TSURL SURL, - TReturnStatus status, Throwable cause) { + super(message); + init(SURL, status); + } - super(cause); - init(SURL, status); - } + public InvalidSurlRequestDataAttributesException( + TSURL SURL, TReturnStatus status, Throwable cause) { - public InvalidSurlRequestDataAttributesException(TSURL SURL, - TReturnStatus status, String message, Throwable cause) { + super(cause); + init(SURL, status); + } - super(message, cause); - init(SURL, status); - } + public InvalidSurlRequestDataAttributesException( + TSURL SURL, TReturnStatus status, String message, Throwable cause) { - private void init(TSURL SURL, TReturnStatus status) { + super(message, cause); + init(SURL, status); + } - nullSURL = SURL == null; - nullStatus = status == null; - } + private void init(TSURL SURL, 
TReturnStatus status) { - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { + nullSURL = SURL == null; + nullStatus = status == null; + } - StringBuilder builder = new StringBuilder(); - builder.append("InvalidSurlRequestDataAttributesException [nullSURL="); - builder.append(nullSURL); - builder.append(", nullStatus="); - builder.append(nullStatus); - builder.append("]"); - return builder.toString(); - } + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("InvalidSurlRequestDataAttributesException [nullSURL="); + builder.append(nullSURL); + builder.append(", nullStatus="); + builder.append(nullStatus); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/JiTData.java b/src/main/java/it/grid/storm/catalogs/JiTData.java index 4ef35773..71855bee 100644 --- a/src/main/java/it/grid/storm/catalogs/JiTData.java +++ b/src/main/java/it/grid/storm/catalogs/JiTData.java @@ -1,59 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; /** - * Class that represents data associated to JiT entries. It contains a String - * representing the file, an int representing the ACL, an int representing the - * user UID, an int representing the user GID. - * + * Class that represents data associated to JiT entries. It contains a String representing the file, + * an int representing the ACL, an int representing the user UID, an int representing the user GID. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date November 2006 */ public class JiTData { - private String file = ""; - private int uid = -1; - private int gid = -1; - private int acl = -1; + private String file = ""; + private int uid = -1; + private int gid = -1; + private int acl = -1; - /** - * Constructor requiring the complete name of the file as String, the acl as - * int, the uid and primary gid of the LocalUser bith as int. - */ - public JiTData(String file, int acl, int uid, int gid) { + /** + * Constructor requiring the complete name of the file as String, the acl as int, the uid and + * primary gid of the LocalUser bith as int. 
+ */ + public JiTData(String file, int acl, int uid, int gid) { - this.file = file; - this.acl = acl; - this.uid = uid; - this.gid = gid; - } + this.file = file; + this.acl = acl; + this.uid = uid; + this.gid = gid; + } - public String pfn() { + public String pfn() { - return file; - } + return file; + } - public int acl() { + public int acl() { - return acl; - } + return acl; + } - public int uid() { + public int uid() { - return uid; - } + return uid; + } - public int gid() { + public int gid() { - return gid; - } + return gid; + } - public String toString() { + public String toString() { - return "file=" + file + " acl=" + acl + " uid=" + uid + " gid=" + gid; - } + return "file=" + file + " acl=" + acl + " uid=" + uid + " gid=" + gid; + } } diff --git a/src/main/java/it/grid/storm/catalogs/MalformedGridUserException.java b/src/main/java/it/grid/storm/catalogs/MalformedGridUserException.java index 2fecdce6..e895b279 100644 --- a/src/main/java/it/grid/storm/catalogs/MalformedGridUserException.java +++ b/src/main/java/it/grid/storm/catalogs/MalformedGridUserException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; /** * This class represents an Exception thrown when the RequestSummaryCatalog cannot create a * VomsGridUser with the available data. - * + * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 1.0 diff --git a/src/main/java/it/grid/storm/catalogs/MultipleDataEntriesException.java b/src/main/java/it/grid/storm/catalogs/MultipleDataEntriesException.java index fa03e3c3..3abd6b75 100644 --- a/src/main/java/it/grid/storm/catalogs/MultipleDataEntriesException.java +++ b/src/main/java/it/grid/storm/catalogs/MultipleDataEntriesException.java @@ -1,37 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TRequestToken; /** - * Class that represents an Exception thrown by the ReservedSpaceCatalog when it - * finds more than one row of data for the specified request. - * + * Class that represents an Exception thrown by the ReservedSpaceCatalog when it finds more than one + * row of data for the specified request. + * * @author: EGRID ICTP * @version: 1.0 * @date: June 2005 */ public class MultipleDataEntriesException extends Exception { - private static final long serialVersionUID = 427636739469695868L; + private static final long serialVersionUID = 427636739469695868L; - private TRequestToken requestToken; + private TRequestToken requestToken; - /** - * Constructor tha trequires the attributes that caused the exception to be - * thrown. - */ - public MultipleDataEntriesException(TRequestToken requestToken) { + /** Constructor tha trequires the attributes that caused the exception to be thrown. 
*/ + public MultipleDataEntriesException(TRequestToken requestToken) { - this.requestToken = requestToken; - } + this.requestToken = requestToken; + } - public String toString() { - - return "MultipleDataEntriesException: requestToken=" + requestToken; - } + public String toString() { + return "MultipleDataEntriesException: requestToken=" + requestToken; + } } diff --git a/src/main/java/it/grid/storm/catalogs/NoDataFoundException.java b/src/main/java/it/grid/storm/catalogs/NoDataFoundException.java index 548f0df9..ce3137a2 100644 --- a/src/main/java/it/grid/storm/catalogs/NoDataFoundException.java +++ b/src/main/java/it/grid/storm/catalogs/NoDataFoundException.java @@ -1,37 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TRequestToken; /** - * Class that represents an Exception thrown by the ReservedSpaceCatalog when it - * finds no data for the specified request. - * + * Class that represents an Exception thrown by the ReservedSpaceCatalog when it finds no data for + * the specified request. + * * @author: EGRID ICTP * @version: 1.0 * @date: June 2005 */ public class NoDataFoundException extends Exception { - private static final long serialVersionUID = -718255813130266566L; + private static final long serialVersionUID = -718255813130266566L; - private TRequestToken requestToken; + private TRequestToken requestToken; - /** - * Constructor tha trequires the attributes that caused the exception to be - * thrown. - */ - public NoDataFoundException(TRequestToken requestToken) { + /** Constructor tha trequires the attributes that caused the exception to be thrown. */ + public NoDataFoundException(TRequestToken requestToken) { - this.requestToken = requestToken; - } + this.requestToken = requestToken; + } - public String toString() { - - return "NoDataFoundException: requestToken=" + requestToken; - } + public String toString() { + return "NoDataFoundException: requestToken=" + requestToken; + } } diff --git a/src/main/java/it/grid/storm/catalogs/OverwriteModeConverter.java b/src/main/java/it/grid/storm/catalogs/OverwriteModeConverter.java index 45ba54d1..dd09e240 100644 --- a/src/main/java/it/grid/storm/catalogs/OverwriteModeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/OverwriteModeConverter.java @@ -1,90 +1,80 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.util.Map; +import it.grid.storm.config.Configuration; +import it.grid.storm.srm.types.TOverwriteMode; import java.util.HashMap; import java.util.Iterator; -import it.grid.storm.srm.types.TOverwriteMode; -import it.grid.storm.config.Configuration; +import java.util.Map; /** - * Package private auxiliary class used to convert between DB and StoRM object - * model representation of TOverwriteMode. - * + * Package private auxiliary class used to convert between DB and StoRM object model representation + * of TOverwriteMode. 
+ * * @author: EGRID ICTP * @version: 2.0 * @date: June 2005 */ public class OverwriteModeConverter { - private Map DBtoSTORM = new HashMap(); - private Map STORMtoDB = new HashMap(); - - private static OverwriteModeConverter c = new OverwriteModeConverter(); + private Map DBtoSTORM = new HashMap(); + private Map STORMtoDB = new HashMap(); - /** - * Private constructor that fills in the conversion table; in particular, DB - * uses String values to represent TOverwriteMode: - * - * N NEVER A ALWAYS D WHENFILESAREDIFFERENT - */ - private OverwriteModeConverter() { + private static OverwriteModeConverter c = new OverwriteModeConverter(); - DBtoSTORM.put("N", TOverwriteMode.NEVER); - DBtoSTORM.put("A", TOverwriteMode.ALWAYS); - DBtoSTORM.put("D", TOverwriteMode.WHENFILESAREDIFFERENT); - Object aux; - for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext();) { - aux = i.next(); - STORMtoDB.put(DBtoSTORM.get(aux), aux); - } - } + /** + * Private constructor that fills in the conversion table; in particular, DB uses String values to + * represent TOverwriteMode: + * + *

N NEVER A ALWAYS D WHENFILESAREDIFFERENT + */ + private OverwriteModeConverter() { - /** - * Method that returns the only instance of OverwriteModeConverter. - */ - public static OverwriteModeConverter getInstance() { + DBtoSTORM.put("N", TOverwriteMode.NEVER); + DBtoSTORM.put("A", TOverwriteMode.ALWAYS); + DBtoSTORM.put("D", TOverwriteMode.WHENFILESAREDIFFERENT); + Object aux; + for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext(); ) { + aux = i.next(); + STORMtoDB.put(DBtoSTORM.get(aux), aux); + } + } - return c; - } + /** Method that returns the only instance of OverwriteModeConverter. */ + public static OverwriteModeConverter getInstance() { - /** - * Method that returns the int used by DPM to represent the given - * TOverwriteMode. "" is returned if no match is found. - */ - public String toDB(TOverwriteMode om) { + return c; + } - String aux = (String) STORMtoDB.get(om); - if (aux == null) - return ""; - return aux; - } + /** + * Method that returns the int used by DPM to represent the given TOverwriteMode. "" is returned + * if no match is found. + */ + public String toDB(TOverwriteMode om) { - /** - * Method that returns the TOverwriteMode used by StoRM to represent the - * supplied String representation of DPM. A configured default TOverwriteMode - * is returned in case no corresponding StoRM type is found. - * TOverwriteMode.EMPTY is returned if there are configuration errors. - */ - public TOverwriteMode toSTORM(String s) { + String aux = (String) STORMtoDB.get(om); + if (aux == null) return ""; + return aux; + } - TOverwriteMode aux = (TOverwriteMode) DBtoSTORM.get(s); - if (aux == null) - aux = (TOverwriteMode) DBtoSTORM.get(Configuration.getInstance() - .getDefaultOverwriteMode()); - if (aux == null) - return TOverwriteMode.EMPTY; - else - return aux; - } + /** + * Method that returns the TOverwriteMode used by StoRM to represent the supplied String + * representation of DPM. A configured default TOverwriteMode is returned in case no corresponding + * StoRM type is found. TOverwriteMode.EMPTY is returned if there are configuration errors. + */ + public TOverwriteMode toSTORM(String s) { - public String toString() { + TOverwriteMode aux = (TOverwriteMode) DBtoSTORM.get(s); + if (aux == null) + aux = (TOverwriteMode) DBtoSTORM.get(Configuration.getInstance().getDefaultOverwriteMode()); + if (aux == null) return TOverwriteMode.EMPTY; + else return aux; + } - return "OverWriteModeConverter.\nDBtoSTORM map:" + DBtoSTORM - + "\nSTORMtoDB map:" + STORMtoDB; - } + public String toString() { + return "OverWriteModeConverter.\nDBtoSTORM map:" + DBtoSTORM + "\nSTORMtoDB map:" + STORMtoDB; + } } diff --git a/src/main/java/it/grid/storm/catalogs/PersistentChunkData.java b/src/main/java/it/grid/storm/catalogs/PersistentChunkData.java index 95ed83b9..cd086f1a 100644 --- a/src/main/java/it/grid/storm/catalogs/PersistentChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/PersistentChunkData.java @@ -1,15 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; public interface PersistentChunkData extends ChunkData { - /** - * Method that returns the primary key in persistence, associated with This - * Chunk. - */ - public long getPrimaryKey(); - + /** Method that returns the primary key in persistence, associated with This Chunk. 
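/*
 * Usage sketch for the OverwriteModeConverter defined above, assuming only the
 * mapping documented there ("N"/"A"/"D" for NEVER/ALWAYS/WHENFILESAREDIFFERENT)
 * and the fallback behaviour of toSTORM(). The wrapper class name is hypothetical.
 */
package it.grid.storm.catalogs;

import it.grid.storm.srm.types.TOverwriteMode;

class OverwriteModeConverterSketch {

  static void roundTrip() {
    OverwriteModeConverter converter = OverwriteModeConverter.getInstance();
    String db = converter.toDB(TOverwriteMode.ALWAYS); // "A"
    TOverwriteMode storm = converter.toSTORM("D");     // WHENFILESAREDIFFERENT
    TOverwriteMode fallback = converter.toSTORM("X");  // configured default, or EMPTY on config errors
  }
}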
*/ + public long getPrimaryKey(); } diff --git a/src/main/java/it/grid/storm/catalogs/PinLifetimeConverter.java b/src/main/java/it/grid/storm/catalogs/PinLifetimeConverter.java index 8a111afd..f0aea8c1 100644 --- a/src/main/java/it/grid/storm/catalogs/PinLifetimeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/PinLifetimeConverter.java @@ -1,76 +1,69 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.config.Configuration; +import it.grid.storm.srm.types.TLifeTimeInSeconds; /** - * Class that handles DB representation of a TLifetimeInSeconds, in particular - * it takes care of protocol specification: - * - * 0/null/negative are translated as default StoRM configurable values. StoRMs - * Empty TLifeTimeInSeconds is translated as 0. - * + * Class that handles DB representation of a TLifetimeInSeconds, in particular it takes care of + * protocol specification: + * + *

0/null/negative are translated as default StoRM configurable values. StoRMs Empty + * TLifeTimeInSeconds is translated as 0. + * * @author EGRID ICTP * @version 1.0 * @date March 2007 */ public class PinLifetimeConverter { - private static PinLifetimeConverter stc = new PinLifetimeConverter(); // only - // instance - - private PinLifetimeConverter() { + private static PinLifetimeConverter stc = new PinLifetimeConverter(); // only + // instance - } + private PinLifetimeConverter() {} - /** - * Method that returns the only instance of SizeInBytesIntConverter - */ - public static PinLifetimeConverter getInstance() { + /** Method that returns the only instance of SizeInBytesIntConverter */ + public static PinLifetimeConverter getInstance() { - return stc; - } + return stc; + } - /** - * Method that translates the Empty TLifeTimeInSeconds into the empty - * representation of DB which is 0. Any other value is left as is. - */ - public int toDB(long l) { + /** + * Method that translates the Empty TLifeTimeInSeconds into the empty representation of DB which + * is 0. Any other value is left as is. + */ + public int toDB(long l) { - if (l == TLifeTimeInSeconds.makeEmpty().value()) - return 0; - return new Long(l).intValue(); - } + if (l == TLifeTimeInSeconds.makeEmpty().value()) return 0; + return new Long(l).intValue(); + } - /** - * Method that returns the long corresponding to the int value in the DB, - * except if it is 0, NULL or negative; a configurable default value is - * returned instead, corresponding to the getPinLifetimeMinimum() - * Configuration class method. - */ - public long toStoRM(int s) { + /** + * Method that returns the long corresponding to the int value in the DB, except if it is 0, NULL + * or negative; a configurable default value is returned instead, corresponding to the + * getPinLifetimeMinimum() Configuration class method. + */ + public long toStoRM(int s) { - if (s == 0) { - return Configuration.getInstance().getPinLifetimeDefault(); - } else if (s < 0) { - // The default is used also as a Minimum - return Configuration.getInstance().getPinLifetimeDefault(); - } - return new Integer(s).longValue(); - } + if (s == 0) { + return Configuration.getInstance().getPinLifetimeDefault(); + } else if (s < 0) { + // The default is used also as a Minimum + return Configuration.getInstance().getPinLifetimeDefault(); + } + return new Integer(s).longValue(); + } - public long toStoRM(long s) { + public long toStoRM(long s) { - if (s == 0) { - return Configuration.getInstance().getPinLifetimeDefault(); - } else if (s < 0) { - // The default is used also as a Minimum - return Configuration.getInstance().getPinLifetimeDefault(); - } - return s; - } + if (s == 0) { + return Configuration.getInstance().getPinLifetimeDefault(); + } else if (s < 0) { + // The default is used also as a Minimum + return Configuration.getInstance().getPinLifetimeDefault(); + } + return s; + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtGChunkCatalog.java b/src/main/java/it/grid/storm/catalogs/PtGChunkCatalog.java index 31723b38..8b9b4e69 100644 --- a/src/main/java/it/grid/storm/catalogs/PtGChunkCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/PtGChunkCatalog.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
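/*
 * Sketch of the pin-lifetime conversion rules documented for the
 * PinLifetimeConverter above: an empty TLifeTimeInSeconds is stored as 0 in the
 * DB, while 0 or negative DB values are read back as the configured default
 * (which also acts as a minimum). The wrapper class name is hypothetical.
 */
package it.grid.storm.catalogs;

class PinLifetimeConverterSketch {

  static void examples() {
    PinLifetimeConverter converter = PinLifetimeConverter.getInstance();
    long fromZero = converter.toStoRM(0);      // Configuration.getInstance().getPinLifetimeDefault()
    long fromNegative = converter.toStoRM(-5); // same fallback: the default doubles as a minimum
    long unchanged = converter.toStoRM(3600);  // positive values pass through as-is
  }
}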
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -24,22 +23,20 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Class that represents StoRMs PtGChunkCatalog: it collects PtGChunkData and - * provides methods for looking up a PtGChunkData based on TRequestToken, as - * well as for adding a new entry and removing an existing one. - * + * Class that represents StoRMs PtGChunkCatalog: it collects PtGChunkData and provides methods for + * looking up a PtGChunkData based on TRequestToken, as well as for adding a new entry and removing + * an existing one. + * * @author EGRID - ICTP Trieste * @date April 26th, 2005 * @version 4.0 @@ -47,798 +44,769 @@ @SuppressWarnings("unused") public class PtGChunkCatalog { - private static final Logger log = LoggerFactory - .getLogger(PtGChunkCatalog.class); - - /* Only instance of PtGChunkCatalog present in StoRM! */ - private static final PtGChunkCatalog cat = new PtGChunkCatalog(); - private final PtGChunkDAO dao = PtGChunkDAO.getInstance(); - - /* - * Timer object in charge of transiting expired requests from SRM_FILE_PINNED - * to SRM_RELEASED! - */ - private final Timer transiter = new Timer(); - /* Delay time before starting cleaning thread! */ - private final long delay = Configuration.getInstance() - .getTransitInitialDelay() * 1000; - /* Period of execution of cleaning! */ - private final long period = Configuration.getInstance() - .getTransitTimeInterval() * 1000; - - /** - * Private constructor that starts the internal timer needed to periodically - * check and transit requests whose pinLifetime has expired and are in - * SRM_FILE_PINNED, to SRM_RELEASED. - */ - private PtGChunkCatalog() { - - TimerTask transitTask = new TimerTask() { - - @Override - public void run() { - - transitExpiredSRM_FILE_PINNED(); - } - }; - transiter.scheduleAtFixedRate(transitTask, delay, period); - } - - /** - * Method that returns the only instance of PtGChunkCatalog available. - */ - public static PtGChunkCatalog getInstance() { - - return cat; - } - - /** - * Method used to update into Persistence a retrieved PtGChunkData. In case - * any error occurs, the operation does not proceed but no Exception is - * thrown. Error messages get logged. - * - * Only fileSize, StatusCode, errString and transferURL are updated. Likewise - * for the request pinLifetime. 
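/*
 * Sketch of the update flow described just above: once a PtG chunk has an
 * outcome, only the persisted fields (file size, status code, error string,
 * TURL, pin lifetime) are written back, and failures are logged rather than
 * thrown. The wrapper class is hypothetical, and SRM_FILE_PINNED is assumed to
 * be the TStatusCode constant for the pinned state referenced in this class;
 * the setters used here are the ones already exercised elsewhere in this file.
 */
package it.grid.storm.catalogs;

import it.grid.storm.srm.types.TReturnStatus;
import it.grid.storm.srm.types.TStatusCode;
import it.grid.storm.srm.types.TTURL;

class PtGChunkUpdateSketch {

  static void persistOutcome(PtGPersistentChunkData chunk, TTURL turl) {
    chunk.setStatus(new TReturnStatus(TStatusCode.SRM_FILE_PINNED, "file pinned"));
    chunk.setTransferURL(turl);
    PtGChunkCatalog.getInstance().update(chunk); // errors are logged, not rethrown
  }
}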
- */ - synchronized public void update(PtGPersistentChunkData chunkData) { - - PtGChunkDataTO to = new PtGChunkDataTO(); - /* Primary key needed by DAO Object */ - to.setPrimaryKey(chunkData.getPrimaryKey()); - to.setFileSize(chunkData.getFileSize().value()); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - to.setTurl(TURLConverter.getInstance().toDB( - chunkData.getTransferURL().toString())); - to.setLifeTime(PinLifetimeConverter.getInstance().toDB( - chunkData.getPinLifeTime().value())); - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - to.setClientDN(chunkData.getUser().getDn()); - if (chunkData.getUser() instanceof AbstractGridUser) { - if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { - to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()) - .getFQANsAsString()); - } - - } - dao.update(to); - } - - /** - * Refresh method. THIS IS A WORK IN PROGRESS!!!! This method have to synch - * the ChunkData information with the database status intended as the status - * code and the TURL - * - * @param auxTO - * @param PtGChunkData - * inputChunk - * @return PtGChunkData outputChunk - */ - synchronized public PtGPersistentChunkData refreshStatus( - PtGPersistentChunkData inputChunk) { - - PtGChunkDataTO chunkDataTO = dao.refresh(inputChunk.getPrimaryKey()); - - log.debug("PtG CHUNK CATALOG: retrieved data " + chunkDataTO); - if (chunkDataTO == null) { - log.warn("PtG CHUNK CATALOG! Empty TO found in persistence for specified " - + "request: {}", inputChunk.getPrimaryKey()); - return inputChunk; - } - - /* - * In this first version the only field updated is the Status. Once - * updated, the new status is rewritten into the input ChunkData - */ - - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM(chunkDataTO.status()); - if (code != TStatusCode.EMPTY) { - status = new TReturnStatus(code, chunkDataTO.errString()); - } - inputChunk.setStatus(status); - TTURL turl = null; - try { - turl = TTURL.makeFromString(chunkDataTO.turl()); - } catch (InvalidTTURLAttributesException e) { - log.info("PtGChunkCatalog (FALSE-ERROR-in-abort-refresh-status?):" - + " built a TURL with protocol NULL (retrieved from the DB..)"); - } - inputChunk.setTransferURL(turl); - return inputChunk; - } - - /** - * Method that returns a Collection of PtGChunkData Objects matching the - * supplied TRequestToken. - * - * If any of the data associated to the TRequestToken is not well formed and - * so does not allow a PtGChunkData Object to be created, then that part of - * the request is dropped and gets logged, and the processing continues with - * the next part. All valid chunks get returned: the others get dropped. - * - * If there are no chunks to process then an empty Collection is returned, and - * a messagge gets logged. - */ - synchronized public Collection lookup(TRequestToken rt) { - - Collection chunkTOs = dao.find(rt); - log.debug("PtG CHUNK CATALOG: retrieved data " + chunkTOs); - ArrayList list = new ArrayList(); - if (chunkTOs.isEmpty()) { - log.warn("PtG CHUNK CATALOG! 
No chunks found in persistence for " - + "specified request: {}", rt); - return list; - } - PtGPersistentChunkData chunk; - for (PtGChunkDataTO chunkTO : chunkTOs) { - chunk = makeOne(chunkTO, rt); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(this.completeTO(chunkTO, chunk)); - } catch (InvalidReducedPtGChunkDataAttributesException e) { - log.warn("PtG CHUNK CATALOG! unable to add missing informations on DB " - + "to the request: {}", e.getMessage()); - } - } - log.debug("PtG CHUNK CATALOG: returning " + list); - return list; - } - - /** - * Generates a PtGChunkData from the received PtGChunkDataTO - * - * @param chunkDataTO - * @param rt - * @return - */ - private PtGPersistentChunkData makeOne(PtGChunkDataTO chunkDataTO, - TRequestToken rt) { - - StringBuilder errorSb = new StringBuilder(); - TSURL fromSURL = null; - try { - fromSURL = TSURL.makeFromStringValidate(chunkDataTO.fromSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (chunkDataTO.normalizedStFN() != null) { - fromSURL.setNormalizedStFN(chunkDataTO.normalizedStFN()); - } - if (chunkDataTO.surlUniqueID() != null) { - fromSURL.setUniqueID(chunkDataTO.surlUniqueID().intValue()); - } - // lifeTime - TLifeTimeInSeconds lifeTime = null; - try { - long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM( - chunkDataTO.lifeTime()); - // Check for max value allowed - long max = Configuration.getInstance().getPinLifetimeMaximum(); - if (pinLifeTime > max) { - log.warn("PinLifeTime is greater than the max value allowed." - + " Drop the value to the max = {} seconds", max); - pinLifeTime = max; - } - lifeTime = TLifeTimeInSeconds.make((pinLifeTime), TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // dirOption - TDirOption dirOption = null; - try { - dirOption = new TDirOption(chunkDataTO.dirOption(), - chunkDataTO.allLevelRecursive(), chunkDataTO.numLevel()); - } catch (InvalidTDirOptionAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // transferProtocols - TURLPrefix transferProtocols = TransferProtocolListConverter - .toSTORM(chunkDataTO.protocolList()); - if (transferProtocols.size() == 0) { - errorSb.append("\nEmpty list of TransferProtocols or could " - + "not translate TransferProtocols!"); - /* fail construction of PtGChunkData! */ - transferProtocols = null; - } - // fileSize - TSizeInBytes fileSize = null; - try { - fileSize = TSizeInBytes.make(chunkDataTO.fileSize(), SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - chunkDataTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + chunkDataTO.status()); - } else { - status = new TReturnStatus(code, chunkDataTO.errString()); - } - GridUserInterface gridUser = null; - try { - if (chunkDataTO.vomsAttributes() != null - && !chunkDataTO.vomsAttributes().trim().equals("")) { - gridUser = GridUserManager.makeVOMSGridUser(chunkDataTO.clientDN(), - chunkDataTO.vomsAttributesArray()); - } else { - gridUser = GridUserManager.makeGridUser(chunkDataTO.clientDN()); - } - - } catch (IllegalArgumentException e) { - log.error("Unexpected error on voms grid user creation." 
- + " IllegalArgumentException: {}", e.getMessage(), e); - } - // transferURL - /* - * whatever is read is just meaningless because PtG will fill it in!!! So - * create an Empty TTURL by default! Vital to avoid problems with unknown - * DPM NULL/EMPTY logic policy! - */ - TTURL transferURL = TTURL.makeEmpty(); - // make PtGChunkData - PtGPersistentChunkData aux = null; - try { - aux = new PtGPersistentChunkData(gridUser, rt, fromSURL, lifeTime, - dirOption, transferProtocols, fileSize, status, transferURL); - aux.setPrimaryKey(chunkDataTO.primaryKey()); - } catch (InvalidSurlRequestDataAttributesException e) { - dao.signalMalformedPtGChunk(chunkDataTO); - log.warn("PtG CHUNK CATALOG! Retrieved malformed PtG chunk data from " - + "persistence. Dropping chunk from request {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - /** - * - * Adds to the received PtGChunkDataTO the normalized StFN and the SURL unique - * ID taken from the PtGChunkData - * - * @param chunkTO - * @param chunk - */ - private void completeTO(ReducedPtGChunkDataTO chunkTO, - final ReducedPtGChunkData chunk) { - - chunkTO.setNormalizedStFN(chunk.fromSURL().normalizedStFN()); - chunkTO.setSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); - } - - /** - * - * Creates a ReducedPtGChunkDataTO from the received PtGChunkDataTO and - * completes it with the normalized StFN and the SURL unique ID taken from the - * PtGChunkData - * - * @param chunkTO - * @param chunk - * @return - * @throws InvalidReducedPtGChunkDataAttributesException - */ - private ReducedPtGChunkDataTO completeTO(PtGChunkDataTO chunkTO, - final PtGPersistentChunkData chunk) - throws InvalidReducedPtGChunkDataAttributesException { - - ReducedPtGChunkDataTO reducedChunkTO = this.reduce(chunkTO); - this.completeTO(reducedChunkTO, this.reduce(chunk)); - return reducedChunkTO; - } - - /** - * Creates a ReducedPtGChunkData from the data contained in the received - * PtGChunkData - * - * @param chunk - * @return - * @throws InvalidReducedPtGChunkDataAttributesException - */ - private ReducedPtGChunkData reduce(PtGPersistentChunkData chunk) - throws InvalidReducedPtGChunkDataAttributesException { - - ReducedPtGChunkData reducedChunk = new ReducedPtGChunkData(chunk.getSURL(), - chunk.getStatus()); - reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); - return reducedChunk; - } - - /** - * Creates a ReducedPtGChunkDataTO from the data contained in the received - * PtGChunkDataTO - * - * @param chunkTO - * @return - */ - private ReducedPtGChunkDataTO reduce(PtGChunkDataTO chunkTO) { - - ReducedPtGChunkDataTO reducedChunkTO = new ReducedPtGChunkDataTO(); - reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); - reducedChunkTO.setFromSURL(chunkTO.fromSURL()); - reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); - reducedChunkTO.setSurlUniqueID(chunkTO.surlUniqueID()); - reducedChunkTO.setStatus(chunkTO.status()); - reducedChunkTO.setErrString(chunkTO.errString()); - return reducedChunkTO; - } - - /** - * Checks if the received PtGChunkDataTO contains the fields not set by the - * front end but required - * - * @param chunkTO - * @return - */ - private boolean isComplete(PtGChunkDataTO chunkTO) { - - return (chunkTO.normalizedStFN() != null) - && (chunkTO.surlUniqueID() != null); - } - - /** - * Checks if the received ReducedPtGChunkDataTO contains the fields not set by - * the front end but required - * - * @param reducedChunkTO - * @return - */ - private boolean isComplete(ReducedPtGChunkDataTO 
reducedChunkTO) { - - return (reducedChunkTO.normalizedStFN() != null) - && (reducedChunkTO.surlUniqueID() != null); - } - - /** - * Method that returns a Collection of ReducedPtGChunkData Objects associated - * to the supplied TRequestToken. - * - * If any of the data retrieved for a given chunk is not well formed and so - * does not allow a ReducedPtGChunkData Object to be created, then that chunk - * is dropped and gets logged, while processing continues with the next one. - * All valid chunks get returned: the others get dropped. - * - * If there are no chunks associated to the given TRequestToken, then an empty - * Collection is returned and a message gets logged. - */ - synchronized public Collection lookupReducedPtGChunkData( - TRequestToken rt) { - - Collection reducedChunkDataTOs = dao.findReduced(rt - .getValue()); - log.debug("PtG CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); - ArrayList list = new ArrayList(); - if (reducedChunkDataTOs.isEmpty()) { - log.debug("PtG CHUNK CATALOG! No chunks found in persistence for {}", rt); - } else { - ReducedPtGChunkData reducedChunkData = null; - for (ReducedPtGChunkDataTO reducedChunkDataTO : reducedChunkDataTOs) { - reducedChunkData = makeOneReduced(reducedChunkDataTO); - if (reducedChunkData != null) { - list.add(reducedChunkData); - if (!this.isComplete(reducedChunkDataTO)) { - this.completeTO(reducedChunkDataTO, reducedChunkData); - dao.updateIncomplete(reducedChunkDataTO); - } - } - } - log.debug("PtG CHUNK CATALOG: returning {}", list); - } - return list; - } - - public Collection lookupReducedPtGChunkData( - TRequestToken requestToken, Collection surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.findReduced( - requestToken, surlsUniqueIDs, surlsArray); - log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildReducedChunkDataList(chunkDataTOCollection); - } - - public Collection lookupPtGChunkData(TSURL surl, - GridUserInterface user) { - - return lookupPtGChunkData(Arrays.asList(new TSURL[] { surl }), user); - } - - public Collection lookupPtGChunkData(TSURL surl) { - - return lookupPtGChunkData(Arrays.asList(new TSURL[] { surl })); - } - - public Collection lookupPtGChunkData( - List surls, GridUserInterface user) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, - surlsArray, user.getDn()); - log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildChunkDataList(chunkDataTOCollection); - } - - public Collection lookupPtGChunkData(List surls) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, - surlsArray); - log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildChunkDataList(chunkDataTOCollection); - } - - private Collection buildChunkDataList( - Collection 
chunkDataTOCollection) { - - ArrayList list = new ArrayList(); - PtGPersistentChunkData chunk; - for (PtGChunkDataTO chunkTO : chunkDataTOCollection) { - chunk = makeOne(chunkTO); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(this.completeTO(chunkTO, chunk)); - } catch (InvalidReducedPtGChunkDataAttributesException e) { - log.warn("PtG CHUNK CATALOG! unable to add missing informations on " - + "DB to the request: ", e.getMessage()); - } - } - return list; - } - - private PtGPersistentChunkData makeOne(PtGChunkDataTO chunkTO) { - - try { - return makeOne(chunkTO, - new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); - } catch (InvalidTRequestTokenAttributesException e) { - throw new IllegalStateException( - "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " - + e); - } - } - - /** - * Method that returns a Collection of ReducedPtGChunkData Objects matching - * the supplied GridUser and Collection of TSURLs. If any of the data - * retrieved for a given chunk is not well formed and so does not allow a - * ReducedPtGChunkData Object to be created, then that chunk is dropped and - * gets logged, while processing continues with the next one. All valid chunks - * get returned: the others get dropped. If there are no chunks associated to - * the given GridUser and Collection of TSURLs, then an empty Collection is - * returned and a message gets logged. - */ - synchronized public Collection lookupReducedPtGChunkData( - GridUserInterface gu, Collection tsurlCollection) { - - int[] surlsUniqueIDs = new int[tsurlCollection.size()]; - String[] surls = new String[tsurlCollection.size()]; - int index = 0; - for (TSURL tsurl : tsurlCollection) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOCollection = dao.findReduced( - gu.getDn(), surlsUniqueIDs, surls); - log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); - return buildReducedChunkDataList(chunkDataTOCollection); - } - - private Collection buildReducedChunkDataList( - Collection chunkDataTOCollection) { - - ArrayList list = new ArrayList(); - ReducedPtGChunkData reducedChunkData; - for (ReducedPtGChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { - reducedChunkData = makeOneReduced(reducedChunkDataTO); - if (reducedChunkData != null) { - list.add(reducedChunkData); - if (!isComplete(reducedChunkDataTO)) { - completeTO(reducedChunkDataTO, reducedChunkData); - dao.updateIncomplete(reducedChunkDataTO); - } - } - } - log.debug("PtG CHUNK CATALOG: returning {}",list); - return list; - } - - /** - * - * - * @param reducedChunkDataTO - * @return - */ - private ReducedPtGChunkData makeOneReduced( - ReducedPtGChunkDataTO reducedChunkDataTO) { - - StringBuilder errorSb = new StringBuilder(); - // fromSURL - TSURL fromSURL = null; - try { - fromSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.fromSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (reducedChunkDataTO.normalizedStFN() != null) { - fromSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); - } - if (reducedChunkDataTO.surlUniqueID() != null) { - fromSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - reducedChunkDataTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was 
not recognised: " - + reducedChunkDataTO.status()); - } else { - status = new TReturnStatus(code, reducedChunkDataTO.errString()); - } - // make ReducedPtGChunkData - ReducedPtGChunkData aux = null; - try { - aux = new ReducedPtGChunkData(fromSURL, status); - aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); - } catch (InvalidReducedPtGChunkDataAttributesException e) { - log.warn("PtG CHUNK CATALOG! Retrieved malformed Reduced PtG chunk " - + "data from persistence: dropping reduced chunk..."); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - /** - * Method used to add into Persistence a new entry. The supplied PtGChunkData - * gets the primary key changed to the value assigned in Persistence. - * - * This method is intended to be used by a recursive PtG request: the parent - * request supplies a directory which must be expanded, so all new children - * requests resulting from the files in the directory are added into - * persistence. - * - * So this method does _not_ add a new SRM prepare_to_get request into the DB! - * - * The only children data written into the DB are: sourceSURL, TDirOption, - * statusCode and explanation. - * - * In case of any error the operation does not proceed, but no Exception is - * thrown! Proper messages get logged by underlaying DAO. - */ - synchronized public void addChild(PtGPersistentChunkData chunkData) { - - PtGChunkDataTO to = new PtGChunkDataTO(); - /* needed for now to find ID of request! Must be changed soon! */ - to.setRequestToken(chunkData.getRequestToken().toString()); - to.setFromSURL(chunkData.getSURL().toString()); - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - - to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); - to.setDirOption(chunkData.getDirOption().isDirectory()); - to.setNumLevel(chunkData.getDirOption().getNumLevel()); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - to.setClientDN(chunkData.getUser().getDn()); - if (chunkData.getUser() instanceof AbstractGridUser) { - if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { - to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()) - .getFQANsAsString()); - } - - } - /* add the entry and update the Primary Key field! */ - dao.addChild(to); - /* set the assigned PrimaryKey! */ - chunkData.setPrimaryKey(to.primaryKey()); - } - - /** - * Method used to add into Persistence a new entry. The supplied PtGChunkData - * gets the primary key changed to the value assigned in the Persistence. The - * method requires the GridUser to whom associate the added request. - * - * This method is intended to be used by an srmCopy request in push mode which - * implies a local srmPtG. The only fields from PtGChunkData that are - * considered are: the requestToken, the sourceSURL, the pinLifetime, the - * dirOption, the protocolList, the status and error string. - * - * So this method _adds_ a new SRM prepare_to_get request into the DB! - * - * In case of any error the operation does not proceed, but no Exception is - * thrown! The underlaying DAO logs proper error messagges. 
- */ - synchronized public void add(PtGPersistentChunkData chunkData, - GridUserInterface gu) { - - PtGChunkDataTO to = new PtGChunkDataTO(); - to.setRequestToken(chunkData.getRequestToken().toString()); - to.setFromSURL(chunkData.getSURL().toString()); - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - - to.setLifeTime(new Long(chunkData.getPinLifeTime().value()).intValue()); - to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); - to.setDirOption(chunkData.getDirOption().isDirectory()); - to.setNumLevel(chunkData.getDirOption().getNumLevel()); - to.setProtocolList(TransferProtocolListConverter.toDB(chunkData - .getTransferProtocols())); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - - to.setClientDN(chunkData.getUser().getDn()); - if (chunkData.getUser() instanceof AbstractGridUser) { - if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { - to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()) - .getFQANsAsString()); - } - - } - - dao.addNew(to, gu.getDn()); // add the entry and update the Primary Key - // field! - chunkData.setPrimaryKey(to.primaryKey()); // set the assigned PrimaryKey! - } - - /** - * Method used to establish if in Persistence there is a PtGChunkData working - * on the supplied SURL, and whose state is SRM_FILE_PINNED, in which case - * true is returned. In case none are found or there is any problem, false is - * returned. This method is intended to be used by srmMv. - */ - synchronized public boolean isSRM_FILE_PINNED(TSURL surl) { - - return (dao.numberInSRM_FILE_PINNED(surl.uniqueId()) > 0); - - } - - /** - * Method used to transit the specified Collection of ReducedPtGChunkData from - * SRM_FILE_PINNED to SRM_RELEASED. Chunks in any other starting state are not - * transited. In case of any error nothing is done, but proper error messages - * get logged by the underlaying DAO. - */ - synchronized public void transitSRM_FILE_PINNEDtoSRM_RELEASED( - Collection chunks, TRequestToken token) { - - if (chunks == null || chunks.isEmpty()) { - return; - } - long[] primaryKeys = new long[chunks.size()]; - int index = 0; - for (ReducedPtGChunkData chunkData : chunks) { - if (chunkData != null) { - primaryKeys[index] = chunkData.primaryKey(); - index++; - } - - } - dao.transitSRM_FILE_PINNEDtoSRM_RELEASED(primaryKeys, token); - for (ReducedPtGChunkData chunkData : chunks) { - if (chunkData != null) { - primaryKeys[index] = chunkData.primaryKey(); - index++; - } - } - } - - /** - * Method used to force transition to SRM_RELEASED from SRM_FILE_PINNED, of - * all PtG Requests whose pinLifetime has expired and the state still has not - * been changed (a user forgot to run srmReleaseFiles)! 
- */ - synchronized public void transitExpiredSRM_FILE_PINNED() { - - List expiredSurls = dao.transitExpiredSRM_FILE_PINNED(); - } - - public void updateStatus(TRequestToken requestToken, TSURL surl, - TStatusCode statusCode, String explanation) { - - dao.updateStatus(requestToken, new int[] { surl.uniqueId() }, - new String[] { surl.rawSurl() }, statusCode, explanation); - } - - public void updateFromPreviousStatus(TSURL surl, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - dao.updateStatusOnMatchingStatus(new int[] { surl.uniqueId() }, - new String[] { surl.rawSurl() }, expectedStatusCode, newStatusCode, - explanation); - - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, - newStatusCode, explanation); - } - - public void updateFromPreviousStatus(TRequestToken requestToken, - List surlList, TStatusCode expectedStatusCode, - TStatusCode newStatusCode) { - - int[] surlsUniqueIDs = new int[surlList.size()]; - String[] surls = new String[surlList.size()]; - int index = 0; - for (TSURL tsurl : surlList) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - dao.updateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode); - } - + private static final Logger log = LoggerFactory.getLogger(PtGChunkCatalog.class); + + /* Only instance of PtGChunkCatalog present in StoRM! */ + private static final PtGChunkCatalog cat = new PtGChunkCatalog(); + private final PtGChunkDAO dao = PtGChunkDAO.getInstance(); + + /* + * Timer object in charge of transiting expired requests from SRM_FILE_PINNED + * to SRM_RELEASED! + */ + private final Timer transiter = new Timer(); + /* Delay time before starting cleaning thread! */ + private final long delay = Configuration.getInstance().getTransitInitialDelay() * 1000; + /* Period of execution of cleaning! */ + private final long period = Configuration.getInstance().getTransitTimeInterval() * 1000; + + /** + * Private constructor that starts the internal timer needed to periodically check and transit + * requests whose pinLifetime has expired and are in SRM_FILE_PINNED, to SRM_RELEASED. + */ + private PtGChunkCatalog() { + + TimerTask transitTask = + new TimerTask() { + + @Override + public void run() { + + transitExpiredSRM_FILE_PINNED(); + } + }; + transiter.scheduleAtFixedRate(transitTask, delay, period); + } + + /** Method that returns the only instance of PtGChunkCatalog available. */ + public static PtGChunkCatalog getInstance() { + + return cat; + } + + /** + * Method used to update into Persistence a retrieved PtGChunkData. In case any error occurs, the + * operation does not proceed but no Exception is thrown. Error messages get logged. + * + *
<p>
Only fileSize, StatusCode, errString and transferURL are updated. Likewise for the request + * pinLifetime. + */ + public synchronized void update(PtGPersistentChunkData chunkData) { + + PtGChunkDataTO to = new PtGChunkDataTO(); + /* Primary key needed by DAO Object */ + to.setPrimaryKey(chunkData.getPrimaryKey()); + to.setFileSize(chunkData.getFileSize().value()); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + to.setTurl(TURLConverter.getInstance().toDB(chunkData.getTransferURL().toString())); + to.setLifeTime(PinLifetimeConverter.getInstance().toDB(chunkData.getPinLifeTime().value())); + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + to.setClientDN(chunkData.getUser().getDn()); + if (chunkData.getUser() instanceof AbstractGridUser) { + if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { + to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()).getFQANsAsString()); + } + } + dao.update(to); + } + + /** + * Refresh method. THIS IS A WORK IN PROGRESS!!!! This method have to synch the ChunkData + * information with the database status intended as the status code and the TURL + * + * @param auxTO + * @param PtGChunkData inputChunk + * @return PtGChunkData outputChunk + */ + public synchronized PtGPersistentChunkData refreshStatus(PtGPersistentChunkData inputChunk) { + + PtGChunkDataTO chunkDataTO = dao.refresh(inputChunk.getPrimaryKey()); + + log.debug("PtG CHUNK CATALOG: retrieved data " + chunkDataTO); + if (chunkDataTO == null) { + log.warn( + "PtG CHUNK CATALOG! Empty TO found in persistence for specified " + "request: {}", + inputChunk.getPrimaryKey()); + return inputChunk; + } + + /* + * In this first version the only field updated is the Status. Once + * updated, the new status is rewritten into the input ChunkData + */ + + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(chunkDataTO.status()); + if (code != TStatusCode.EMPTY) { + status = new TReturnStatus(code, chunkDataTO.errString()); + } + inputChunk.setStatus(status); + TTURL turl = null; + try { + turl = TTURL.makeFromString(chunkDataTO.turl()); + } catch (InvalidTTURLAttributesException e) { + log.info( + "PtGChunkCatalog (FALSE-ERROR-in-abort-refresh-status?):" + + " built a TURL with protocol NULL (retrieved from the DB..)"); + } + inputChunk.setTransferURL(turl); + return inputChunk; + } + + /** + * Method that returns a Collection of PtGChunkData Objects matching the supplied TRequestToken. + * + *
<p>
If any of the data associated to the TRequestToken is not well formed and so does not allow + * a PtGChunkData Object to be created, then that part of the request is dropped and gets logged, + * and the processing continues with the next part. All valid chunks get returned: the others get + * dropped. + * + *
<p>
If there are no chunks to process then an empty Collection is returned, and a messagge gets + * logged. + */ + public synchronized Collection lookup(TRequestToken rt) { + + Collection chunkTOs = dao.find(rt); + log.debug("PtG CHUNK CATALOG: retrieved data " + chunkTOs); + ArrayList list = new ArrayList(); + if (chunkTOs.isEmpty()) { + log.warn( + "PtG CHUNK CATALOG! No chunks found in persistence for " + "specified request: {}", rt); + return list; + } + PtGPersistentChunkData chunk; + for (PtGChunkDataTO chunkTO : chunkTOs) { + chunk = makeOne(chunkTO, rt); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(this.completeTO(chunkTO, chunk)); + } catch (InvalidReducedPtGChunkDataAttributesException e) { + log.warn( + "PtG CHUNK CATALOG! unable to add missing informations on DB " + "to the request: {}", + e.getMessage()); + } + } + log.debug("PtG CHUNK CATALOG: returning " + list); + return list; + } + + /** + * Generates a PtGChunkData from the received PtGChunkDataTO + * + * @param chunkDataTO + * @param rt + * @return + */ + private PtGPersistentChunkData makeOne(PtGChunkDataTO chunkDataTO, TRequestToken rt) { + + StringBuilder errorSb = new StringBuilder(); + TSURL fromSURL = null; + try { + fromSURL = TSURL.makeFromStringValidate(chunkDataTO.fromSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (chunkDataTO.normalizedStFN() != null) { + fromSURL.setNormalizedStFN(chunkDataTO.normalizedStFN()); + } + if (chunkDataTO.surlUniqueID() != null) { + fromSURL.setUniqueID(chunkDataTO.surlUniqueID().intValue()); + } + // lifeTime + TLifeTimeInSeconds lifeTime = null; + try { + long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM(chunkDataTO.lifeTime()); + // Check for max value allowed + long max = Configuration.getInstance().getPinLifetimeMaximum(); + if (pinLifeTime > max) { + log.warn( + "PinLifeTime is greater than the max value allowed." + + " Drop the value to the max = {} seconds", + max); + pinLifeTime = max; + } + lifeTime = TLifeTimeInSeconds.make((pinLifeTime), TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // dirOption + TDirOption dirOption = null; + try { + dirOption = + new TDirOption( + chunkDataTO.dirOption(), chunkDataTO.allLevelRecursive(), chunkDataTO.numLevel()); + } catch (InvalidTDirOptionAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // transferProtocols + TURLPrefix transferProtocols = + TransferProtocolListConverter.toSTORM(chunkDataTO.protocolList()); + if (transferProtocols.size() == 0) { + errorSb.append( + "\nEmpty list of TransferProtocols or could " + "not translate TransferProtocols!"); + /* fail construction of PtGChunkData! 
*/ + transferProtocols = null; + } + // fileSize + TSizeInBytes fileSize = null; + try { + fileSize = TSizeInBytes.make(chunkDataTO.fileSize(), SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(chunkDataTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + chunkDataTO.status()); + } else { + status = new TReturnStatus(code, chunkDataTO.errString()); + } + GridUserInterface gridUser = null; + try { + if (chunkDataTO.vomsAttributes() != null && !chunkDataTO.vomsAttributes().trim().equals("")) { + gridUser = + GridUserManager.makeVOMSGridUser( + chunkDataTO.clientDN(), chunkDataTO.vomsAttributesArray()); + } else { + gridUser = GridUserManager.makeGridUser(chunkDataTO.clientDN()); + } + + } catch (IllegalArgumentException e) { + log.error( + "Unexpected error on voms grid user creation." + " IllegalArgumentException: {}", + e.getMessage(), + e); + } + // transferURL + /* + * whatever is read is just meaningless because PtG will fill it in!!! So + * create an Empty TTURL by default! Vital to avoid problems with unknown + * DPM NULL/EMPTY logic policy! + */ + TTURL transferURL = TTURL.makeEmpty(); + // make PtGChunkData + PtGPersistentChunkData aux = null; + try { + aux = + new PtGPersistentChunkData( + gridUser, + rt, + fromSURL, + lifeTime, + dirOption, + transferProtocols, + fileSize, + status, + transferURL); + aux.setPrimaryKey(chunkDataTO.primaryKey()); + } catch (InvalidSurlRequestDataAttributesException e) { + dao.signalMalformedPtGChunk(chunkDataTO); + log.warn( + "PtG CHUNK CATALOG! Retrieved malformed PtG chunk data from " + + "persistence. Dropping chunk from request {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... 
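A minimal, self-contained sketch (illustration only, not part of the patched file): makeOne() signals a malformed transfer object by returning null, so callers such as lookup() simply skip those entries. The helper below captures just that drop-malformed part of the contract; all names are invented for the example.

import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

class MakeOneUsageSketch {
  // Convert each transfer object, dropping the ones the factory rejected by returning null.
  static <T, R> List<R> convertSkippingMalformed(List<T> tos, Function<T, R> makeOne) {
    return tos.stream()
        .map(makeOne)               // null marks a malformed TO
        .filter(Objects::nonNull)   // malformed entries are dropped, as lookup() does
        .collect(Collectors.toList());
  }
}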
+ return aux; + } + + /** + * Adds to the received PtGChunkDataTO the normalized StFN and the SURL unique ID taken from the + * PtGChunkData + * + * @param chunkTO + * @param chunk + */ + private void completeTO(ReducedPtGChunkDataTO chunkTO, final ReducedPtGChunkData chunk) { + + chunkTO.setNormalizedStFN(chunk.fromSURL().normalizedStFN()); + chunkTO.setSurlUniqueID(new Integer(chunk.fromSURL().uniqueId())); + } + + /** + * Creates a ReducedPtGChunkDataTO from the received PtGChunkDataTO and completes it with the + * normalized StFN and the SURL unique ID taken from the PtGChunkData + * + * @param chunkTO + * @param chunk + * @return + * @throws InvalidReducedPtGChunkDataAttributesException + */ + private ReducedPtGChunkDataTO completeTO( + PtGChunkDataTO chunkTO, final PtGPersistentChunkData chunk) + throws InvalidReducedPtGChunkDataAttributesException { + + ReducedPtGChunkDataTO reducedChunkTO = this.reduce(chunkTO); + this.completeTO(reducedChunkTO, this.reduce(chunk)); + return reducedChunkTO; + } + + /** + * Creates a ReducedPtGChunkData from the data contained in the received PtGChunkData + * + * @param chunk + * @return + * @throws InvalidReducedPtGChunkDataAttributesException + */ + private ReducedPtGChunkData reduce(PtGPersistentChunkData chunk) + throws InvalidReducedPtGChunkDataAttributesException { + + ReducedPtGChunkData reducedChunk = new ReducedPtGChunkData(chunk.getSURL(), chunk.getStatus()); + reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); + return reducedChunk; + } + + /** + * Creates a ReducedPtGChunkDataTO from the data contained in the received PtGChunkDataTO + * + * @param chunkTO + * @return + */ + private ReducedPtGChunkDataTO reduce(PtGChunkDataTO chunkTO) { + + ReducedPtGChunkDataTO reducedChunkTO = new ReducedPtGChunkDataTO(); + reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); + reducedChunkTO.setFromSURL(chunkTO.fromSURL()); + reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); + reducedChunkTO.setSurlUniqueID(chunkTO.surlUniqueID()); + reducedChunkTO.setStatus(chunkTO.status()); + reducedChunkTO.setErrString(chunkTO.errString()); + return reducedChunkTO; + } + + /** + * Checks if the received PtGChunkDataTO contains the fields not set by the front end but required + * + * @param chunkTO + * @return + */ + private boolean isComplete(PtGChunkDataTO chunkTO) { + + return (chunkTO.normalizedStFN() != null) && (chunkTO.surlUniqueID() != null); + } + + /** + * Checks if the received ReducedPtGChunkDataTO contains the fields not set by the front end but + * required + * + * @param reducedChunkTO + * @return + */ + private boolean isComplete(ReducedPtGChunkDataTO reducedChunkTO) { + + return (reducedChunkTO.normalizedStFN() != null) && (reducedChunkTO.surlUniqueID() != null); + } + + /** + * Method that returns a Collection of ReducedPtGChunkData Objects associated to the supplied + * TRequestToken. + * + *
<p>
If any of the data retrieved for a given chunk is not well formed and so does not allow a + * ReducedPtGChunkData Object to be created, then that chunk is dropped and gets logged, while + * processing continues with the next one. All valid chunks get returned: the others get dropped. + * + *
<p>
If there are no chunks associated to the given TRequestToken, then an empty Collection is + * returned and a message gets logged. + */ + public synchronized Collection lookupReducedPtGChunkData(TRequestToken rt) { + + Collection reducedChunkDataTOs = dao.findReduced(rt.getValue()); + log.debug("PtG CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); + ArrayList list = new ArrayList(); + if (reducedChunkDataTOs.isEmpty()) { + log.debug("PtG CHUNK CATALOG! No chunks found in persistence for {}", rt); + } else { + ReducedPtGChunkData reducedChunkData = null; + for (ReducedPtGChunkDataTO reducedChunkDataTO : reducedChunkDataTOs) { + reducedChunkData = makeOneReduced(reducedChunkDataTO); + if (reducedChunkData != null) { + list.add(reducedChunkData); + if (!this.isComplete(reducedChunkDataTO)) { + this.completeTO(reducedChunkDataTO, reducedChunkData); + dao.updateIncomplete(reducedChunkDataTO); + } + } + } + log.debug("PtG CHUNK CATALOG: returning {}", list); + } + return list; + } + + public Collection lookupReducedPtGChunkData( + TRequestToken requestToken, Collection surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.findReduced(requestToken, surlsUniqueIDs, surlsArray); + log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildReducedChunkDataList(chunkDataTOCollection); + } + + public Collection lookupPtGChunkData(TSURL surl, GridUserInterface user) { + + return lookupPtGChunkData(Arrays.asList(new TSURL[] {surl}), user); + } + + public Collection lookupPtGChunkData(TSURL surl) { + + return lookupPtGChunkData(Arrays.asList(new TSURL[] {surl})); + } + + public Collection lookupPtGChunkData( + List surls, GridUserInterface user) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.find(surlsUniqueIDs, surlsArray, user.getDn()); + log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildChunkDataList(chunkDataTOCollection); + } + + public Collection lookupPtGChunkData(List surls) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = dao.find(surlsUniqueIDs, surlsArray); + log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildChunkDataList(chunkDataTOCollection); + } + + private Collection buildChunkDataList( + Collection chunkDataTOCollection) { + + ArrayList list = new ArrayList(); + PtGPersistentChunkData chunk; + for (PtGChunkDataTO chunkTO : chunkDataTOCollection) { + chunk = makeOne(chunkTO); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(this.completeTO(chunkTO, chunk)); + } catch (InvalidReducedPtGChunkDataAttributesException e) { + log.warn( + "PtG CHUNK CATALOG! 
unable to add missing informations on " + "DB to the request: ", + e.getMessage()); + } + } + return list; + } + + private PtGPersistentChunkData makeOne(PtGChunkDataTO chunkTO) { + + try { + return makeOne(chunkTO, new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); + } catch (InvalidTRequestTokenAttributesException e) { + throw new IllegalStateException( + "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " + e); + } + } + + /** + * Method that returns a Collection of ReducedPtGChunkData Objects matching the supplied GridUser + * and Collection of TSURLs. If any of the data retrieved for a given chunk is not well formed and + * so does not allow a ReducedPtGChunkData Object to be created, then that chunk is dropped and + * gets logged, while processing continues with the next one. All valid chunks get returned: the + * others get dropped. If there are no chunks associated to the given GridUser and Collection of + * TSURLs, then an empty Collection is returned and a message gets logged. + */ + public synchronized Collection lookupReducedPtGChunkData( + GridUserInterface gu, Collection tsurlCollection) { + + int[] surlsUniqueIDs = new int[tsurlCollection.size()]; + String[] surls = new String[tsurlCollection.size()]; + int index = 0; + for (TSURL tsurl : tsurlCollection) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOCollection = + dao.findReduced(gu.getDn(), surlsUniqueIDs, surls); + log.debug("PtG CHUNK CATALOG: retrieved data {}", chunkDataTOCollection); + return buildReducedChunkDataList(chunkDataTOCollection); + } + + private Collection buildReducedChunkDataList( + Collection chunkDataTOCollection) { + + ArrayList list = new ArrayList(); + ReducedPtGChunkData reducedChunkData; + for (ReducedPtGChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { + reducedChunkData = makeOneReduced(reducedChunkDataTO); + if (reducedChunkData != null) { + list.add(reducedChunkData); + if (!isComplete(reducedChunkDataTO)) { + completeTO(reducedChunkDataTO, reducedChunkData); + dao.updateIncomplete(reducedChunkDataTO); + } + } + } + log.debug("PtG CHUNK CATALOG: returning {}", list); + return list; + } + + /** + * @param reducedChunkDataTO + * @return + */ + private ReducedPtGChunkData makeOneReduced(ReducedPtGChunkDataTO reducedChunkDataTO) { + + StringBuilder errorSb = new StringBuilder(); + // fromSURL + TSURL fromSURL = null; + try { + fromSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.fromSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (reducedChunkDataTO.normalizedStFN() != null) { + fromSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); + } + if (reducedChunkDataTO.surlUniqueID() != null) { + fromSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(reducedChunkDataTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + reducedChunkDataTO.status()); + } else { + status = new TReturnStatus(code, reducedChunkDataTO.errString()); + } + // make ReducedPtGChunkData + ReducedPtGChunkData aux = null; + try { + aux = new ReducedPtGChunkData(fromSURL, status); + aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); + } catch (InvalidReducedPtGChunkDataAttributesException e) { + log.warn( + "PtG CHUNK CATALOG! 
Retrieved malformed Reduced PtG chunk " + + "data from persistence: dropping reduced chunk..."); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... + return aux; + } + + /** + * Method used to add into Persistence a new entry. The supplied PtGChunkData gets the primary key + * changed to the value assigned in Persistence. + * + *
<p>
This method is intended to be used by a recursive PtG request: the parent request supplies a + * directory which must be expanded, so all new children requests resulting from the files in the + * directory are added into persistence. + * + *
<p>
So this method does _not_ add a new SRM prepare_to_get request into the DB! + * + *
<p>
The only children data written into the DB are: sourceSURL, TDirOption, statusCode and + * explanation. + * + *
<p>
In case of any error the operation does not proceed, but no Exception is thrown! Proper + * messages get logged by underlaying DAO. + */ + public synchronized void addChild(PtGPersistentChunkData chunkData) { + + PtGChunkDataTO to = new PtGChunkDataTO(); + /* needed for now to find ID of request! Must be changed soon! */ + to.setRequestToken(chunkData.getRequestToken().toString()); + to.setFromSURL(chunkData.getSURL().toString()); + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + + to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); + to.setDirOption(chunkData.getDirOption().isDirectory()); + to.setNumLevel(chunkData.getDirOption().getNumLevel()); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + to.setClientDN(chunkData.getUser().getDn()); + if (chunkData.getUser() instanceof AbstractGridUser) { + if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { + to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()).getFQANsAsString()); + } + } + /* add the entry and update the Primary Key field! */ + dao.addChild(to); + /* set the assigned PrimaryKey! */ + chunkData.setPrimaryKey(to.primaryKey()); + } + + /** + * Method used to add into Persistence a new entry. The supplied PtGChunkData gets the primary key + * changed to the value assigned in the Persistence. The method requires the GridUser to whom + * associate the added request. + * + *
<p>
This method is intended to be used by an srmCopy request in push mode which implies a local + * srmPtG. The only fields from PtGChunkData that are considered are: the requestToken, the + * sourceSURL, the pinLifetime, the dirOption, the protocolList, the status and error string. + * + *
<p>
So this method _adds_ a new SRM prepare_to_get request into the DB! + * + *
<p>
In case of any error the operation does not proceed, but no Exception is thrown! The + * underlaying DAO logs proper error messagges. + */ + public synchronized void add(PtGPersistentChunkData chunkData, GridUserInterface gu) { + + PtGChunkDataTO to = new PtGChunkDataTO(); + to.setRequestToken(chunkData.getRequestToken().toString()); + to.setFromSURL(chunkData.getSURL().toString()); + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + + to.setLifeTime(new Long(chunkData.getPinLifeTime().value()).intValue()); + to.setAllLevelRecursive(chunkData.getDirOption().isAllLevelRecursive()); + to.setDirOption(chunkData.getDirOption().isDirectory()); + to.setNumLevel(chunkData.getDirOption().getNumLevel()); + to.setProtocolList(TransferProtocolListConverter.toDB(chunkData.getTransferProtocols())); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + + to.setClientDN(chunkData.getUser().getDn()); + if (chunkData.getUser() instanceof AbstractGridUser) { + if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { + to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()).getFQANsAsString()); + } + } + + dao.addNew(to, gu.getDn()); // add the entry and update the Primary Key + // field! + chunkData.setPrimaryKey(to.primaryKey()); // set the assigned PrimaryKey! + } + + /** + * Method used to establish if in Persistence there is a PtGChunkData working on the supplied + * SURL, and whose state is SRM_FILE_PINNED, in which case true is returned. In case none are + * found or there is any problem, false is returned. This method is intended to be used by srmMv. + */ + public synchronized boolean isSRM_FILE_PINNED(TSURL surl) { + + return (dao.numberInSRM_FILE_PINNED(surl.uniqueId()) > 0); + } + + /** + * Method used to transit the specified Collection of ReducedPtGChunkData from SRM_FILE_PINNED to + * SRM_RELEASED. Chunks in any other starting state are not transited. In case of any error + * nothing is done, but proper error messages get logged by the underlaying DAO. + */ + public synchronized void transitSRM_FILE_PINNEDtoSRM_RELEASED( + Collection chunks, TRequestToken token) { + + if (chunks == null || chunks.isEmpty()) { + return; + } + long[] primaryKeys = new long[chunks.size()]; + int index = 0; + for (ReducedPtGChunkData chunkData : chunks) { + if (chunkData != null) { + primaryKeys[index] = chunkData.primaryKey(); + index++; + } + } + dao.transitSRM_FILE_PINNEDtoSRM_RELEASED(primaryKeys, token); + for (ReducedPtGChunkData chunkData : chunks) { + if (chunkData != null) { + primaryKeys[index] = chunkData.primaryKey(); + index++; + } + } + } + + /** + * Method used to force transition to SRM_RELEASED from SRM_FILE_PINNED, of all PtG Requests whose + * pinLifetime has expired and the state still has not been changed (a user forgot to run + * srmReleaseFiles)! 
+ */ + public synchronized void transitExpiredSRM_FILE_PINNED() { + + List expiredSurls = dao.transitExpiredSRM_FILE_PINNED(); + } + + public void updateStatus( + TRequestToken requestToken, TSURL surl, TStatusCode statusCode, String explanation) { + + dao.updateStatus( + requestToken, + new int[] {surl.uniqueId()}, + new String[] {surl.rawSurl()}, + statusCode, + explanation); + } + + public void updateFromPreviousStatus( + TSURL surl, TStatusCode expectedStatusCode, TStatusCode newStatusCode, String explanation) { + + dao.updateStatusOnMatchingStatus( + new int[] {surl.uniqueId()}, + new String[] {surl.rawSurl()}, + expectedStatusCode, + newStatusCode, + explanation); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, newStatusCode, explanation); + } + + public void updateFromPreviousStatus( + TRequestToken requestToken, + List surlList, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) { + + int[] surlsUniqueIDs = new int[surlList.size()]; + String[] surls = new String[surlList.size()]; + int index = 0; + for (TSURL tsurl : surlList) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + dao.updateStatusOnMatchingStatus( + requestToken, surlsUniqueIDs, surls, expectedStatusCode, newStatusCode); + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtGChunkDAO.java b/src/main/java/it/grid/storm/catalogs/PtGChunkDAO.java index 78a837bf..7d7103f4 100644 --- a/src/main/java/it/grid/storm/catalogs/PtGChunkDAO.java +++ b/src/main/java/it/grid/storm/catalogs/PtGChunkDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -17,10 +16,6 @@ import it.grid.storm.srm.types.TRequestType; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -38,1728 +33,1844 @@ import java.util.Map.Entry; import java.util.Timer; import java.util.TimerTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * DAO class for PtGChunkCatalog. This DAO is specifically designed to connect - * to a MySQL DB. The raw data found in those tables is pre-treated in order to - * turn it into the Object Model of StoRM. See Method comments for further info. - * - * BEWARE! DAO Adjusts for extra fields in the DB that are not present in the - * object model. - * + * DAO class for PtGChunkCatalog. This DAO is specifically designed to connect to a MySQL DB. The + * raw data found in those tables is pre-treated in order to turn it into the Object Model of StoRM. + * See Method comments for further info. + * + *
<p>
BEWARE! DAO Adjusts for extra fields in the DB that are not present in the object model. + * * @author EGRID ICTP * @version 3.0 * @date June 2005 */ public class PtGChunkDAO { - private static final Logger log = LoggerFactory.getLogger(PtGChunkDAO.class); - - /** String with the name of the class for the DB driver */ - private final String driver = Configuration.getInstance().getDBDriver(); - /** String referring to the URL of the DB */ - private final String url = Configuration.getInstance().getStormDbURL(); - /** String with the password for the DB */ - private final String password = Configuration.getInstance().getDBPassword(); - /** String with the name for the DB */ - private final String name = Configuration.getInstance().getDBUserName(); - - /** Connection to DB - WARNING!!! It is kept open all the time! */ - private Connection con = null; - /** boolean that tells whether reconnection is needed because of MySQL bug! */ - private boolean reconnect = false; - - /** Singleton instance */ - private final static PtGChunkDAO dao = new PtGChunkDAO(); - - /** timer thread that will run a task to alert when reconnecting is necessary! */ - private Timer clock = null; - /** - * timer task that will update the boolean signaling that a reconnection is - * needed! - */ - private TimerTask clockTask = null; - /** milliseconds that must pass before reconnecting to DB */ - private final long period = Configuration.getInstance() - .getDBReconnectPeriod() * 1000; - /** initial delay in milliseconds before starting timer */ - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - - private PtGChunkDAO() { - - setUpConnection(); - - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of the PtGChunkDAO. - */ - public static PtGChunkDAO getInstance() { - - return dao; - } - - /** - * Method used to add a new record to the DB: the supplied PtGChunkDataTO gets - * its primaryKey changed to the one assigned by the DB. - * - * The supplied PtGChunkData is used to fill in only the DB table where file - * specific info gets recorded: it does _not_ add a new request! So if - * spurious data is supplied, it will just stay there because of a lack of a - * parent request! - */ - public synchronized void addChild(PtGChunkDataTO to) { - - if (!checkConnection()) { - log.error("PTG CHUNK DAO: addChild - unable to get a valid connection!"); - return; - } - String str = null; - PreparedStatement id = null; // statement to find out the ID associated to - // the request token - ResultSet rsid = null; // result set containing the ID of the request. - try { - - // WARNING!!!! We are forced to run a query to get the ID of the request, - // which should NOT be so - // because the corresponding request object should have been changed with - // the extra field! However, it is not possible - // at the moment to perform such chage because of strict deadline and the - // change could wreak havoc - // the code. So we are forced to make this query!!! 
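The comment above explains why addChild() must first resolve the parent request's ID from its token: the transfer object does not carry the request_queue ID. A hypothetical sketch of the change the comment alludes to, with invented field and accessor names, for illustration only:

class RequestAwareChunkDataTO {
  // Hypothetical: the parent request_queue ID, set when the parent request row is created,
  // would let addChild() call fillPtGTables() directly and skip the SELECT below.
  private long requestQueueId;

  long requestQueueId() { return requestQueueId; }

  void setRequestQueueId(long id) { this.requestQueueId = id; }
}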
- - // begin transaction - con.setAutoCommit(false); - printWarnings(con.getWarnings()); - - // find ID of request corresponding to given RequestToken - str = "SELECT rq.ID FROM request_queue rq WHERE rq.r_token=?"; - - id = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - id.setString(1, to.requestToken()); - printWarnings(id.getWarnings()); - - log.debug("PTG CHUNK DAO: addChild; {}", id.toString()); - rsid = id.executeQuery(); - printWarnings(id.getWarnings()); - - /* ID of request in request_process! */ - int request_id = extractID(rsid); - int id_s = fillPtGTables(to, request_id); - - /* end transaction! */ - con.commit(); - printWarnings(con.getWarnings()); - con.setAutoCommit(true); - printWarnings(con.getWarnings()); - - // update primary key reading the generated key - to.setPrimaryKey(id_s); - } catch (SQLException e) { - log.error("PTG CHUNK DAO: unable to complete addChild! " - + "PtGChunkDataTO: {}; error: {}", to, e.getMessage(), e); - rollback(con); - } catch (Exception e) { - log.error("PTG CHUNK DAO: unable to complete addChild! " - + "PtGChunkDataTO: {}; error: {}", to, e.getMessage(), e); - rollback(con); - } finally { - close(rsid); - close(id); - } - } - - /** - * Method used to add a new record to the DB: the supplied PtGChunkDataTO gets - * its primaryKey changed to the one assigned by the DB. The client_dn must - * also be supplied as a String. - * - * The supplied PtGChunkData is used to fill in all the DB tables where file - * specific info gets recorded: it _adds_ a new request! - */ - public synchronized void addNew(PtGChunkDataTO to, String client_dn) { - - if (!checkConnection()) { - log.error("PTG CHUNK DAO: addNew - unable to get a valid connection!"); - return; - } - String str = null; - /* Result set containing the ID of the inserted new request */ - ResultSet rs_new = null; - /* Insert new request into process_request */ - PreparedStatement addNew = null; - /* Insert protocols for request. */ - PreparedStatement addProtocols = null; - try { - // begin transaction - con.setAutoCommit(false); - printWarnings(con.getWarnings()); - - // add to request_queue... - str = "INSERT INTO request_queue (config_RequestTypeID,client_dn,pinLifetime,status,errstring,r_token,nbreqfiles,timeStamp) VALUES (?,?,?,?,?,?,?,?)"; - addNew = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - printWarnings(con.getWarnings()); - /* Request type set to prepare to get! */ - addNew.setString(1, - RequestTypeConverter.getInstance().toDB(TRequestType.PREPARE_TO_GET)); - printWarnings(addNew.getWarnings()); - - addNew.setString(2, client_dn); - printWarnings(addNew.getWarnings()); - - addNew.setInt(3, to.lifeTime()); - printWarnings(addNew.getWarnings()); - - addNew.setInt( - 4, - StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_INPROGRESS)); - printWarnings(addNew.getWarnings()); - - addNew.setString(5, "New PtG Request resulting from srmCopy invocation."); - printWarnings(addNew.getWarnings()); - - addNew.setString(6, to.requestToken()); - printWarnings(addNew.getWarnings()); - - addNew.setInt(7, 1); // number of requested files set to 1! - printWarnings(addNew.getWarnings()); - - addNew.setTimestamp(8, new Timestamp(new Date().getTime())); - printWarnings(addNew.getWarnings()); - - log.trace("PTG CHUNK DAO: addNew; {}", addNew.toString()); - addNew.execute(); - printWarnings(addNew.getWarnings()); - - rs_new = addNew.getGeneratedKeys(); - int id_new = extractID(rs_new); - - // add protocols... 
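The request_queue insert just above relies on the standard JDBC generated-keys idiom that recurs throughout this DAO; condensed here into a self-contained sketch (a subset of columns, chosen purely for illustration):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class GeneratedKeySketch {
  // Insert a row and read back the auto-increment ID assigned by MySQL.
  static int insertReturningId(Connection con, String requestToken) throws SQLException {
    String sql = "INSERT INTO request_queue (r_token, nbreqfiles) VALUES (?, ?)";
    try (PreparedStatement ps = con.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
      ps.setString(1, requestToken);
      ps.setInt(2, 1);
      ps.executeUpdate();
      try (ResultSet keys = ps.getGeneratedKeys()) {
        if (!keys.next()) {
          throw new SQLException("no generated key returned");
        }
        return keys.getInt(1);
      }
    }
  }
}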
- str = "INSERT INTO request_TransferProtocols (request_queueID,config_ProtocolsID) VALUES (?,?)"; - addProtocols = con.prepareStatement(str); - printWarnings(con.getWarnings()); - for (Iterator i = to.protocolList().iterator(); i.hasNext();) { - addProtocols.setInt(1, id_new); - printWarnings(addProtocols.getWarnings()); - - addProtocols.setString(2, i.next()); - printWarnings(addProtocols.getWarnings()); - - log.trace("PTG CHUNK DAO: addNew; {}", addProtocols.toString()); - addProtocols.execute(); - printWarnings(addProtocols.getWarnings()); - } - - // addChild... - int id_s = fillPtGTables(to, id_new); - - // end transaction! - con.commit(); - printWarnings(con.getWarnings()); - con.setAutoCommit(true); - printWarnings(con.getWarnings()); - - // update primary key reading the generated key - to.setPrimaryKey(id_s); - } catch (SQLException e) { - log.error("PTG CHUNK DAO: Rolling back! Unable to complete addNew! " - + "PtGChunkDataTO: {}; error: {}", to, e.getMessage(), e); - rollback(con); - } catch (Exception e) { - log.error("PTG CHUNK DAO: unable to complete addNew! " - + "PtGChunkDataTO: {}; error: {}", to, e.getMessage(), e); - rollback(con); - } finally { - close(rs_new); - close(addNew); - close(addProtocols); - } - } - - /** - * To be used inside a transaction - * - * @param to - * @param requestQueueID - * @return - * @throws SQLException - * @throws Exception - */ - private synchronized int fillPtGTables(PtGChunkDataTO to, int requestQueueID) - throws SQLException, Exception { - - String str = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_do = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_g = null; - /* Result set containing the ID of the inserted */ - ResultSet rs_s = null; - /* insert TDirOption for request */ - PreparedStatement addDirOption = null; - /* insert request_Get for request */ - PreparedStatement addGet = null; - PreparedStatement addChild = null; - - try { - // first fill in TDirOption - str = "INSERT INTO request_DirOption (isSourceADirectory,allLevelRecursive,numOfLevels) VALUES (?,?,?)"; - addDirOption = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - printWarnings(con.getWarnings()); - addDirOption.setBoolean(1, to.dirOption()); - printWarnings(addDirOption.getWarnings()); - - addDirOption.setBoolean(2, to.allLevelRecursive()); - printWarnings(addDirOption.getWarnings()); - - addDirOption.setInt(3, to.numLevel()); - printWarnings(addDirOption.getWarnings()); - - log.trace("PTG CHUNK DAO: addNew; {}", addDirOption.toString()); - addDirOption.execute(); - printWarnings(addDirOption.getWarnings()); - - rs_do = addDirOption.getGeneratedKeys(); - int id_do = extractID(rs_do); - - // second fill in request_Get... sourceSURL and TDirOption! 
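For orientation, the three rows written by this method (request_DirOption above, request_Get and status_Get below) are the ones the find() query described later re-joins when chunks are read back. A hypothetical read-back statement, illustration only, using column names that appear in this class:

class ReadBackSketch {
  static final String READ_BACK_ONE_CHUNK =
      "SELECT sg.statusCode, sg.explanation, rg.sourceSURL, d.isSourceADirectory "
          + "FROM request_Get rg "
          + "JOIN status_Get sg ON sg.request_GetID = rg.ID "
          + "JOIN request_DirOption d ON rg.request_DirOptionID = d.ID "
          + "WHERE rg.request_queueID = ?";
}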
- str = "INSERT INTO request_Get (request_DirOptionID,request_queueID,sourceSURL,normalized_sourceSURL_StFN,sourceSURL_uniqueID) VALUES (?,?,?,?,?)"; - addGet = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - printWarnings(con.getWarnings()); - addGet.setInt(1, id_do); - printWarnings(addGet.getWarnings()); - - addGet.setInt(2, requestQueueID); - printWarnings(addGet.getWarnings()); - - addGet.setString(3, to.fromSURL()); - printWarnings(addGet.getWarnings()); - - addGet.setString(4, to.normalizedStFN()); - printWarnings(addGet.getWarnings()); - - addGet.setInt(5, to.surlUniqueID()); - printWarnings(addGet.getWarnings()); - - log.trace("PTG CHUNK DAO: addNew; {}", addGet.toString()); - addGet.execute(); - printWarnings(addGet.getWarnings()); - - rs_g = addGet.getGeneratedKeys(); - int id_g = extractID(rs_g); - - // third fill in status_Get... - str = "INSERT INTO status_Get (request_GetID,statusCode,explanation) VALUES (?,?,?)"; - addChild = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); - printWarnings(con.getWarnings()); - addChild.setInt(1, id_g); - printWarnings(addChild.getWarnings()); - - addChild.setInt(2, to.status()); - printWarnings(addChild.getWarnings()); - - addChild.setString(3, to.errString()); - printWarnings(addChild.getWarnings()); - - log.trace("PTG CHUNK DAO: addNew; {}", addChild.toString()); - addChild.execute(); - printWarnings(addChild.getWarnings()); - - return id_g; - } finally { - close(rs_do); - close(rs_g); - close(rs_s); - close(addDirOption); - close(addGet); - close(addChild); - } - } - - /** - * Method used to save the changes made to a retrieved PtGChunkDataTO, back - * into the MySQL DB. - * - * Only the fileSize, transferURL, statusCode and explanation, of status_Get - * table are written to the DB. Likewise for the request pinLifetime. - * - * In case of any error, an error message gets logged but no exception is - * thrown. - */ - public synchronized void update(PtGChunkDataTO to) { - - if (!checkConnection()) { - log.error("PTG CHUNK DAO: update - unable to get a valid connection!"); - return; - } - PreparedStatement updateFileReq = null; - try { - // ready updateFileReq... - updateFileReq = con - .prepareStatement("UPDATE request_queue rq JOIN (status_Get sg, request_Get rg) ON (rq.ID=rg.request_queueID AND sg.request_GetID=rg.ID) " - + "SET sg.fileSize=?, sg.transferURL=?, sg.statusCode=?, sg.explanation=?, rq.pinLifetime=?, rg.normalized_sourceSURL_StFN=?, rg.sourceSURL_uniqueID=? " - + "WHERE rg.ID=?"); - printWarnings(con.getWarnings()); - - updateFileReq.setLong(1, to.fileSize()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(2, to.turl()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(3, to.status()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(4, to.errString()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(5, to.lifeTime()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setString(6, to.normalizedStFN()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setInt(7, to.surlUniqueID()); - printWarnings(updateFileReq.getWarnings()); - - updateFileReq.setLong(8, to.primaryKey()); - printWarnings(updateFileReq.getWarnings()); - // execute update - log.trace("PTG CHUNK DAO: update method; {}", updateFileReq.toString()); - updateFileReq.executeUpdate(); - printWarnings(updateFileReq.getWarnings()); - } catch (SQLException e) { - log.error("PtG CHUNK DAO: Unable to complete update! 
{}", - e.getMessage(), e); - } finally { - close(updateFileReq); - } - } - - /** - * Updates the request_Get represented by the received ReducedPtGChunkDataTO - * by setting its normalized_sourceSURL_StFN and sourceSURL_uniqueID - * - * @param chunkTO - */ - public synchronized void updateIncomplete(ReducedPtGChunkDataTO chunkTO) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: updateIncomplete - unable to get a valid connection!"); - return; - } - String str = "UPDATE request_Get rg SET rg.normalized_sourceSURL_StFN=?, rg.sourceSURL_uniqueID=? " - + "WHERE rg.ID=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - stmt.setString(1, chunkTO.normalizedStFN()); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, chunkTO.surlUniqueID()); - printWarnings(stmt.getWarnings()); - - stmt.setLong(3, chunkTO.primaryKey()); - printWarnings(stmt.getWarnings()); - - log.trace("PtG CHUNK DAO - update incomplete: {}", stmt.toString()); - stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - } catch (SQLException e) { - log.error("PtG CHUNK DAO: Unable to complete update incomplete! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * TODO WARNING! THIS IS A WORK IN PROGRESS!!! - * - * Method used to refresh the PtGChunkDataTO information from the MySQL DB. - * - * In this first version, only the statusCode and the TURL are reloaded from - * the DB. TODO The next version must contains all the information related to - * the Chunk! - * - * In case of any error, an error messagge gets logged but no exception is - * thrown. - */ - - public synchronized PtGChunkDataTO refresh(long primary_key) { - - if (!checkConnection()) { - log.error("PTG CHUNK DAO: refresh - unable to get a valid connection!"); - return null; - } - String queryString = null; - PreparedStatement find = null; - ResultSet rs = null; - - try { - // get chunks of the request - queryString = "SELECT sg.statusCode, sg.transferURL " - + "FROM status_Get sg " + "WHERE sg.request_GetID=?"; - find = con.prepareStatement(queryString); - printWarnings(con.getWarnings()); - find.setLong(1, primary_key); - printWarnings(find.getWarnings()); - log.trace("PTG CHUNK DAO: refresh status method; {}", find.toString()); - - rs = find.executeQuery(); - - printWarnings(find.getWarnings()); - PtGChunkDataTO chunkDataTO = null; - // The result shoul be un - while (rs.next()) { - chunkDataTO = new PtGChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sg.statusCode")); - chunkDataTO.setTurl(rs.getString("sg.transferURL")); - } - return chunkDataTO; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: {}", e.getMessage(), e); - /* Return null TransferObject! */ - return null; - } finally { - close(rs); - close(find); - } - } - - /** - * Method that queries the MySQL DB to find all entries matching the supplied - * TRequestToken. The Collection contains the corresponding PtGChunkDataTO - * objects. - * - * An initial simple query establishes the list of protocols associated with - * the request. A second complex query establishes all chunks associated with - * the request, by properly joining request_queue, request_Get, status_Get and - * request_DirOption. The considered fields are: - * - * (1) From status_Get: the ID field which becomes the TOs primary key, and - * statusCode. 
- * - * (2) From request_Get: sourceSURL - * - * (3) From request_queue: pinLifetime - * - * (4) From request_DirOption: isSourceADirectory, alLevelRecursive, - * numOfLevels - * - * In case of any error, a log gets written and an empty collection is - * returned. No exception is thrown. - * - * NOTE! Chunks in SRM_ABORTED status are NOT returned! - */ - public synchronized Collection find(TRequestToken requestToken) { - - if (!checkConnection()) { - log.error("PTG CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - String strToken = requestToken.toString(); - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - str = "SELECT tp.config_ProtocolsID " - + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " - + "WHERE rq.r_token=?"; - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List protocols = new ArrayList(); - find.setString(1, strToken); - printWarnings(find.getWarnings()); - - log.trace("PTG CHUNK DAO: find method; {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - while (rs.next()) { - protocols.add(rs.getString("tp.config_ProtocolsID")); - } - close(rs); - close(find); - - // get chunks of the request - str = "SELECT sg.statusCode, rq.pinLifetime, rg.ID, rq.timeStamp, rq.client_dn, rq.proxy, rg.sourceSURL, " - + "rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID, d.isSourceADirectory, " - + "d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "LEFT JOIN request_DirOption d ON rg.request_DirOptionID=d.ID " - + "WHERE rq.r_token=? AND sg.statusCode<>?"; - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - ArrayList list = new ArrayList(); - find.setString(1, strToken); - printWarnings(find.getWarnings()); - - find.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - printWarnings(find.getWarnings()); - - log.trace("PTG CHUNK DAO: find method; " + find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - PtGChunkDataTO chunkDataTO; - while (rs.next()) { - chunkDataTO = new PtGChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sg.statusCode")); - chunkDataTO.setRequestToken(strToken); - chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); - chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rg.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setClientDN(rs.getString("rq.client_dn")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separeted by the "#" char. The proxy is a BLOB, hence it has to be - * properly conveted in string. 
- */ - java.sql.Blob blob = rs.getBlob("rq.proxy"); - if (!rs.wasNull() && blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - chunkDataTO.setVomsAttributes(new String(bdata)); - } - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); - chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); - chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); - chunkDataTO.setProtocolList(protocols); - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: ", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedPtGChunkDataTO associated to the - * given TRequestToken expressed as String. - */ - public synchronized Collection findReduced( - String reqtoken) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - // get reduced chunks - String str = "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE rq.r_token=?"; - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, reqtoken); - printWarnings(find.getWarnings()); - - log.trace("PtG CHUNK DAO! findReduced with request token; {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - ReducedPtGChunkDataTO reducedChunkDataTO = null; - while (rs.next()) { - reducedChunkDataTO = new ReducedPtGChunkDataTO(); - reducedChunkDataTO.setStatus(rs.getInt("sg.statusCode")); - reducedChunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); - reducedChunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); - reducedChunkDataTO.setNormalizedStFN(rs - .getString("rg.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - reducedChunkDataTO.setSurlUniqueID(uniqueID); - } - - list.add(reducedChunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: {}", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - public synchronized Collection findReduced( - TRequestToken requestToken, int[] surlsUniqueIDs, String[] surlsArray) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - - try { - - String str = "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE rq.r_token=? 
AND ( rg.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) - + " AND rg.sourceSURL IN " - + makeSurlString(surlsArray) + " ) "; - - find = con.prepareStatement(str); - - printWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, requestToken.getValue()); - printWarnings(find.getWarnings()); - - log.trace("PtG CHUNK DAO! findReduced with griduser+surlarray; {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - ReducedPtGChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new ReducedPtGChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sg.statusCode")); - chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); - chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rg.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(uniqueID); - } - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: {}", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedPtGChunkDataTO associated to the - * given griduser, and whose SURLs are contained in the supplied array of - * Strings. - */ - public synchronized Collection findReduced( - String griduser, int[] surlUniqueIDs, String[] surls) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - /* - * NOTE: we search also on the fromSurl because otherwise we lost all - * request_get that have not the uniqueID set because are not yet been - * used by anybody - */ - // get reduced chunks - String str = "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE rq.client_dn=? AND ( rg.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlUniqueIDs) - + " AND rg.sourceSURL IN " - + makeSurlString(surls) + " ) "; - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - ArrayList list = new ArrayList(); - find.setString(1, griduser); - printWarnings(find.getWarnings()); - - log.trace("PtG CHUNK DAO! findReduced with griduser+surlarray; {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - ReducedPtGChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new ReducedPtGChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sg.statusCode")); - chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); - chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rg.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(uniqueID); - } - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: {}", e.getMessage(), e); - /* Return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method used in extraordinary situations to signal that data retrieved from - * the DB was malformed and could not be translated into the StoRM object - * model. 
- * - * This method attempts to change the status of the request to SRM_FAILURE and - * record it in the DB. - * - * This operation could potentially fail because the source of the malformed - * problems could be a problematic DB; indeed, initially only log messagges - * where recorded. - * - * Yet it soon became clear that the source of malformed data were the clients - * and/or FE recording info in the DB. In these circumstances the client would - * see its request as being in the SRM_IN_PROGRESS state for ever. Hence the - * pressing need to inform it of the encountered problems. - */ - public synchronized void signalMalformedPtGChunk(PtGChunkDataTO auxTO) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: signalMalformedPtGChunk - unable to get a valid connection!"); - return; - } - String signalSQL = "UPDATE status_Get SET statusCode=" - + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) - + ", explanation=? WHERE request_GetID=" + auxTO.primaryKey(); - PreparedStatement signal = null; - try { - signal = con.prepareStatement(signalSQL); - printWarnings(con.getWarnings()); - /* Prepared statement spares DB-specific String notation! */ - signal.setString(1, "Request is malformed!"); - printWarnings(signal.getWarnings()); - - log.trace("PTG CHUNK DAO: signalMalformed; {}", signal.toString()); - signal.executeUpdate(); - printWarnings(signal.getWarnings()); - } catch (SQLException e) { - log.error("PtGChunkDAO! Unable to signal in DB that the request was " - + "malformed! Request: {}; Exception: {}", auxTO.toString(), e.toString()); - } finally { - close(signal); - } - } - - /** - * Method that returns the number of Get requests on the given SURL, that are - * in SRM_FILE_PINNED state. - * - * This method is intended to be used by PtGChunkCatalog in the - * isSRM_FILE_PINNED method invocation. - * - * In case of any error, 0 is returned. - */ - // request_Get table - public synchronized int numberInSRM_FILE_PINNED(int surlUniqueID) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: numberInSRM_FILE_PINNED - unable to get a valid connection!"); - return 0; - } - String str = "SELECT COUNT(rg.ID) " - + "FROM status_Get sg JOIN request_Get rg " - + "ON (sg.request_GetID=rg.ID) " - + "WHERE rg.sourceSURL_uniqueID=? AND sg.statusCode=?"; - PreparedStatement find = null; - ResultSet rs = null; - try { - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - /* Prepared statement spares DB-specific String notation! */ - find.setInt(1, surlUniqueID); - printWarnings(find.getWarnings()); - - find.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - printWarnings(find.getWarnings()); - - log.trace("PtG CHUNK DAO - numberInSRM_FILE_PINNED method: {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - int numberFilePinned = 0; - if (rs.next()) { - numberFilePinned = rs.getInt(1); - } - return numberFilePinned; - } catch (SQLException e) { - log.error("PtG CHUNK DAO! Unable to determine numberInSRM_FILE_PINNED! " - + "Returning 0! {}", e.getMessage(), e); - return 0; - } finally { - close(rs); - close(find); - } - } - - /** - * Method that updates all expired requests in SRM_FILE_PINNED state, into - * SRM_RELEASED. - * - * This is needed when the client forgets to invoke srmReleaseFiles(). 
- * - * @return - */ - public synchronized List transitExpiredSRM_FILE_PINNED() { - - // tring to the surl unique ID - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: transitExpiredSRM_FILE_PINNED - unable to get a valid connection!"); - return new ArrayList(); - } - HashMap expiredSurlMap = new HashMap(); - String str = null; - // Statement statement = null; - PreparedStatement preparedStatement = null; - - /* Find all expired surls */ - try { - // start transaction - con.setAutoCommit(false); - - str = "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID " - + "FROM request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "WHERE sg.statusCode=?" - + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - preparedStatement = con.prepareStatement(str); - preparedStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - - ResultSet res = preparedStatement.executeQuery(); - printWarnings(preparedStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rg.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("PtGChunkDAO! unable to build the TSURL from {}: " - + "InvalidTSURLAttributesException {}", sourceSURL, e.getMessage(), e); - } - } - expiredSurlMap.put(sourceSURL, uniqueID); - } - - if (expiredSurlMap.isEmpty()) { - commit(con); - log - .trace("PtGChunkDAO! No chunk of PtG request was transited from SRM_FILE_PINNED to SRM_RELEASED."); - return new ArrayList(); - } - } catch (SQLException e) { - log.error("PtGChunkDAO! SQLException. {}", e.getMessage(), e); - rollback(con); - return new ArrayList(); - } finally { - close(preparedStatement); - } - - /* Update status of all expired surls to SRM_RELEASED */ - - preparedStatement = null; - try { - - str = "UPDATE " - + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=? " - + "WHERE sg.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - preparedStatement = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - preparedStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - printWarnings(preparedStatement.getWarnings()); - - preparedStatement.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - printWarnings(preparedStatement.getWarnings()); - - log.trace("PtG CHUNK DAO - transitExpiredSRM_FILE_PINNED method: {}", - preparedStatement.toString()); - - int count = preparedStatement.executeUpdate(); - printWarnings(preparedStatement.getWarnings()); - - if (count == 0) { - log.trace("PtGChunkDAO! No chunk of PtG request was " - + "transited from SRM_FILE_PINNED to SRM_RELEASED."); - } else { - log.info("PtGChunkDAO! {} chunks of PtG requests were transited from" - + " SRM_FILE_PINNED to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("PtGChunkDAO! Unable to transit expired SRM_FILE_PINNED chunks " - + "of PtG requests, to SRM_RELEASED! 
{}", e.getMessage(), e); - rollback(con); - return new ArrayList(); - } finally { - close(preparedStatement); - } - - /* - * in order to enhance performance here we can check if there is any file - * system with tape (T1D0, T1D1), if there is not any we can skip the - * following - */ - - /* Find all not expired surls from PtG and BoL */ - - HashSet pinnedSurlSet = new HashSet(); - try { - - // SURLs pinned by PtGs - str = "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID FROM " - + "request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "WHERE sg.statusCode=?" - + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; - - preparedStatement = con.prepareStatement(str); - preparedStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - - ResultSet res = preparedStatement.executeQuery(); - printWarnings(preparedStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rg.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("PtGChunkDAO! unable to build the TSURL from {}. " - + "InvalidTSURLAttributesException: {}", sourceSURL, e.getMessage()); - } - } - pinnedSurlSet.add(uniqueID); - } - - close(preparedStatement); - - // SURLs pinned by BoLs - str = "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " - + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " - + "WHERE sb.statusCode=?" - + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; - - preparedStatement = con.prepareStatement(str); - preparedStatement.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); - - res = preparedStatement.executeQuery(); - printWarnings(preparedStatement.getWarnings()); - - while (res.next()) { - String sourceSURL = res.getString("rb.sourceSURL"); - Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); - /* If the uniqueID is not setted compute it */ - if (res.wasNull()) { - try { - TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); - uniqueID = tsurl.uniqueId(); - } catch (InvalidTSURLAttributesException e) { - log.warn("PtGChunkDAO! unable to build the TSURL from {}. " - + "InvalidTSURLAttributesException: {}", sourceSURL, e.getMessage(), e); - } - } - pinnedSurlSet.add(uniqueID); - } - commit(con); - } catch (SQLException e) { - log.error("PtGChunkDAO! SQLException. {}", e.getMessage(), e); - rollback(con); - } finally { - close(preparedStatement); - } - - ArrayList expiredSurlList = new ArrayList(); - /* Remove the Extended Attribute pinned if there is not a valid surl on it */ - TSURL surl; - for (Entry surlEntry : expiredSurlMap.entrySet()) { - if (!pinnedSurlSet.contains(surlEntry.getValue())) { - try { - surl = TSURL.makeFromStringValidate(surlEntry.getKey()); - } catch (InvalidTSURLAttributesException e) { - log.error("Invalid SURL, cannot release the pin " - + "(Extended Attribute): {}", surlEntry.getKey()); - continue; - } - expiredSurlList.add(surl); - StoRI stori; - try { - stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); - } catch (Throwable e) { - log.error("Invalid SURL {} cannot release the pin. 
{}: {}", - surlEntry.getKey(), e.getClass().getCanonicalName(), e.getMessage(), e); - continue; - } - - if (stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { - StormEA.removePinned(stori.getAbsolutePath()); - } - } - } - return expiredSurlList; - } - - /** - * Method that updates all chunks in SRM_FILE_PINNED state, into SRM_RELEASED. - * An array of long representing the primary key of each chunk is required: - * only they get the status changed provided their current status is - * SRM_FILE_PINNED. - * - * This method is used during srmReleaseFiles - * - * In case of any error nothing happens and no exception is thrown, but proper - * messagges get logged. - */ - public synchronized void transitSRM_FILE_PINNEDtoSRM_RELEASED(long[] ids) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: transitSRM_FILE_PINNEDtoSRM_RELEASED - unable to get a valid connection!"); - return; - } - String str = "UPDATE status_Get sg SET sg.statusCode=? " - + "WHERE sg.statusCode=? AND sg.request_GetID IN " + makeWhereString(ids); - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - printWarnings(stmt.getWarnings()); - - log.trace("PtG CHUNK DAO - transitSRM_FILE_PINNEDtoSRM_RELEASED: {}", - stmt.toString()); - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PtG CHUNK DAO! No chunk of PtG request was " - + "transited from SRM_FILE_PINNED to SRM_RELEASED."); - } else { - log.info("PtG CHUNK DAO! {} chunks of PtG requests were transited " - + "from SRM_FILE_PINNED to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("PtG CHUNK DAO! Unable to transit chunks" - + " from SRM_FILE_PINNED to SRM_RELEASED! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * @param ids - * @param token - */ - public synchronized void transitSRM_FILE_PINNEDtoSRM_RELEASED(long[] ids, - TRequestToken token) { - - if (token == null) { - transitSRM_FILE_PINNEDtoSRM_RELEASED(ids); - return; - } - - /* - * If a request token has been specified, only the related Get requests - * have to be released. This is done adding the r.r_token="..." clause in - * the where subquery. - */ - if (!checkConnection()) { - log.error("PTG CHUNK DAO: transitSRM_FILE_PINNEDtoSRM_RELEASED - " - + "unable to get a valid connection!"); - return; - } - - String str = "UPDATE " - + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=? " + "WHERE sg.statusCode=? AND rq.r_token='" - + token.toString() + "' AND rg.ID IN " + makeWhereString(ids); - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1,StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2,StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); - printWarnings(stmt.getWarnings()); - - log.trace("PtG CHUNK DAO - transitSRM_FILE_PINNEDtoSRM_RELEASED: {}", stmt.toString()); - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PtG CHUNK DAO! 
No chunk of PtG request was" - + " transited from SRM_FILE_PINNED to SRM_RELEASED."); - } else { - log.info("PtG CHUNK DAO! {} chunks of PtG requests were transited from " - + "SRM_FILE_PINNED to SRM_RELEASED.", count); - } - } catch (SQLException e) { - log.error("PtG CHUNK DAO! Unable to transit chunks from " - + "SRM_FILE_PINNED to SRM_RELEASED! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - public synchronized void updateStatus(TRequestToken requestToken, - int[] surlUniqueIDs, String[] surls, TStatusCode statusCode, - String explanation) { - - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: updateStatus - unable to get a valid connection!"); - return; - } - String str = "UPDATE " - + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=? , sg.explanation=? " + "WHERE rq.r_token='" - + requestToken.toString() + "' AND ( rg.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlUniqueIDs) + " AND rg.sourceSURL IN " - + makeSurlString(surls) + " ) "; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1, StatusCodeConverter.getInstance().toDB(statusCode)); - printWarnings(stmt.getWarnings()); - - stmt.setString(2, (explanation != null ? explanation : "")); - printWarnings(stmt.getWarnings()); - - log.trace("PtG CHUNK DAO - updateStatus: {}", stmt.toString()); - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PtG CHUNK DAO! No chunk of PtG request was updated to {}.", - statusCode); - } else { - log.info("PtG CHUNK DAO! {} chunks of PtG requests were updated to {}.", - count, statusCode); - } - } catch (SQLException e) { - log.error("PtG CHUNK DAO! Unable to updated to {}! 
{}", statusCode, - e.getMessage(), e); - } finally { - close(stmt); - } - } - - public synchronized void updateStatusOnMatchingStatus(int[] surlsUniqueIDs, - String[] surls, TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surls == null || explanation == null - || surlsUniqueIDs.length == 0 || surls.length == 0 - || surlsUniqueIDs.length != surls.length) { - - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs + " surls=" - + surls + " explanation=" + explanation); - } - - doUpdateStatusOnMatchingStatus(null, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, explanation, false, true, true); - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || explanation == null) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken + " explanation=" - + explanation); - } - doUpdateStatusOnMatchingStatus(requestToken, null, null, - expectedStatusCode, newStatusCode, explanation, true, false, true); - } - - public synchronized void updateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlsUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode) - throws IllegalArgumentException { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || surlsUniqueIDs == null || surls == null || surlsUniqueIDs.length == 0 - || surls.length == 0 || surlsUniqueIDs.length != surls.length) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken - + "surlsUniqueIDs=" + surlsUniqueIDs + " surls=" + surls); - } - doUpdateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, null, true, true, false); - } - - public synchronized void doUpdateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation, boolean withRequestToken, boolean withSurls, - boolean withExplanation) throws IllegalArgumentException { - - if ((withRequestToken && requestToken == null) - || (withExplanation && explanation == null) - || (withSurls && (surlUniqueIDs == null || surls == null))) { - - throw new IllegalArgumentException( - "Unable to perform the doUpdateStatusOnMatchingStatus, " - + "invalid arguments: withRequestToken=" + withRequestToken - + " requestToken=" + requestToken + " withSurls=" + withSurls - + " surlUniqueIDs=" + surlUniqueIDs + " surls=" + surls - + " withExplaination=" + withExplanation + " explanation=" - + explanation); - } - if (!checkConnection()) { - log - .error("PTG CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); - return; - } - String str = "UPDATE status_Get sg JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=? "; - if (withExplanation) { - str += " , " + buildExpainationSet(explanation); - } - str += " WHERE sg.statusCode=? 
"; - if (withRequestToken) { - str += " AND " + buildTokenWhereClause(requestToken); - } - if (withSurls) { - str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); - } - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); - printWarnings(stmt.getWarnings()); - - stmt - .setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); - printWarnings(stmt.getWarnings()); - - log.trace("PtG CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PtG CHUNK DAO! No chunk of PtG request was updated " - + "from {} to {}.", expectedStatusCode, newStatusCode); - } else { - log.debug("PtG CHUNK DAO! {} chunks of PtG requests were updated " - + "from {} to {}.", count, expectedStatusCode, newStatusCode); - } - } catch (SQLException e) { - log.error("PtG CHUNK DAO! Unable to updated from {} to {}! {}", - expectedStatusCode, newStatusCode, e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Auxiliary method used to close a ResultSet - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("PTG CHUNK DAO! Unable to close ResultSet! Error: {}", - e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to close a Statement - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("PTG CHUNK DAO! Unable to close Statement {} - Error: {}", - stmt.toString(), e.getMessage(), e); - } - } - } - - private void commit(Connection con) { - - if (con != null) { - try { - con.commit(); - con.setAutoCommit(true); - } catch (SQLException e) { - log.error("PtG, SQL Exception: {}", e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to roll back a failed transaction - */ - private void rollback(Connection con) { - - if (con != null) { - try { - con.rollback(); - con.setAutoCommit(true); - log.error("PTG CHUNK DAO: roll back successful!"); - } catch (SQLException e2) { - log.error("PTG CHUNK DAO: roll back failed! {}", e2.getMessage(), e2); - } - } - } - - /** - * Private method that returns the generated ID: it throws an exception in - * case of any problem! - */ - private int extractID(ResultSet rs) throws Exception { - - if (rs == null) { - throw new Exception("PTG CHUNK DAO! Null ResultSet!"); - } - if (rs.next()) { - return rs.getInt(1); - } else { - log.error("PTG CHUNK DAO! It was not possible to establish " - + "the assigned autoincrement primary key!"); - throw new Exception("PTG CHUNK DAO! It was not possible to" - + " establish the assigned autoincrement primary key!"); - } - } - - /** - * Method that returns a String containing all IDs. - */ - private String makeWhereString(long[] rowids) { - - StringBuilder sb = new StringBuilder("("); - int n = rowids.length; - for (int i = 0; i < n; i++) { - sb.append(rowids[i]); - if (i < (n - 1)) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surl's IDs. 
- */ - private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { - - StringBuilder sb = new StringBuilder("("); - for (int i = 0; i < surlUniqueIDs.length; i++) { - if (i > 0) { - sb.append(","); - } - sb.append(surlUniqueIDs[i]); - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surls. - */ - private String makeSurlString(String[] surls) { - - StringBuilder sb = new StringBuilder("("); - int n = surls.length; - - for (int i = 0; i < n; i++) { - - SURL requestedSURL; - - try { - requestedSURL = SURL.makeSURLfromString(surls[i]); - } catch (NamespaceException e) { - log.error(e.getMessage()); - log.debug("Skip '{}' during query creation", surls[i]); - continue; - } - - sb.append("'"); - sb.append(requestedSURL.getNormalFormAsString()); - sb.append("','"); - sb.append(requestedSURL.getQueryFormAsString()); - sb.append("'"); - - if (i < (n - 1)) { - sb.append(","); - } - } - - sb.append(")"); - return sb.toString(); - } - - /** - * Auxiliary method that sets up the connection to the DB, as well as the - * prepared statement. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - printWarnings(con.getWarnings()); - response = con.isValid(0); - } catch (ClassNotFoundException | SQLException e) { - log.error("PTG CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. - */ - private boolean checkConnection() { - - boolean response = true; - if (reconnect) { - log.debug("PTG CHUNK DAO! Reconnecting to DB! "); - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method that tales down a connection to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - log.error("PTG CHUNK DAO! 
Exception in takeDownConnection method: {}", - e.getMessage(), e); - } - } - } - - public Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0 || dn == null) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " dn=" + dn); - } - return find(surlsUniqueIDs, surlsArray, dn, true); - } - - public Collection find(int[] surlsUniqueIDs, - String[] surlsArray) throws IllegalArgumentException { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray); - } - return find(surlsUniqueIDs, surlsArray, null, false); - } - - private synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn, boolean withDn) - throws IllegalArgumentException { - - if ((withDn && dn == null) || surlsUniqueIDs == null - || surlsUniqueIDs.length == 0 || surlsArray == null - || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " withDn=" + withDn + " dn=" + dn); - } - if (!checkConnection()) { - log.error("PTG CHUNK DAO: find - unable to get a valid connection!"); - return new ArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - - try { - - String str = "SELECT rq.ID, rq.r_token, sg.statusCode, rq.pinLifetime, rg.ID, rq.timeStamp, " - + "rq.client_dn, rq.proxy, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID, " - + "d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "LEFT JOIN request_DirOption d ON rg.request_DirOptionID=d.ID " - + "WHERE ( rg.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) - + " AND rg.sourceSURL IN " - + makeSurlString(surlsArray) + " )"; - - if (withDn) { - - str += " AND rq.client_dn=\'" + dn + "\'"; - } - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List list = new ArrayList(); - - log.trace("PTG CHUNK DAO - find method: {}", find.toString()); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - PtGChunkDataTO chunkDataTO = null; - while (rs.next()) { - - chunkDataTO = new PtGChunkDataTO(); - chunkDataTO.setStatus(rs.getInt("sg.statusCode")); - chunkDataTO.setRequestToken(rs.getString("rq.r_token")); - chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); - chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); - - chunkDataTO.setNormalizedStFN(rs - .getString("rg.normalized_sourceSURL_StFN")); - int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); - } - - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setClientDN(rs.getString("rq.client_dn")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. 
The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separeted by the "#" char. The proxy is a BLOB, hence it has to be - * properly conveted in string. - */ - java.sql.Blob blob = rs.getBlob("rq.proxy"); - if (!rs.wasNull() && blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - chunkDataTO.setVomsAttributes(new String(bdata)); - } - chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); - chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); - chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); - - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTG CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return new ArrayList(); - } finally { - close(rs); - close(find); - } - } - - private String buildExpainationSet(String explanation) { - - return " sg.explanation='" + explanation + "' "; - } - - private String buildTokenWhereClause(TRequestToken requestToken) { - - return " rq.r_token='" + requestToken.toString() + "' "; - } - - private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { - - return " ( rg.sourceSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) + " AND rg.sourceSURL IN " - + makeSurlString(surls) + " ) "; - } - + private static final Logger log = LoggerFactory.getLogger(PtGChunkDAO.class); + + /** String with the name of the class for the DB driver */ + private final String driver = Configuration.getInstance().getDBDriver(); + /** String referring to the URL of the DB */ + private final String url = Configuration.getInstance().getStormDbURL(); + /** String with the password for the DB */ + private final String password = Configuration.getInstance().getDBPassword(); + /** String with the name for the DB */ + private final String name = Configuration.getInstance().getDBUserName(); + + /** Connection to DB - WARNING!!! It is kept open all the time! */ + private Connection con = null; + /** boolean that tells whether reconnection is needed because of MySQL bug! */ + private boolean reconnect = false; + + /** Singleton instance */ + private static final PtGChunkDAO dao = new PtGChunkDAO(); + + /** timer thread that will run a task to alert when reconnecting is necessary! */ + private Timer clock = null; + /** timer task that will update the boolean signaling that a reconnection is needed! */ + private TimerTask clockTask = null; + /** milliseconds that must pass before reconnecting to DB */ + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + /** initial delay in milliseconds before starting timer */ + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + + private PtGChunkDAO() { + + setUpConnection(); + + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of the PtGChunkDAO. */ + public static PtGChunkDAO getInstance() { + + return dao; + } + + /** + * Method used to add a new record to the DB: the supplied PtGChunkDataTO gets its primaryKey + * changed to the one assigned by the DB. + * + *

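// Illustration only (not part of this patch): a minimal caller sketch for addChild,
// assuming a request with the given token already exists in request_queue. The
// "token" variable and the SURL value are hypothetical.
PtGChunkDataTO child = new PtGChunkDataTO();
child.setRequestToken(token);
child.setFromSURL("srm://example.org/some/file");
PtGChunkDAO.getInstance().addChild(child);
long assignedKey = child.primaryKey(); // set by the DAO from the generated request_Get ID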
The supplied PtGChunkData is used to fill in only the DB table where file specific info gets + * recorded: it does _not_ add a new request! So if spurious data is supplied, it will just stay + * there because of a lack of a parent request! + */ + public synchronized void addChild(PtGChunkDataTO to) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: addChild - unable to get a valid connection!"); + return; + } + String str = null; + PreparedStatement id = null; // statement to find out the ID associated to + // the request token + ResultSet rsid = null; // result set containing the ID of the request. + try { + + // WARNING!!!! We are forced to run a query to get the ID of the request, + // which should NOT be so + // because the corresponding request object should have been changed with + // the extra field! However, it is not possible + // at the moment to perform such chage because of strict deadline and the + // change could wreak havoc + // the code. So we are forced to make this query!!! + + // begin transaction + con.setAutoCommit(false); + printWarnings(con.getWarnings()); + + // find ID of request corresponding to given RequestToken + str = "SELECT rq.ID FROM request_queue rq WHERE rq.r_token=?"; + + id = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + id.setString(1, to.requestToken()); + printWarnings(id.getWarnings()); + + log.debug("PTG CHUNK DAO: addChild; {}", id.toString()); + rsid = id.executeQuery(); + printWarnings(id.getWarnings()); + + /* ID of request in request_process! */ + int request_id = extractID(rsid); + int id_s = fillPtGTables(to, request_id); + + /* end transaction! */ + con.commit(); + printWarnings(con.getWarnings()); + con.setAutoCommit(true); + printWarnings(con.getWarnings()); + + // update primary key reading the generated key + to.setPrimaryKey(id_s); + } catch (SQLException e) { + log.error( + "PTG CHUNK DAO: unable to complete addChild! " + "PtGChunkDataTO: {}; error: {}", + to, + e.getMessage(), + e); + rollback(con); + } catch (Exception e) { + log.error( + "PTG CHUNK DAO: unable to complete addChild! " + "PtGChunkDataTO: {}; error: {}", + to, + e.getMessage(), + e); + rollback(con); + } finally { + close(rsid); + close(id); + } + } + + /** + * Method used to add a new record to the DB: the supplied PtGChunkDataTO gets its primaryKey + * changed to the one assigned by the DB. The client_dn must also be supplied as a String. + * + *

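// Illustration only (not part of this patch): a minimal caller sketch for addNew,
// which creates the parent request_queue row as well. "token", "dn" and "protocols"
// are hypothetical (a request token string, a client DN string, a list of protocol names).
PtGChunkDataTO to = new PtGChunkDataTO();
to.setRequestToken(token);
to.setFromSURL("srm://example.org/some/file");
to.setLifeTime(3600);            // request pinLifetime recorded in request_queue
to.setProtocolList(protocols);   // one request_TransferProtocols row per entry
PtGChunkDAO.getInstance().addNew(to, dn);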
The supplied PtGChunkData is used to fill in all the DB tables where file specific info gets + * recorded: it _adds_ a new request! + */ + public synchronized void addNew(PtGChunkDataTO to, String client_dn) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: addNew - unable to get a valid connection!"); + return; + } + String str = null; + /* Result set containing the ID of the inserted new request */ + ResultSet rs_new = null; + /* Insert new request into process_request */ + PreparedStatement addNew = null; + /* Insert protocols for request. */ + PreparedStatement addProtocols = null; + try { + // begin transaction + con.setAutoCommit(false); + printWarnings(con.getWarnings()); + + // add to request_queue... + str = + "INSERT INTO request_queue (config_RequestTypeID,client_dn,pinLifetime,status,errstring,r_token,nbreqfiles,timeStamp) VALUES (?,?,?,?,?,?,?,?)"; + addNew = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + printWarnings(con.getWarnings()); + /* Request type set to prepare to get! */ + addNew.setString(1, RequestTypeConverter.getInstance().toDB(TRequestType.PREPARE_TO_GET)); + printWarnings(addNew.getWarnings()); + + addNew.setString(2, client_dn); + printWarnings(addNew.getWarnings()); + + addNew.setInt(3, to.lifeTime()); + printWarnings(addNew.getWarnings()); + + addNew.setInt(4, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + printWarnings(addNew.getWarnings()); + + addNew.setString(5, "New PtG Request resulting from srmCopy invocation."); + printWarnings(addNew.getWarnings()); + + addNew.setString(6, to.requestToken()); + printWarnings(addNew.getWarnings()); + + addNew.setInt(7, 1); // number of requested files set to 1! + printWarnings(addNew.getWarnings()); + + addNew.setTimestamp(8, new Timestamp(new Date().getTime())); + printWarnings(addNew.getWarnings()); + + log.trace("PTG CHUNK DAO: addNew; {}", addNew.toString()); + addNew.execute(); + printWarnings(addNew.getWarnings()); + + rs_new = addNew.getGeneratedKeys(); + int id_new = extractID(rs_new); + + // add protocols... + str = + "INSERT INTO request_TransferProtocols (request_queueID,config_ProtocolsID) VALUES (?,?)"; + addProtocols = con.prepareStatement(str); + printWarnings(con.getWarnings()); + for (Iterator i = to.protocolList().iterator(); i.hasNext(); ) { + addProtocols.setInt(1, id_new); + printWarnings(addProtocols.getWarnings()); + + addProtocols.setString(2, i.next()); + printWarnings(addProtocols.getWarnings()); + + log.trace("PTG CHUNK DAO: addNew; {}", addProtocols.toString()); + addProtocols.execute(); + printWarnings(addProtocols.getWarnings()); + } + + // addChild... + int id_s = fillPtGTables(to, id_new); + + // end transaction! + con.commit(); + printWarnings(con.getWarnings()); + con.setAutoCommit(true); + printWarnings(con.getWarnings()); + + // update primary key reading the generated key + to.setPrimaryKey(id_s); + } catch (SQLException e) { + log.error( + "PTG CHUNK DAO: Rolling back! Unable to complete addNew! " + + "PtGChunkDataTO: {}; error: {}", + to, + e.getMessage(), + e); + rollback(con); + } catch (Exception e) { + log.error( + "PTG CHUNK DAO: unable to complete addNew! 
" + "PtGChunkDataTO: {}; error: {}", + to, + e.getMessage(), + e); + rollback(con); + } finally { + close(rs_new); + close(addNew); + close(addProtocols); + } + } + + /** + * To be used inside a transaction + * + * @param to + * @param requestQueueID + * @return + * @throws SQLException + * @throws Exception + */ + private synchronized int fillPtGTables(PtGChunkDataTO to, int requestQueueID) + throws SQLException, Exception { + + String str = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_do = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_g = null; + /* Result set containing the ID of the inserted */ + ResultSet rs_s = null; + /* insert TDirOption for request */ + PreparedStatement addDirOption = null; + /* insert request_Get for request */ + PreparedStatement addGet = null; + PreparedStatement addChild = null; + + try { + // first fill in TDirOption + str = + "INSERT INTO request_DirOption (isSourceADirectory,allLevelRecursive,numOfLevels) VALUES (?,?,?)"; + addDirOption = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + printWarnings(con.getWarnings()); + addDirOption.setBoolean(1, to.dirOption()); + printWarnings(addDirOption.getWarnings()); + + addDirOption.setBoolean(2, to.allLevelRecursive()); + printWarnings(addDirOption.getWarnings()); + + addDirOption.setInt(3, to.numLevel()); + printWarnings(addDirOption.getWarnings()); + + log.trace("PTG CHUNK DAO: addNew; {}", addDirOption.toString()); + addDirOption.execute(); + printWarnings(addDirOption.getWarnings()); + + rs_do = addDirOption.getGeneratedKeys(); + int id_do = extractID(rs_do); + + // second fill in request_Get... sourceSURL and TDirOption! + str = + "INSERT INTO request_Get (request_DirOptionID,request_queueID,sourceSURL,normalized_sourceSURL_StFN,sourceSURL_uniqueID) VALUES (?,?,?,?,?)"; + addGet = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + printWarnings(con.getWarnings()); + addGet.setInt(1, id_do); + printWarnings(addGet.getWarnings()); + + addGet.setInt(2, requestQueueID); + printWarnings(addGet.getWarnings()); + + addGet.setString(3, to.fromSURL()); + printWarnings(addGet.getWarnings()); + + addGet.setString(4, to.normalizedStFN()); + printWarnings(addGet.getWarnings()); + + addGet.setInt(5, to.surlUniqueID()); + printWarnings(addGet.getWarnings()); + + log.trace("PTG CHUNK DAO: addNew; {}", addGet.toString()); + addGet.execute(); + printWarnings(addGet.getWarnings()); + + rs_g = addGet.getGeneratedKeys(); + int id_g = extractID(rs_g); + + // third fill in status_Get... + str = "INSERT INTO status_Get (request_GetID,statusCode,explanation) VALUES (?,?,?)"; + addChild = con.prepareStatement(str, Statement.RETURN_GENERATED_KEYS); + printWarnings(con.getWarnings()); + addChild.setInt(1, id_g); + printWarnings(addChild.getWarnings()); + + addChild.setInt(2, to.status()); + printWarnings(addChild.getWarnings()); + + addChild.setString(3, to.errString()); + printWarnings(addChild.getWarnings()); + + log.trace("PTG CHUNK DAO: addNew; {}", addChild.toString()); + addChild.execute(); + printWarnings(addChild.getWarnings()); + + return id_g; + } finally { + close(rs_do); + close(rs_g); + close(rs_s); + close(addDirOption); + close(addGet); + close(addChild); + } + } + + /** + * Method used to save the changes made to a retrieved PtGChunkDataTO, back into the MySQL DB. + * + *

Only the fileSize, transferURL, statusCode and explanation of the status_Get table are written
+ * to the DB. Likewise for the request pinLifetime.
+ *
+ *

In case of any error, an error message gets logged but no exception is thrown. + */ + public synchronized void update(PtGChunkDataTO to) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: update - unable to get a valid connection!"); + return; + } + PreparedStatement updateFileReq = null; + try { + // ready updateFileReq... + updateFileReq = + con.prepareStatement( + "UPDATE request_queue rq JOIN (status_Get sg, request_Get rg) ON (rq.ID=rg.request_queueID AND sg.request_GetID=rg.ID) " + + "SET sg.fileSize=?, sg.transferURL=?, sg.statusCode=?, sg.explanation=?, rq.pinLifetime=?, rg.normalized_sourceSURL_StFN=?, rg.sourceSURL_uniqueID=? " + + "WHERE rg.ID=?"); + printWarnings(con.getWarnings()); + + updateFileReq.setLong(1, to.fileSize()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(2, to.turl()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(3, to.status()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(4, to.errString()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(5, to.lifeTime()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setString(6, to.normalizedStFN()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setInt(7, to.surlUniqueID()); + printWarnings(updateFileReq.getWarnings()); + + updateFileReq.setLong(8, to.primaryKey()); + printWarnings(updateFileReq.getWarnings()); + // execute update + log.trace("PTG CHUNK DAO: update method; {}", updateFileReq.toString()); + updateFileReq.executeUpdate(); + printWarnings(updateFileReq.getWarnings()); + } catch (SQLException e) { + log.error("PtG CHUNK DAO: Unable to complete update! {}", e.getMessage(), e); + } finally { + close(updateFileReq); + } + } + + /** + * Updates the request_Get represented by the received ReducedPtGChunkDataTO by setting its + * normalized_sourceSURL_StFN and sourceSURL_uniqueID + * + * @param chunkTO + */ + public synchronized void updateIncomplete(ReducedPtGChunkDataTO chunkTO) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: updateIncomplete - unable to get a valid connection!"); + return; + } + String str = + "UPDATE request_Get rg SET rg.normalized_sourceSURL_StFN=?, rg.sourceSURL_uniqueID=? " + + "WHERE rg.ID=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + stmt.setString(1, chunkTO.normalizedStFN()); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, chunkTO.surlUniqueID()); + printWarnings(stmt.getWarnings()); + + stmt.setLong(3, chunkTO.primaryKey()); + printWarnings(stmt.getWarnings()); + + log.trace("PtG CHUNK DAO - update incomplete: {}", stmt.toString()); + stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + } catch (SQLException e) { + log.error("PtG CHUNK DAO: Unable to complete update incomplete! {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * TODO WARNING! THIS IS A WORK IN PROGRESS!!! + * + *

Method used to refresh the PtGChunkDataTO information from the MySQL DB. + * + *

In this first version, only the statusCode and the TURL are reloaded from the DB. TODO The
+ * next version must contain all the information related to the Chunk!
+ *
+ *

In case of any error, an error message gets logged but no exception is thrown.
+ */
+ public synchronized PtGChunkDataTO refresh(long primary_key) {
+
+ if (!checkConnection()) {
+ log.error("PTG CHUNK DAO: refresh - unable to get a valid connection!");
+ return null;
+ }
+ String queryString = null;
+ PreparedStatement find = null;
+ ResultSet rs = null;
+
+ try {
+ // get chunks of the request
+ queryString =
+ "SELECT sg.statusCode, sg.transferURL "
+ + "FROM status_Get sg "
+ + "WHERE sg.request_GetID=?";
+ find = con.prepareStatement(queryString);
+ printWarnings(con.getWarnings());
+ find.setLong(1, primary_key);
+ printWarnings(find.getWarnings());
+ log.trace("PTG CHUNK DAO: refresh status method; {}", find.toString());
+
+ rs = find.executeQuery();
+
+ printWarnings(find.getWarnings());
+ PtGChunkDataTO chunkDataTO = null;
+ // The result should be unique
+ while (rs.next()) {
+ chunkDataTO = new PtGChunkDataTO();
+ chunkDataTO.setStatus(rs.getInt("sg.statusCode"));
+ chunkDataTO.setTurl(rs.getString("sg.transferURL"));
+ }
+ return chunkDataTO;
+ } catch (SQLException e) {
+ log.error("PTG CHUNK DAO: {}", e.getMessage(), e);
+ /* Return null TransferObject! */
+ return null;
+ } finally {
+ close(rs);
+ close(find);
+ }
+ }
+
+ /**
+ * Method that queries the MySQL DB to find all entries matching the supplied TRequestToken. The
+ * Collection contains the corresponding PtGChunkDataTO objects.
+ *
+ *

An initial simple query establishes the list of protocols associated with the request. A
+ * second, more complex query retrieves all chunks associated with the request, by properly joining
+ * request_queue, request_Get, status_Get and request_DirOption. The considered fields are:
+ *
+ *

(1) From status_Get: the ID field, which becomes the TO's primary key, and statusCode.
+ *
+ *

(2) From request_Get: sourceSURL + * + *

(3) From request_queue: pinLifetime + * + *

(4) From request_DirOption: isSourceADirectory, allLevelRecursive, numOfLevels
+ *
+ *

In case of any error, a log gets written and an empty collection is returned. No exception + * is thrown. + * + *

NOTE! Chunks in SRM_ABORTED status are NOT returned! + */ + public synchronized Collection find(TRequestToken requestToken) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + String strToken = requestToken.toString(); + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + str = + "SELECT tp.config_ProtocolsID " + + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " + + "WHERE rq.r_token=?"; + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List protocols = new ArrayList(); + find.setString(1, strToken); + printWarnings(find.getWarnings()); + + log.trace("PTG CHUNK DAO: find method; {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + while (rs.next()) { + protocols.add(rs.getString("tp.config_ProtocolsID")); + } + close(rs); + close(find); + + // get chunks of the request + str = + "SELECT sg.statusCode, rq.pinLifetime, rg.ID, rq.timeStamp, rq.client_dn, rq.proxy, rg.sourceSURL, " + + "rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID, d.isSourceADirectory, " + + "d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "LEFT JOIN request_DirOption d ON rg.request_DirOptionID=d.ID " + + "WHERE rq.r_token=? AND sg.statusCode<>?"; + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + ArrayList list = new ArrayList(); + find.setString(1, strToken); + printWarnings(find.getWarnings()); + + find.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + printWarnings(find.getWarnings()); + + log.trace("PTG CHUNK DAO: find method; " + find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + PtGChunkDataTO chunkDataTO; + while (rs.next()) { + chunkDataTO = new PtGChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sg.statusCode")); + chunkDataTO.setRequestToken(strToken); + chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); + chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rg.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setClientDN(rs.getString("rq.client_dn")); + + /** + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separeted by the "#" char. The proxy is a BLOB, hence it has + * to be properly conveted in string. + */ + java.sql.Blob blob = rs.getBlob("rq.proxy"); + if (!rs.wasNull() && blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + chunkDataTO.setVomsAttributes(new String(bdata)); + } + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); + chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); + chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); + chunkDataTO.setProtocolList(protocols); + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTG CHUNK DAO: ", e.getMessage(), e); + /* Return empty Collection! 
*/ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedPtGChunkDataTO associated to the given TRequestToken + * expressed as String. + */ + public synchronized Collection findReduced(String reqtoken) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + // get reduced chunks + String str = + "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE rq.r_token=?"; + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, reqtoken); + printWarnings(find.getWarnings()); + + log.trace("PtG CHUNK DAO! findReduced with request token; {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + ReducedPtGChunkDataTO reducedChunkDataTO = null; + while (rs.next()) { + reducedChunkDataTO = new ReducedPtGChunkDataTO(); + reducedChunkDataTO.setStatus(rs.getInt("sg.statusCode")); + reducedChunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); + reducedChunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); + reducedChunkDataTO.setNormalizedStFN(rs.getString("rg.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + reducedChunkDataTO.setSurlUniqueID(uniqueID); + } + + list.add(reducedChunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTG CHUNK DAO: {}", e.getMessage(), e); + /* Return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + public synchronized Collection findReduced( + TRequestToken requestToken, int[] surlsUniqueIDs, String[] surlsArray) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + + try { + + String str = + "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE rq.r_token=? AND ( rg.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rg.sourceSURL IN " + + makeSurlString(surlsArray) + + " ) "; + + find = con.prepareStatement(str); + + printWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, requestToken.getValue()); + printWarnings(find.getWarnings()); + + log.trace("PtG CHUNK DAO! 
findReduced with griduser+surlarray; {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + ReducedPtGChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new ReducedPtGChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sg.statusCode")); + chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); + chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rg.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(uniqueID); + } + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTG CHUNK DAO: {}", e.getMessage(), e); + /* Return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedPtGChunkDataTO associated to the given griduser, and + * whose SURLs are contained in the supplied array of Strings. + */ + public synchronized Collection findReduced( + String griduser, int[] surlUniqueIDs, String[] surls) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: findReduced - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + /* + * NOTE: we search also on the fromSurl because otherwise we lost all + * request_get that have not the uniqueID set because are not yet been + * used by anybody + */ + // get reduced chunks + String str = + "SELECT sg.statusCode, rg.ID, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE rq.client_dn=? AND ( rg.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlUniqueIDs) + + " AND rg.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + ArrayList list = new ArrayList(); + find.setString(1, griduser); + printWarnings(find.getWarnings()); + + log.trace("PtG CHUNK DAO! findReduced with griduser+surlarray; {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + ReducedPtGChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new ReducedPtGChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sg.statusCode")); + chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); + chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rg.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(uniqueID); + } + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTG CHUNK DAO: {}", e.getMessage(), e); + /* Return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method used in extraordinary situations to signal that data retrieved from the DB was malformed + * and could not be translated into the StoRM object model. + * + *

This method attempts to change the status of the request to SRM_FAILURE and record it in the + * DB. + * + *

This operation could potentially fail because the source of the malformed problems could be
+ * a problematic DB; indeed, initially only log messages were recorded.
+ *
+ *

Yet it soon became clear that the sources of malformed data were the clients and/or the FE
+ * recording info in the DB. In these circumstances the client would see its request as being in
+ * the SRM_IN_PROGRESS state forever. Hence the pressing need to inform it of the encountered
+ * problems.
+ */
+ public synchronized void signalMalformedPtGChunk(PtGChunkDataTO auxTO) {
+
+ if (!checkConnection()) {
+ log.error("PTG CHUNK DAO: signalMalformedPtGChunk - unable to get a valid connection!");
+ return;
+ }
+ String signalSQL =
+ "UPDATE status_Get SET statusCode="
+ + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE)
+ + ", explanation=? WHERE request_GetID="
+ + auxTO.primaryKey();
+ PreparedStatement signal = null;
+ try {
+ signal = con.prepareStatement(signalSQL);
+ printWarnings(con.getWarnings());
+ /* Prepared statement spares DB-specific String notation! */
+ signal.setString(1, "Request is malformed!");
+ printWarnings(signal.getWarnings());
+
+ log.trace("PTG CHUNK DAO: signalMalformed; {}", signal.toString());
+ signal.executeUpdate();
+ printWarnings(signal.getWarnings());
+ } catch (SQLException e) {
+ log.error(
+ "PtGChunkDAO! Unable to signal in DB that the request was "
+ + "malformed! Request: {}; Exception: {}",
+ auxTO.toString(),
+ e.toString());
+ } finally {
+ close(signal);
+ }
+ }
+
+ /**
+ * Method that returns the number of Get requests on the given SURL that are in SRM_FILE_PINNED
+ * state.
+ *
+ *

This method is intended to be used by PtGChunkCatalog in the isSRM_FILE_PINNED method + * invocation. + * + *

In case of any error, 0 is returned. + */ + // request_Get table + public synchronized int numberInSRM_FILE_PINNED(int surlUniqueID) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: numberInSRM_FILE_PINNED - unable to get a valid connection!"); + return 0; + } + String str = + "SELECT COUNT(rg.ID) " + + "FROM status_Get sg JOIN request_Get rg " + + "ON (sg.request_GetID=rg.ID) " + + "WHERE rg.sourceSURL_uniqueID=? AND sg.statusCode=?"; + PreparedStatement find = null; + ResultSet rs = null; + try { + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + /* Prepared statement spares DB-specific String notation! */ + find.setInt(1, surlUniqueID); + printWarnings(find.getWarnings()); + + find.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + printWarnings(find.getWarnings()); + + log.trace("PtG CHUNK DAO - numberInSRM_FILE_PINNED method: {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + int numberFilePinned = 0; + if (rs.next()) { + numberFilePinned = rs.getInt(1); + } + return numberFilePinned; + } catch (SQLException e) { + log.error( + "PtG CHUNK DAO! Unable to determine numberInSRM_FILE_PINNED! " + "Returning 0! {}", + e.getMessage(), + e); + return 0; + } finally { + close(rs); + close(find); + } + } + + /** + * Method that updates all expired requests in SRM_FILE_PINNED state, into SRM_RELEASED. + * + *

This is needed when the client forgets to invoke srmReleaseFiles(). + * + * @return + */ + public synchronized List transitExpiredSRM_FILE_PINNED() { + + // tring to the surl unique ID + if (!checkConnection()) { + log.error("PTG CHUNK DAO: transitExpiredSRM_FILE_PINNED - unable to get a valid connection!"); + return new ArrayList(); + } + HashMap expiredSurlMap = new HashMap(); + String str = null; + // Statement statement = null; + PreparedStatement preparedStatement = null; + + /* Find all expired surls */ + try { + // start transaction + con.setAutoCommit(false); + + str = + "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID " + + "FROM request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "WHERE sg.statusCode=?" + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + preparedStatement = con.prepareStatement(str); + preparedStatement.setInt( + 1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + + ResultSet res = preparedStatement.executeQuery(); + printWarnings(preparedStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rg.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "PtGChunkDAO! unable to build the TSURL from {}: " + + "InvalidTSURLAttributesException {}", + sourceSURL, + e.getMessage(), + e); + } + } + expiredSurlMap.put(sourceSURL, uniqueID); + } + + if (expiredSurlMap.isEmpty()) { + commit(con); + log.trace( + "PtGChunkDAO! No chunk of PtG request was transited from SRM_FILE_PINNED to SRM_RELEASED."); + return new ArrayList(); + } + } catch (SQLException e) { + log.error("PtGChunkDAO! SQLException. {}", e.getMessage(), e); + rollback(con); + return new ArrayList(); + } finally { + close(preparedStatement); + } + + /* Update status of all expired surls to SRM_RELEASED */ + + preparedStatement = null; + try { + + str = + "UPDATE " + + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=? " + + "WHERE sg.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + preparedStatement = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + preparedStatement.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + printWarnings(preparedStatement.getWarnings()); + + preparedStatement.setInt( + 2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + printWarnings(preparedStatement.getWarnings()); + + log.trace( + "PtG CHUNK DAO - transitExpiredSRM_FILE_PINNED method: {}", preparedStatement.toString()); + + int count = preparedStatement.executeUpdate(); + printWarnings(preparedStatement.getWarnings()); + + if (count == 0) { + log.trace( + "PtGChunkDAO! No chunk of PtG request was " + + "transited from SRM_FILE_PINNED to SRM_RELEASED."); + } else { + log.info( + "PtGChunkDAO! {} chunks of PtG requests were transited from" + + " SRM_FILE_PINNED to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "PtGChunkDAO! Unable to transit expired SRM_FILE_PINNED chunks " + + "of PtG requests, to SRM_RELEASED! 
{}", + e.getMessage(), + e); + rollback(con); + return new ArrayList(); + } finally { + close(preparedStatement); + } + + /* + * in order to enhance performance here we can check if there is any file + * system with tape (T1D0, T1D1), if there is not any we can skip the + * following + */ + + /* Find all not expired surls from PtG and BoL */ + + HashSet pinnedSurlSet = new HashSet(); + try { + + // SURLs pinned by PtGs + str = + "SELECT rg.sourceSURL , rg.sourceSURL_uniqueID FROM " + + "request_Get rg JOIN (status_Get sg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "WHERE sg.statusCode=?" + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; + + preparedStatement = con.prepareStatement(str); + preparedStatement.setInt( + 1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + + ResultSet res = preparedStatement.executeQuery(); + printWarnings(preparedStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rg.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rg.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "PtGChunkDAO! unable to build the TSURL from {}. " + + "InvalidTSURLAttributesException: {}", + sourceSURL, + e.getMessage()); + } + } + pinnedSurlSet.add(uniqueID); + } + + close(preparedStatement); + + // SURLs pinned by BoLs + str = + "SELECT rb.sourceSURL , rb.sourceSURL_uniqueID FROM " + + "request_BoL rb JOIN (status_BoL sb, request_queue rq) ON sb.request_BoLID=rb.ID AND rb.request_queueID=rq.ID " + + "WHERE sb.statusCode=?" + + " AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) < rq.pinLifetime "; + + preparedStatement = con.prepareStatement(str); + preparedStatement.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_SUCCESS)); + + res = preparedStatement.executeQuery(); + printWarnings(preparedStatement.getWarnings()); + + while (res.next()) { + String sourceSURL = res.getString("rb.sourceSURL"); + Integer uniqueID = new Integer(res.getInt("rb.sourceSURL_uniqueID")); + /* If the uniqueID is not setted compute it */ + if (res.wasNull()) { + try { + TSURL tsurl = TSURL.makeFromStringWellFormed(sourceSURL); + uniqueID = tsurl.uniqueId(); + } catch (InvalidTSURLAttributesException e) { + log.warn( + "PtGChunkDAO! unable to build the TSURL from {}. " + + "InvalidTSURLAttributesException: {}", + sourceSURL, + e.getMessage(), + e); + } + } + pinnedSurlSet.add(uniqueID); + } + commit(con); + } catch (SQLException e) { + log.error("PtGChunkDAO! SQLException. {}", e.getMessage(), e); + rollback(con); + } finally { + close(preparedStatement); + } + + ArrayList expiredSurlList = new ArrayList(); + /* Remove the Extended Attribute pinned if there is not a valid surl on it */ + TSURL surl; + for (Entry surlEntry : expiredSurlMap.entrySet()) { + if (!pinnedSurlSet.contains(surlEntry.getValue())) { + try { + surl = TSURL.makeFromStringValidate(surlEntry.getKey()); + } catch (InvalidTSURLAttributesException e) { + log.error( + "Invalid SURL, cannot release the pin " + "(Extended Attribute): {}", + surlEntry.getKey()); + continue; + } + expiredSurlList.add(surl); + StoRI stori; + try { + stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); + } catch (Throwable e) { + log.error( + "Invalid SURL {} cannot release the pin. 
{}: {}", + surlEntry.getKey(), + e.getClass().getCanonicalName(), + e.getMessage(), + e); + continue; + } + + if (stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { + StormEA.removePinned(stori.getAbsolutePath()); + } + } + } + return expiredSurlList; + } + + /** + * Method that updates all chunks in SRM_FILE_PINNED state, into SRM_RELEASED. An array of long + * representing the primary key of each chunk is required: only they get the status changed + * provided their current status is SRM_FILE_PINNED. + * + *

This method is used during srmReleaseFiles + * + *

In case of any error nothing happens and no exception is thrown, but proper messagges get + * logged. + */ + public synchronized void transitSRM_FILE_PINNEDtoSRM_RELEASED(long[] ids) { + + if (!checkConnection()) { + log.error( + "PTG CHUNK DAO: transitSRM_FILE_PINNEDtoSRM_RELEASED - unable to get a valid connection!"); + return; + } + String str = + "UPDATE status_Get sg SET sg.statusCode=? " + + "WHERE sg.statusCode=? AND sg.request_GetID IN " + + makeWhereString(ids); + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + printWarnings(stmt.getWarnings()); + + log.trace("PtG CHUNK DAO - transitSRM_FILE_PINNEDtoSRM_RELEASED: {}", stmt.toString()); + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "PtG CHUNK DAO! No chunk of PtG request was " + + "transited from SRM_FILE_PINNED to SRM_RELEASED."); + } else { + log.info( + "PtG CHUNK DAO! {} chunks of PtG requests were transited " + + "from SRM_FILE_PINNED to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "PtG CHUNK DAO! Unable to transit chunks" + " from SRM_FILE_PINNED to SRM_RELEASED! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + /** + * @param ids + * @param token + */ + public synchronized void transitSRM_FILE_PINNEDtoSRM_RELEASED(long[] ids, TRequestToken token) { + + if (token == null) { + transitSRM_FILE_PINNEDtoSRM_RELEASED(ids); + return; + } + + /* + * If a request token has been specified, only the related Get requests + * have to be released. This is done adding the r.r_token="..." clause in + * the where subquery. + */ + if (!checkConnection()) { + log.error( + "PTG CHUNK DAO: transitSRM_FILE_PINNEDtoSRM_RELEASED - " + + "unable to get a valid connection!"); + return; + } + + String str = + "UPDATE " + + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=? " + + "WHERE sg.statusCode=? AND rq.r_token='" + + token.toString() + + "' AND rg.ID IN " + + makeWhereString(ids); + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_RELEASED)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FILE_PINNED)); + printWarnings(stmt.getWarnings()); + + log.trace("PtG CHUNK DAO - transitSRM_FILE_PINNEDtoSRM_RELEASED: {}", stmt.toString()); + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "PtG CHUNK DAO! No chunk of PtG request was" + + " transited from SRM_FILE_PINNED to SRM_RELEASED."); + } else { + log.info( + "PtG CHUNK DAO! {} chunks of PtG requests were transited from " + + "SRM_FILE_PINNED to SRM_RELEASED.", + count); + } + } catch (SQLException e) { + log.error( + "PtG CHUNK DAO! Unable to transit chunks from " + "SRM_FILE_PINNED to SRM_RELEASED! 
{}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + public synchronized void updateStatus( + TRequestToken requestToken, + int[] surlUniqueIDs, + String[] surls, + TStatusCode statusCode, + String explanation) { + + if (!checkConnection()) { + log.error("PTG CHUNK DAO: updateStatus - unable to get a valid connection!"); + return; + } + String str = + "UPDATE " + + "status_Get sg JOIN (request_Get rg, request_queue rq) ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=? , sg.explanation=? " + + "WHERE rq.r_token='" + + requestToken.toString() + + "' AND ( rg.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlUniqueIDs) + + " AND rg.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(statusCode)); + printWarnings(stmt.getWarnings()); + + stmt.setString(2, (explanation != null ? explanation : "")); + printWarnings(stmt.getWarnings()); + + log.trace("PtG CHUNK DAO - updateStatus: {}", stmt.toString()); + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace("PtG CHUNK DAO! No chunk of PtG request was updated to {}.", statusCode); + } else { + log.info("PtG CHUNK DAO! {} chunks of PtG requests were updated to {}.", count, statusCode); + } + } catch (SQLException e) { + log.error("PtG CHUNK DAO! Unable to updated to {}! {}", statusCode, e.getMessage(), e); + } finally { + close(stmt); + } + } + + public synchronized void updateStatusOnMatchingStatus( + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surls == null + || explanation == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls + + " explanation=" + + explanation); + } + + doUpdateStatusOnMatchingStatus( + null, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + explanation, + false, + true, + true); + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (requestToken == null || requestToken.getValue().trim().isEmpty() || explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + " explanation=" + + explanation); + } + doUpdateStatusOnMatchingStatus( + requestToken, + null, + null, + expectedStatusCode, + newStatusCode, + explanation, + true, + false, + true); + } + + public synchronized void updateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) + throws IllegalArgumentException { + + if (requestToken == null + || requestToken.getValue().trim().isEmpty() + || surlsUniqueIDs == null + || surls == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: 
requestToken=" + + requestToken + + "surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls); + } + doUpdateStatusOnMatchingStatus( + requestToken, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + null, + true, + true, + false); + } + + public synchronized void doUpdateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation, + boolean withRequestToken, + boolean withSurls, + boolean withExplanation) + throws IllegalArgumentException { + + if ((withRequestToken && requestToken == null) + || (withExplanation && explanation == null) + || (withSurls && (surlUniqueIDs == null || surls == null))) { + + throw new IllegalArgumentException( + "Unable to perform the doUpdateStatusOnMatchingStatus, " + + "invalid arguments: withRequestToken=" + + withRequestToken + + " requestToken=" + + requestToken + + " withSurls=" + + withSurls + + " surlUniqueIDs=" + + surlUniqueIDs + + " surls=" + + surls + + " withExplaination=" + + withExplanation + + " explanation=" + + explanation); + } + if (!checkConnection()) { + log.error("PTG CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); + return; + } + String str = + "UPDATE status_Get sg JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=? "; + if (withExplanation) { + str += " , " + buildExpainationSet(explanation); + } + str += " WHERE sg.statusCode=? "; + if (withRequestToken) { + str += " AND " + buildTokenWhereClause(requestToken); + } + if (withSurls) { + str += " AND " + buildSurlsWhereClause(surlUniqueIDs, surls); + } + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); + printWarnings(stmt.getWarnings()); + + log.trace("PtG CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt.toString()); + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "PtG CHUNK DAO! No chunk of PtG request was updated " + "from {} to {}.", + expectedStatusCode, + newStatusCode); + } else { + log.debug( + "PtG CHUNK DAO! {} chunks of PtG requests were updated " + "from {} to {}.", + count, + expectedStatusCode, + newStatusCode); + } + } catch (SQLException e) { + log.error( + "PtG CHUNK DAO! Unable to updated from {} to {}! {}", + expectedStatusCode, + newStatusCode, + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + /** Auxiliary method used to close a ResultSet */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("PTG CHUNK DAO! Unable to close ResultSet! Error: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method used to close a Statement */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "PTG CHUNK DAO! 
Unable to close Statement {} - Error: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + private void commit(Connection con) { + + if (con != null) { + try { + con.commit(); + con.setAutoCommit(true); + } catch (SQLException e) { + log.error("PtG, SQL Exception: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method used to roll back a failed transaction */ + private void rollback(Connection con) { + + if (con != null) { + try { + con.rollback(); + con.setAutoCommit(true); + log.error("PTG CHUNK DAO: roll back successful!"); + } catch (SQLException e2) { + log.error("PTG CHUNK DAO: roll back failed! {}", e2.getMessage(), e2); + } + } + } + + /** + * Private method that returns the generated ID: it throws an exception in case of any problem! + */ + private int extractID(ResultSet rs) throws Exception { + + if (rs == null) { + throw new Exception("PTG CHUNK DAO! Null ResultSet!"); + } + if (rs.next()) { + return rs.getInt(1); + } else { + log.error( + "PTG CHUNK DAO! It was not possible to establish " + + "the assigned autoincrement primary key!"); + throw new Exception( + "PTG CHUNK DAO! It was not possible to" + + " establish the assigned autoincrement primary key!"); + } + } + + /** Method that returns a String containing all IDs. */ + private String makeWhereString(long[] rowids) { + + StringBuilder sb = new StringBuilder("("); + int n = rowids.length; + for (int i = 0; i < n; i++) { + sb.append(rowids[i]); + if (i < (n - 1)) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surl's IDs. */ + private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { + + StringBuilder sb = new StringBuilder("("); + for (int i = 0; i < surlUniqueIDs.length; i++) { + if (i > 0) { + sb.append(","); + } + sb.append(surlUniqueIDs[i]); + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surls. */ + private String makeSurlString(String[] surls) { + + StringBuilder sb = new StringBuilder("("); + int n = surls.length; + + for (int i = 0; i < n; i++) { + + SURL requestedSURL; + + try { + requestedSURL = SURL.makeSURLfromString(surls[i]); + } catch (NamespaceException e) { + log.error(e.getMessage()); + log.debug("Skip '{}' during query creation", surls[i]); + continue; + } + + sb.append("'"); + sb.append(requestedSURL.getNormalFormAsString()); + sb.append("','"); + sb.append(requestedSURL.getQueryFormAsString()); + sb.append("'"); + + if (i < (n - 1)) { + sb.append(","); + } + } + + sb.append(")"); + return sb.toString(); + } + + /** Auxiliary method that sets up the connection to the DB, as well as the prepared statement. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + printWarnings(con.getWarnings()); + response = con.isValid(0); + } catch (ClassNotFoundException | SQLException e) { + log.error("PTG CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); + } + return response; + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private boolean checkConnection() { + + boolean response = true; + if (reconnect) { + log.debug("PTG CHUNK DAO! Reconnecting to DB! "); + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method that tales down a connection to the DB. 
*/ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + log.error("PTG CHUNK DAO! Exception in takeDownConnection method: {}", e.getMessage(), e); + } + } + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray, String dn) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0 + || dn == null) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " dn=" + + dn); + } + return find(surlsUniqueIDs, surlsArray, dn, true); + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray) + throws IllegalArgumentException { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray); + } + return find(surlsUniqueIDs, surlsArray, null, false); + } + + private synchronized Collection find( + int[] surlsUniqueIDs, String[] surlsArray, String dn, boolean withDn) + throws IllegalArgumentException { + + if ((withDn && dn == null) + || surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " withDn=" + + withDn + + " dn=" + + dn); + } + if (!checkConnection()) { + log.error("PTG CHUNK DAO: find - unable to get a valid connection!"); + return new ArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + + try { + + String str = + "SELECT rq.ID, rq.r_token, sg.statusCode, rq.pinLifetime, rg.ID, rq.timeStamp, " + + "rq.client_dn, rq.proxy, rg.sourceSURL, rg.normalized_sourceSURL_StFN, rg.sourceSURL_uniqueID, " + + "d.isSourceADirectory, d.allLevelRecursive, d.numOfLevels " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "LEFT JOIN request_DirOption d ON rg.request_DirOptionID=d.ID " + + "WHERE ( rg.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rg.sourceSURL IN " + + makeSurlString(surlsArray) + + " )"; + + if (withDn) { + + str += " AND rq.client_dn=\'" + dn + "\'"; + } + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List list = new ArrayList(); + + log.trace("PTG CHUNK DAO - find method: {}", find.toString()); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + PtGChunkDataTO chunkDataTO = null; + while (rs.next()) { + + chunkDataTO = new PtGChunkDataTO(); + chunkDataTO.setStatus(rs.getInt("sg.statusCode")); + chunkDataTO.setRequestToken(rs.getString("rq.r_token")); + chunkDataTO.setPrimaryKey(rs.getLong("rg.ID")); + chunkDataTO.setFromSURL(rs.getString("rg.sourceSURL")); + + chunkDataTO.setNormalizedStFN(rs.getString("rg.normalized_sourceSURL_StFN")); + int uniqueID = rs.getInt("rg.sourceSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(new Integer(uniqueID)); + } + + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setLifeTime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setClientDN(rs.getString("rq.client_dn")); + + /** + * This code is only for the 
1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separeted by the "#" char. The proxy is a BLOB, hence it has + * to be properly conveted in string. + */ + java.sql.Blob blob = rs.getBlob("rq.proxy"); + if (!rs.wasNull() && blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + chunkDataTO.setVomsAttributes(new String(bdata)); + } + chunkDataTO.setDirOption(rs.getBoolean("d.isSourceADirectory")); + chunkDataTO.setAllLevelRecursive(rs.getBoolean("d.allLevelRecursive")); + chunkDataTO.setNumLevel(rs.getInt("d.numOfLevels")); + + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTG CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return new ArrayList(); + } finally { + close(rs); + close(find); + } + } + + private String buildExpainationSet(String explanation) { + + return " sg.explanation='" + explanation + "' "; + } + + private String buildTokenWhereClause(TRequestToken requestToken) { + + return " rq.r_token='" + requestToken.toString() + "' "; + } + + private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { + + return " ( rg.sourceSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rg.sourceSURL IN " + + makeSurlString(surls) + + " ) "; + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtGChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/PtGChunkDataTO.java index d5dab7f0..25c2864a 100644 --- a/src/main/java/it/grid/storm/catalogs/PtGChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/PtGChunkDataTO.java @@ -1,298 +1,283 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.common.types.TURLPrefix; +import it.grid.storm.namespace.model.Protocol; +import it.grid.storm.srm.types.TStatusCode; import java.sql.Timestamp; import java.util.List; -import it.grid.storm.namespace.model.Protocol; /** - * Class that represents a row in the Persistence Layer: this is all raw data - * referring to the PtGChunkData proper, that is, String and primitive types. - * - * Each field is initialized with default values as per SRM 2.2 specification: - * protocolList GSIFTP dirOption false status SRM_REQUEST_QUEUED - * - * All other fields are 0 if int, or a white space if String. - * + * Class that represents a row in the Persistence Layer: this is all raw data referring to the + * PtGChunkData proper, that is, String and primitive types. + * + *

Each field is initialized with default values as per SRM 2.2 specification: protocolList + * GSIFTP dirOption false status SRM_REQUEST_QUEUED + * + *

All other fields are 0 if int, or a white space if String. + * * @author EGRID ICTP * @version 3.0 * @date June 2005 */ public class PtGChunkDataTO { - private static final String FQAN_SEPARATOR = "#"; - /* Database table request_Get fields BEGIN */ - private long primaryKey = -1; // ID primary key of record in DB - private boolean dirOption; // initialised in constructor - private String fromSURL = " "; - private String normalizedStFN = null; - private Integer surlUniqueID = null; - /* Database table request_Get fields END */ - - private String requestToken = " "; - private int lifetime = 0; - private boolean allLevelRecursive; // initialised in constructor - private int numLevel; // initialised in constructor - private List protocolList = null; // initialised in constructor - private long filesize = 0; - private int status; // initialised in constructor - private String errString = " "; - private String turl = " "; - private Timestamp timeStamp; - private String clientDN = null; - private String vomsAttributes = null; - - public PtGChunkDataTO() { + private static final String FQAN_SEPARATOR = "#"; + /* Database table request_Get fields BEGIN */ + private long primaryKey = -1; // ID primary key of record in DB + private boolean dirOption; // initialised in constructor + private String fromSURL = " "; + private String normalizedStFN = null; + private Integer surlUniqueID = null; + /* Database table request_Get fields END */ - TURLPrefix protocolPreferences = new TURLPrefix(); - protocolPreferences.addProtocol(Protocol.GSIFTP); - this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); - this.status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - this.dirOption = false; - // - this.allLevelRecursive = false; - this.numLevel = 0; - } + private String requestToken = " "; + private int lifetime = 0; + private boolean allLevelRecursive; // initialised in constructor + private int numLevel; // initialised in constructor + private List protocolList = null; // initialised in constructor + private long filesize = 0; + private int status; // initialised in constructor + private String errString = " "; + private String turl = " "; + private Timestamp timeStamp; + private String clientDN = null; + private String vomsAttributes = null; - public long primaryKey() { + public PtGChunkDataTO() { - return primaryKey; - } + TURLPrefix protocolPreferences = new TURLPrefix(); + protocolPreferences.addProtocol(Protocol.GSIFTP); + this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); + this.status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + this.dirOption = false; + // + this.allLevelRecursive = false; + this.numLevel = 0; + } - public void setPrimaryKey(long n) { + public long primaryKey() { - primaryKey = n; - } + return primaryKey; + } - public String requestToken() { + public void setPrimaryKey(long n) { - return requestToken; - } + primaryKey = n; + } - public void setRequestToken(String s) { + public String requestToken() { - requestToken = s; - } + return requestToken; + } - public Timestamp timeStamp() { + public void setRequestToken(String s) { - return timeStamp; - } + requestToken = s; + } - public void setTimeStamp(Timestamp timeStamp) { + public Timestamp timeStamp() { - this.timeStamp = timeStamp; - } + return timeStamp; + } - public String fromSURL() { + public void setTimeStamp(Timestamp timeStamp) { - return fromSURL; - } + this.timeStamp = timeStamp; + } - public void setFromSURL(String s) 
{ + public String fromSURL() { - fromSURL = s; - } + return fromSURL; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + public void setFromSURL(String s) { - this.normalizedStFN = normalizedStFN; - } + fromSURL = s; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - return normalizedStFN; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @param sURLUniqueID - * the sURLUniqueID to set - */ - public void setSurlUniqueID(Integer sURLUniqueID) { + /** @return the normalizedStFN */ + public String normalizedStFN() { - this.surlUniqueID = sURLUniqueID; - } + return normalizedStFN; + } - /** - * @return the sURLUniqueID - */ - public Integer surlUniqueID() { + /** @param sURLUniqueID the sURLUniqueID to set */ + public void setSurlUniqueID(Integer sURLUniqueID) { - return surlUniqueID; - } + this.surlUniqueID = sURLUniqueID; + } - public int lifeTime() { + /** @return the sURLUniqueID */ + public Integer surlUniqueID() { - return lifetime; - } + return surlUniqueID; + } - public void setLifeTime(int n) { + public int lifeTime() { - lifetime = n; - } + return lifetime; + } - public boolean dirOption() { + public void setLifeTime(int n) { - return dirOption; - } + lifetime = n; + } - public void setDirOption(boolean b) { + public boolean dirOption() { - dirOption = b; - } + return dirOption; + } - public boolean allLevelRecursive() { + public void setDirOption(boolean b) { - return allLevelRecursive; - } + dirOption = b; + } - public void setAllLevelRecursive(boolean b) { + public boolean allLevelRecursive() { - allLevelRecursive = b; - } + return allLevelRecursive; + } - public int numLevel() { + public void setAllLevelRecursive(boolean b) { - return numLevel; - } + allLevelRecursive = b; + } - public void setNumLevel(int n) { + public int numLevel() { - numLevel = n; - } + return numLevel; + } - public List protocolList() { + public void setNumLevel(int n) { - return protocolList; - } + numLevel = n; + } - public void setProtocolList(List l) { + public List protocolList() { - if ((l != null) && (!l.isEmpty())) - protocolList = l; - } + return protocolList; + } - public long fileSize() { + public void setProtocolList(List l) { - return filesize; - } + if ((l != null) && (!l.isEmpty())) protocolList = l; + } - public void setFileSize(long n) { + public long fileSize() { - filesize = n; - } + return filesize; + } - public int status() { + public void setFileSize(long n) { - return status; - } + filesize = n; + } - public void setStatus(int n) { + public int status() { - status = n; - } + return status; + } - public String errString() { + public void setStatus(int n) { - return errString; - } + status = n; + } - public void setErrString(String s) { + public String errString() { - errString = s; - } + return errString; + } - public String turl() { + public void setErrString(String s) { - return turl; - } + errString = s; + } - public void setTurl(String s) { + public String turl() { - turl = s; - } + return turl; + } - public String clientDN() { + public void setTurl(String s) { - return clientDN; - } + turl = s; + } - public void setClientDN(String s) { + public String clientDN() { - clientDN = s; - } + return clientDN; + } - public String vomsAttributes() { + public void setClientDN(String s) { - return vomsAttributes; - } + clientDN = s; + } - public void 
setVomsAttributes(String s) { + public String vomsAttributes() { - vomsAttributes = s; - } + return vomsAttributes; + } - public void setVomsAttributes(String[] fqaNsAsString) { + public void setVomsAttributes(String s) { - vomsAttributes = ""; - for (int i = 0; i < fqaNsAsString.length; i++) { - vomsAttributes += fqaNsAsString[i]; - if (i < fqaNsAsString.length - 1) { - vomsAttributes += FQAN_SEPARATOR; - } - } + vomsAttributes = s; + } - } + public void setVomsAttributes(String[] fqaNsAsString) { - public String[] vomsAttributesArray() { + vomsAttributes = ""; + for (int i = 0; i < fqaNsAsString.length; i++) { + vomsAttributes += fqaNsAsString[i]; + if (i < fqaNsAsString.length - 1) { + vomsAttributes += FQAN_SEPARATOR; + } + } + } - return vomsAttributes.split(FQAN_SEPARATOR); - } + public String[] vomsAttributesArray() { - public String toString() { + return vomsAttributes.split(FQAN_SEPARATOR); + } - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(requestToken); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - sb.append(surlUniqueID); - sb.append(" "); - sb.append(lifetime); - sb.append(" "); - sb.append(dirOption); - sb.append(" "); - sb.append(allLevelRecursive); - sb.append(" "); - sb.append(numLevel); - sb.append(" "); - sb.append(protocolList); - sb.append(" "); - sb.append(filesize); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - sb.append(turl); - return sb.toString(); - } + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(requestToken); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(lifetime); + sb.append(" "); + sb.append(dirOption); + sb.append(" "); + sb.append(allLevelRecursive); + sb.append(" "); + sb.append(numLevel); + sb.append(" "); + sb.append(protocolList); + sb.append(" "); + sb.append(filesize); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + sb.append(turl); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtGData.java b/src/main/java/it/grid/storm/catalogs/PtGData.java index 6e851823..d68c8654 100644 --- a/src/main/java/it/grid/storm/catalogs/PtGData.java +++ b/src/main/java/it/grid/storm/catalogs/PtGData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -10,33 +9,25 @@ public interface PtGData extends FileTransferData { - /** - * Method that returns the requested pin life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds getPinLifeTime(); + /** Method that returns the requested pin life time for this chunk of the srm request. */ + public TLifeTimeInSeconds getPinLifeTime(); - /** - * Method that returns the dirOption specified in the srm request. - */ - public TDirOption getDirOption(); + /** Method that returns the dirOption specified in the srm request. */ + public TDirOption getDirOption(); - /** - * Method that returns the file size for this chunk of the srm request. - */ - public TSizeInBytes getFileSize(); + /** Method that returns the file size for this chunk of the srm request. 
*/ + public TSizeInBytes getFileSize(); - /** - * Method used to set the size of the file corresponding to the requested - * SURL. If the supplied TSizeInByte is null, then nothing gets set! - */ - public void setFileSize(TSizeInBytes size); - - /** - * Method that sets the status of this request to SRM_FILE_PINNED; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_FILE_PINNED(String explanation); + /** + * Method used to set the size of the file corresponding to the requested SURL. If the supplied + * TSizeInByte is null, then nothing gets set! + */ + public void setFileSize(TSizeInBytes size); + /** + * Method that sets the status of this request to SRM_FILE_PINNED; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. + */ + public void changeStatusSRM_FILE_PINNED(String explanation); } diff --git a/src/main/java/it/grid/storm/catalogs/PtGPersistentChunkData.java b/src/main/java/it/grid/storm/catalogs/PtGPersistentChunkData.java index a6fe466a..273dad1e 100644 --- a/src/main/java/it/grid/storm/catalogs/PtGPersistentChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/PtGPersistentChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -14,191 +13,182 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a PrepareToGetChunkData, that is part of a multifile - * PrepareToGet srm request. It contains data about: the requestToken, the - * fromSURL, the requested lifeTime of pinning, the TDirOption which tells - * whether the requested SURL is a directory and if it must be recursed at all - * levels, as well as the desired number of levels to recurse, the desired - * transferProtocols in order of preference, the fileSize, and the transferURL - * for the supplied SURL. - * + * This class represents a PrepareToGetChunkData, that is part of a multifile PrepareToGet srm + * request. It contains data about: the requestToken, the fromSURL, the requested lifeTime of + * pinning, the TDirOption which tells whether the requested SURL is a directory and if it must be + * recursed at all levels, as well as the desired number of levels to recurse, the desired + * transferProtocols in order of preference, the fileSize, and the transferURL for the supplied + * SURL. 
+ * * @author EGRID - ICTP Trieste * @date March 21st, 2005 * @version 3.0 */ -public class PtGPersistentChunkData extends IdentityPtGData implements - PersistentChunkData { - - private static final Logger log = LoggerFactory - .getLogger(PtGPersistentChunkData.class); - - /** - * long representing the primary key for the persistence layer, in the - * status_Get table - */ - private long primaryKey = -1; - - /** - * This is the requestToken of the multifile srm request to which this chunk - * belongs - */ - private TRequestToken requestToken; - - /** - * @param requestToken - * @param fromSURL - * @param lifeTime - * @param dirOption - * @param desiredProtocols - * @param fileSize - * @param status - * @param transferURL - * @throws InvalidPtGDataAttributesException - */ - public PtGPersistentChunkData(GridUserInterface auth, - TRequestToken requestToken, TSURL fromSURL, TLifeTimeInSeconds lifeTime, - TDirOption dirOption, TURLPrefix desiredProtocols, TSizeInBytes fileSize, - TReturnStatus status, TTURL transferURL) - throws InvalidPtGDataAttributesException, - InvalidPtGDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(auth, fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, - status, transferURL); - if (requestToken == null) { - log.debug("PtGPersistentChunkData: requestToken is null!"); - throw new InvalidPtGPersistentChunkDataAttributesException(requestToken, - fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, - transferURL); - } - - this.requestToken = requestToken; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - @Override - public long getPrimaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the requestToken of the srm request to which this chunk - * belongs. - */ - @Override - public TRequestToken getRequestToken() { - - return requestToken; - } - - /** - * Method that sets the status of this request to SRM_FILE_PINNED; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_FILE_PINNED(String explanation) { - - setStatus(TStatusCode.SRM_FILE_PINNED, explanation); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = super.hashCode(); - result = prime * result + (int) (primaryKey ^ (primaryKey >>> 32)); - result = prime * result - + ((requestToken == null) ? 
0 : requestToken.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PtGPersistentChunkData other = (PtGPersistentChunkData) obj; - if (primaryKey != other.primaryKey) { - return false; - } - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - return true; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("PtGPersistentChunkData [primaryKey="); - builder.append(primaryKey); - builder.append(", requestToken="); - builder.append(requestToken); - builder.append(", pinLifeTime="); - builder.append(pinLifeTime); - builder.append(", dirOption="); - builder.append(dirOption); - builder.append(", fileSize="); - builder.append(fileSize); - builder.append(", transferProtocols="); - builder.append(transferProtocols); - builder.append(", SURL="); - builder.append(SURL); - builder.append(", status="); - builder.append(status); - builder.append(", transferURL="); - builder.append(transferURL); - builder.append("]"); - return builder.toString(); - } - - @Override - public long getIdentifier() { - - return getPrimaryKey(); - } +public class PtGPersistentChunkData extends IdentityPtGData implements PersistentChunkData { + + private static final Logger log = LoggerFactory.getLogger(PtGPersistentChunkData.class); + + /** long representing the primary key for the persistence layer, in the status_Get table */ + private long primaryKey = -1; + + /** This is the requestToken of the multifile srm request to which this chunk belongs */ + private TRequestToken requestToken; + + /** + * @param requestToken + * @param fromSURL + * @param lifeTime + * @param dirOption + * @param desiredProtocols + * @param fileSize + * @param status + * @param transferURL + * @throws InvalidPtGDataAttributesException + */ + public PtGPersistentChunkData( + GridUserInterface auth, + TRequestToken requestToken, + TSURL fromSURL, + TLifeTimeInSeconds lifeTime, + TDirOption dirOption, + TURLPrefix desiredProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtGDataAttributesException, InvalidPtGDataAttributesException, + InvalidFileTransferDataAttributesException, InvalidSurlRequestDataAttributesException { + + super(auth, fromSURL, lifeTime, dirOption, desiredProtocols, fileSize, status, transferURL); + if (requestToken == null) { + log.debug("PtGPersistentChunkData: requestToken is null!"); + throw new InvalidPtGPersistentChunkDataAttributesException( + requestToken, + fromSURL, + lifeTime, + dirOption, + desiredProtocols, + fileSize, + status, + transferURL); + } + + this.requestToken = requestToken; + } + + /** Method used to get the primary key used in the persistence layer! */ + @Override + public long getPrimaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the requestToken of the srm request to which this chunk belongs. 
*/ + @Override + public TRequestToken getRequestToken() { + + return requestToken; + } + + /** + * Method that sets the status of this request to SRM_FILE_PINNED; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. + */ + public void changeStatusSRM_FILE_PINNED(String explanation) { + + setStatus(TStatusCode.SRM_FILE_PINNED, explanation); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = super.hashCode(); + result = prime * result + (int) (primaryKey ^ (primaryKey >>> 32)); + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PtGPersistentChunkData other = (PtGPersistentChunkData) obj; + if (primaryKey != other.primaryKey) { + return false; + } + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + return true; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("PtGPersistentChunkData [primaryKey="); + builder.append(primaryKey); + builder.append(", requestToken="); + builder.append(requestToken); + builder.append(", pinLifeTime="); + builder.append(pinLifeTime); + builder.append(", dirOption="); + builder.append(dirOption); + builder.append(", fileSize="); + builder.append(fileSize); + builder.append(", transferProtocols="); + builder.append(transferProtocols); + builder.append(", SURL="); + builder.append(SURL); + builder.append(", status="); + builder.append(status); + builder.append(", transferURL="); + builder.append(transferURL); + builder.append("]"); + return builder.toString(); + } + + @Override + public long getIdentifier() { + + return getPrimaryKey(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtPChunkCatalog.java b/src/main/java/it/grid/storm/catalogs/PtPChunkCatalog.java index df12c1e1..de37da9a 100644 --- a/src/main/java/it/grid/storm/catalogs/PtPChunkCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/PtPChunkCatalog.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -25,576 +24,569 @@ import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Class that represents StoRMs PtPChunkCatalog: it collects PtPChunkData and - * provides methods for looking up a PtPChunkData based on TRequestToken, as - * well as for updating data into persistence. Methods are also supplied to - * evaluate if a SURL is in SRM_SPACE_AVAILABLE state, and to transit expired - * SURLs in SRM_SPACE_AVAILABLE state to SRM_FILE_LIFETIME_EXPIRED. 
- * + * Class that represents StoRMs PtPChunkCatalog: it collects PtPChunkData and provides methods for + * looking up a PtPChunkData based on TRequestToken, as well as for updating data into persistence. + * Methods are also supplied to evaluate if a SURL is in SRM_SPACE_AVAILABLE state, and to transit + * expired SURLs in SRM_SPACE_AVAILABLE state to SRM_FILE_LIFETIME_EXPIRED. + * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 3.0 */ public class PtPChunkCatalog { - private static final Logger log = LoggerFactory - .getLogger(PtPChunkCatalog.class); - - /* only instance of PtPChunkCatalog present in StoRM! */ - private static final PtPChunkCatalog cat = new PtPChunkCatalog(); - private final PtPChunkDAO dao = PtPChunkDAO.getInstance(); - - private PtPChunkCatalog() {} - - /** - * Method that returns the only instance of PtPChunkCatalog available. - */ - public static PtPChunkCatalog getInstance() { - - return cat; - } - - /** - * Method used to update into Persistence a retrieved PtPChunkData. - */ - synchronized public void update(PtPPersistentChunkData chunkData) { - - PtPChunkDataTO to = new PtPChunkDataTO(); - /* rimary key needed by DAO Object */ - to.setPrimaryKey(chunkData.getPrimaryKey()); - to.setStatus(StatusCodeConverter.getInstance().toDB( - chunkData.getStatus().getStatusCode())); - to.setErrString(chunkData.getStatus().getExplanation()); - to.setTransferURL(TURLConverter.getInstance().toDB( - chunkData.getTransferURL().toString())); - to.setPinLifetime(PinLifetimeConverter.getInstance().toDB( - chunkData.pinLifetime().value())); - to.setFileLifetime(FileLifetimeConverter.getInstance().toDB( - chunkData.fileLifetime().value())); - to.setFileStorageType(FileStorageTypeConverter.getInstance().toDB( - chunkData.fileStorageType())); - to.setOverwriteOption(OverwriteModeConverter.getInstance().toDB( - chunkData.overwriteOption())); - to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); - to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); - to.setClientDN(chunkData.getUser().getDn()); - if (chunkData.getUser() instanceof AbstractGridUser) { - if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { - to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()) - .getFQANsAsString()); - } - - } - dao.update(to); - } - - /** - * Method that returns a Collection of PtPChunkData Objects matching the - * supplied TRequestToken. If any of the data associated to the TRequestToken - * is not well formed and so does not allow a PtPChunkData Object to be - * created, then that part of the request is dropped, gets logged and an - * attempt is made to write in the DB that the chunk was malformed; the - * processing continues with the next part. Only the valid chunks get - * returned. If there are no chunks to process then an empty Collection is - * returned, and a messagge gets logged. NOTE! Chunks in SRM_ABORTED status - * are NOT returned! This is imporant because this method is intended to be - * used by the Feeders to fetch all chunks in the request, and aborted chunks - * should not be picked up for processing! - */ - synchronized public Collection lookup( - final TRequestToken rt) { - - Collection chunkTOs = dao.find(rt); - log.debug("PtPChunkCatalog: retrieved data {}", chunkTOs); - return buildChunkDataList(chunkTOs); - } - - /** - * Private method used to create a PtPChunkData object, from a PtPChunkDataTO - * and TRequestToken. 
If a chunk cannot be created, an error messagge gets - * logged and an attempt is made to signal in the DB that the chunk is - * malformed. - */ - private PtPPersistentChunkData makeOne(PtPChunkDataTO auxTO, TRequestToken rt) { - - StringBuilder errorSb = new StringBuilder(); - // toSURL - TSURL toSURL = null; - try { - toSURL = TSURL.makeFromStringValidate(auxTO.toSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (auxTO.normalizedStFN() != null) { - toSURL.setNormalizedStFN(auxTO.normalizedStFN()); - } - if (auxTO.surlUniqueID() != null) { - toSURL.setUniqueID(auxTO.surlUniqueID().intValue()); - } - // pinLifetime - TLifeTimeInSeconds pinLifetime = null; - try { - long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM( - auxTO.pinLifetime()); - // Check for max value allowed - long max = Configuration.getInstance().getPinLifetimeMaximum(); - if (pinLifeTime > max) { - log.warn("PinLifeTime is greater than the max value allowed. Drop the " - + "value to the max = {} seconds", max); - pinLifeTime = max; - } - pinLifetime = TLifeTimeInSeconds.make(pinLifeTime, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // fileLifetime - TLifeTimeInSeconds fileLifetime = null; - try { - fileLifetime = TLifeTimeInSeconds.make(FileLifetimeConverter - .getInstance().toStoRM(auxTO.fileLifetime()), TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // fileStorageType - TFileStorageType fileStorageType = FileStorageTypeConverter.getInstance() - .toSTORM(auxTO.fileStorageType()); - if (fileStorageType == TFileStorageType.EMPTY) { - errorSb.append("\nTFileStorageType could not be translated from " - + "its String representation! String: " + auxTO.fileStorageType()); - // Use the default value defined in Configuration. - fileStorageType = TFileStorageType.getTFileStorageType(Configuration - .getInstance().getDefaultFileStorageType()); - errorSb.append("\nUsed the default TFileStorageType as defined " - + "in StoRM config.: " + fileStorageType); - } - // expectedFileSize - // - // WARNING! A converter is used because the DB uses 0 for empty, whereas - // StoRM object model does allow a 0 size! Since this is an optional - // field - // in the SRM specs, null must be converted explicitly to Empty - // TSizeInBytes - // because it is indeed well formed! - TSizeInBytes expectedFileSize = null; - TSizeInBytes emptySize = TSizeInBytes.makeEmpty(); - long sizeTranslation = SizeInBytesIntConverter.getInstance().toStoRM( - auxTO.expectedFileSize()); - if (emptySize.value() == sizeTranslation) { - expectedFileSize = emptySize; - } else { - try { - expectedFileSize = TSizeInBytes.make(auxTO.expectedFileSize(), - SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - } - // spaceToken! - // - // WARNING! A converter is still needed because of DB logic for missing - // SpaceToken makes use of NULL, whereas StoRM object model does not - // allow - // for null! It makes use of a specific Empty type. - // - // Indeed, the SpaceToken field is optional, so a request with a null - // value - // for the SpaceToken field in the DB, _is_ well formed! 
- TSpaceToken spaceToken = null; - TSpaceToken emptyToken = TSpaceToken.makeEmpty(); - /** - * convert empty string representation of DPM into StoRM representation; - */ - String spaceTokenTranslation = SpaceTokenStringConverter.getInstance() - .toStoRM(auxTO.spaceToken()); - if (emptyToken.toString().equals(spaceTokenTranslation)) { - spaceToken = emptyToken; - } else { - try { - spaceToken = TSpaceToken.make(spaceTokenTranslation); - } catch (InvalidTSpaceTokenAttributesException e) { - errorSb.append("\n"); - errorSb.append(e); - } - } - // overwriteOption! - TOverwriteMode overwriteOption = OverwriteModeConverter.getInstance() - .toSTORM(auxTO.overwriteOption()); - if (overwriteOption == TOverwriteMode.EMPTY) { - errorSb.append("\nTOverwriteMode could not be translated " - + "from its String representation! String: " + auxTO.overwriteOption()); - overwriteOption = null; - } - // transferProtocols - TURLPrefix transferProtocols = TransferProtocolListConverter.toSTORM(auxTO - .protocolList()); - if (transferProtocols.size() == 0) { - errorSb.append("\nEmpty list of TransferProtocols " - + "or could not translate TransferProtocols!"); - transferProtocols = null; // fail construction of PtPChunkData! - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance() - .toSTORM(auxTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + auxTO.status()); - } else { - status = new TReturnStatus(code, auxTO.errString()); - } - GridUserInterface gridUser = null; - try { - if (auxTO.vomsAttributes() != null - && !auxTO.vomsAttributes().trim().equals("")) { - gridUser = GridUserManager.makeVOMSGridUser(auxTO.clientDN(), - auxTO.vomsAttributesArray()); - } else { - gridUser = GridUserManager.makeGridUser(auxTO.clientDN()); - } - - } catch (IllegalArgumentException e) { - log.error("Unexpected error on voms grid user creation. " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - - // transferURL - /** - * whatever is read is just meaningless because PtP will fill it in!!! So - * create an Empty TTURL by default! Vital to avoid problems with unknown - * DPM NULL/EMPTY logic policy! - */ - TTURL transferURL = TTURL.makeEmpty(); - // make PtPChunkData - PtPPersistentChunkData aux = null; - try { - aux = new PtPPersistentChunkData(gridUser, rt, toSURL, pinLifetime, - fileLifetime, fileStorageType, spaceToken, expectedFileSize, - transferProtocols, overwriteOption, status, transferURL); - aux.setPrimaryKey(auxTO.primaryKey()); - } catch (InvalidPtPPersistentChunkDataAttributesException e) { - dao.signalMalformedPtPChunk(auxTO); - log.warn("PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" - + " from persistence. Dropping chunk from request: {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } catch (InvalidPtPDataAttributesException e) { - dao.signalMalformedPtPChunk(auxTO); - log.warn("PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" - + " from persistence. Dropping chunk from request: {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } catch (InvalidFileTransferDataAttributesException e) { - dao.signalMalformedPtPChunk(auxTO); - log.warn("PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" - + " from persistence. 
Dropping chunk from request: {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } catch (InvalidSurlRequestDataAttributesException e) { - dao.signalMalformedPtPChunk(auxTO); - log.warn("PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" - + " from persistence. Dropping chunk from request: {}", rt); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - /** - * - * Adds to the received PtPChunkDataTO the normalized StFN and the SURL unique - * ID taken from the PtPChunkData - * - * @param chunkTO - * @param chunk - */ - private void completeTO(ReducedPtPChunkDataTO chunkTO, - final ReducedPtPChunkData chunk) { - - chunkTO.setNormalizedStFN(chunk.toSURL().normalizedStFN()); - chunkTO.setSurlUniqueID(new Integer(chunk.toSURL().uniqueId())); - } - - /** - * - * Creates a ReducedPtGChunkDataTO from the received PtGChunkDataTO and - * completes it with the normalized StFN and the SURL unique ID taken from the - * PtGChunkData - * - * @param chunkTO - * @param chunk - * @return - * @throws InvalidReducedPtPChunkDataAttributesException - */ - private ReducedPtPChunkDataTO completeTO(PtPChunkDataTO chunkTO, - final PtPPersistentChunkData chunk) - throws InvalidReducedPtPChunkDataAttributesException { - - ReducedPtPChunkDataTO reducedChunkTO = this.reduce(chunkTO); - this.completeTO(reducedChunkTO, this.reduce(chunk)); - return reducedChunkTO; - } - - /** - * Creates a ReducedPtPChunkData from the data contained in the received - * PtPChunkData - * - * @param chunk - * @return - * @throws InvalidReducedPtPChunkDataAttributesException - */ - private ReducedPtPChunkData reduce(PtPPersistentChunkData chunk) - throws InvalidReducedPtPChunkDataAttributesException { - - ReducedPtPChunkData reducedChunk = new ReducedPtPChunkData(chunk.getSURL(), - chunk.getStatus(), chunk.fileStorageType(), chunk.fileLifetime()); - reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); - return reducedChunk; - } - - /** - * Creates a ReducedPtPChunkDataTO from the data contained in the received - * PtPChunkDataTO - * - * @param chunkTO - * @return - */ - private ReducedPtPChunkDataTO reduce(PtPChunkDataTO chunkTO) { - - ReducedPtPChunkDataTO reducedChunkTO = new ReducedPtPChunkDataTO(); - reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); - reducedChunkTO.setToSURL(chunkTO.toSURL()); - reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); - reducedChunkTO.setSurlUniqueID(chunkTO.surlUniqueID()); - reducedChunkTO.setStatus(chunkTO.status()); - reducedChunkTO.setErrString(chunkTO.errString()); - return reducedChunkTO; - } - - /** - * Checks if the received PtPChunkDataTO contains the fields not set by the - * front end but required - * - * @param chunkTO - * @return - */ - private boolean isComplete(PtPChunkDataTO chunkTO) { - - return (chunkTO.normalizedStFN() != null) - && (chunkTO.surlUniqueID() != null); - } - - /** - * Checks if the received ReducedPtGChunkDataTO contains the fields not set by - * the front end but required - * - * @param reducedChunkTO - * @return - */ - private boolean isComplete(ReducedPtPChunkDataTO reducedChunkTO) { - - return (reducedChunkTO.normalizedStFN() != null) - && (reducedChunkTO.surlUniqueID() != null); - } - - public Collection lookupReducedPtPChunkData( - TRequestToken requestToken, Collection surls) { - - Collection reducedChunkDataTOs = dao.findReduced( - requestToken.getValue(), surls); - log.debug("PtP CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); - return 
buildReducedChunkDataList(reducedChunkDataTOs); - } - - public Collection lookupPtPChunkData(TSURL surl, - GridUserInterface user) { - - return lookupPtPChunkData( - (List) Arrays.asList(new TSURL[] { surl }), user); - } - - private Collection lookupPtPChunkData( - List surls, GridUserInterface user) { - - int[] surlsUniqueIDs = new int[surls.size()]; - String[] surlsArray = new String[surls.size()]; - int index = 0; - for (TSURL tsurl : surls) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surlsArray[index] = tsurl.rawSurl(); - index++; - } - Collection chunkDataTOs = dao.find(surlsUniqueIDs, - surlsArray, user.getDn()); - log.debug("PtP CHUNK CATALOG: retrieved data {}", chunkDataTOs); - return buildChunkDataList(chunkDataTOs); - } - - private Collection buildChunkDataList( - Collection chunkDataTOs) { - - ArrayList list = new ArrayList(); - PtPPersistentChunkData chunk; - for (PtPChunkDataTO chunkTO : chunkDataTOs) { - chunk = makeOne(chunkTO); - if (chunk == null) { - continue; - } - list.add(chunk); - if (isComplete(chunkTO)) { - continue; - } - try { - dao.updateIncomplete(completeTO(chunkTO, chunk)); - } catch (InvalidReducedPtPChunkDataAttributesException e) { - log.warn("PtG CHUNK CATALOG! unable to add missing informations on " - + "DB to the request: {}", e.getMessage()); - } - } - log.debug("PtPChunkCatalog: returning {}\n\n", list); - return list; - } - - private PtPPersistentChunkData makeOne(PtPChunkDataTO chunkTO) { - - try { - return makeOne(chunkTO, - new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); - } catch (InvalidTRequestTokenAttributesException e) { - throw new IllegalStateException( - "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " - + e); - } - } - - private Collection buildReducedChunkDataList( - Collection chunkDataTOCollection) { - - ArrayList list = new ArrayList(); - ReducedPtPChunkData reducedChunkData; - for (ReducedPtPChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { - reducedChunkData = makeOneReduced(reducedChunkDataTO); - if (reducedChunkData != null) { - list.add(reducedChunkData); - if (!this.isComplete(reducedChunkDataTO)) { - this.completeTO(reducedChunkDataTO, reducedChunkData); - dao.updateIncomplete(reducedChunkDataTO); - } - } - } - log.debug("PtP CHUNK CATALOG: returning {}", list); - return list; - } - - private ReducedPtPChunkData makeOneReduced( - ReducedPtPChunkDataTO reducedChunkDataTO) { - - StringBuilder errorSb = new StringBuilder(); - // fromSURL - TSURL toSURL = null; - try { - toSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.toSURL()); - } catch (InvalidTSURLAttributesException e) { - errorSb.append(e); - } - if (reducedChunkDataTO.normalizedStFN() != null) { - toSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); - } - if (reducedChunkDataTO.surlUniqueID() != null) { - toSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); - } - // status - TReturnStatus status = null; - TStatusCode code = StatusCodeConverter.getInstance().toSTORM( - reducedChunkDataTO.status()); - if (code == TStatusCode.EMPTY) { - errorSb.append("\nRetrieved StatusCode was not recognised: " - + reducedChunkDataTO.status()); - } else { - status = new TReturnStatus(code, reducedChunkDataTO.errString()); - } - // fileStorageType - TFileStorageType fileStorageType = FileStorageTypeConverter.getInstance() - .toSTORM(reducedChunkDataTO.fileStorageType()); - if (fileStorageType == TFileStorageType.EMPTY) { - errorSb.append("\nTFileStorageType could not be " - + "translated from its String 
representation! String: " - + reducedChunkDataTO.fileStorageType()); - // Use the default value defined in Configuration. - fileStorageType = TFileStorageType.getTFileStorageType(Configuration - .getInstance().getDefaultFileStorageType()); - errorSb - .append("\nUsed the default TFileStorageType as defined in StoRM config.: " - + fileStorageType); - } - // fileLifetime - TLifeTimeInSeconds fileLifetime = null; - try { - fileLifetime = TLifeTimeInSeconds.make(FileLifetimeConverter - .getInstance().toStoRM(reducedChunkDataTO.fileLifetime()), - TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - errorSb.append("\n"); - errorSb.append(e); - } - // make ReducedPtPChunkData - ReducedPtPChunkData aux = null; - try { - aux = new ReducedPtPChunkData(toSURL, status, fileStorageType, - fileLifetime); - aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); - } catch (InvalidReducedPtPChunkDataAttributesException e) { - log.warn("PtP CHUNK CATALOG! Retrieved malformed Reduced PtP" - + " chunk data from persistence: dropping reduced chunk..."); - log.warn(e.getMessage(), e); - log.warn(errorSb.toString()); - } - // end... - return aux; - } - - public int updateStatus(TRequestToken requestToken, TSURL surl, - TStatusCode statusCode, String explanation) { - - return dao.updateStatus(requestToken, new int[] { surl.uniqueId() }, - new String[] { surl.rawSurl() }, statusCode, explanation); - } - - public int updateFromPreviousStatus(TRequestToken requestToken, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - return dao.updateStatusOnMatchingStatus(requestToken, expectedStatusCode, - newStatusCode, explanation); - } - - public int updateFromPreviousStatus(TRequestToken requestToken, - List surlList, TStatusCode expectedStatusCode, - TStatusCode newStatusCode) { - - int[] surlsUniqueIDs = new int[surlList.size()]; - String[] surls = new String[surlList.size()]; - int index = 0; - for (TSURL tsurl : surlList) { - surlsUniqueIDs[index] = tsurl.uniqueId(); - surls[index] = tsurl.rawSurl(); - index++; - } - return dao.updateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode); - } - + private static final Logger log = LoggerFactory.getLogger(PtPChunkCatalog.class); + + /* only instance of PtPChunkCatalog present in StoRM! */ + private static final PtPChunkCatalog cat = new PtPChunkCatalog(); + private final PtPChunkDAO dao = PtPChunkDAO.getInstance(); + + private PtPChunkCatalog() {} + + /** Method that returns the only instance of PtPChunkCatalog available. */ + public static PtPChunkCatalog getInstance() { + + return cat; + } + + /** Method used to update into Persistence a retrieved PtPChunkData. 
*/ + public synchronized void update(PtPPersistentChunkData chunkData) { + + PtPChunkDataTO to = new PtPChunkDataTO(); + /* Primary key needed by DAO Object */ + to.setPrimaryKey(chunkData.getPrimaryKey()); + to.setStatus(StatusCodeConverter.getInstance().toDB(chunkData.getStatus().getStatusCode())); + to.setErrString(chunkData.getStatus().getExplanation()); + to.setTransferURL(TURLConverter.getInstance().toDB(chunkData.getTransferURL().toString())); + to.setPinLifetime(PinLifetimeConverter.getInstance().toDB(chunkData.pinLifetime().value())); + to.setFileLifetime(FileLifetimeConverter.getInstance().toDB(chunkData.fileLifetime().value())); + to.setFileStorageType(FileStorageTypeConverter.getInstance().toDB(chunkData.fileStorageType())); + to.setOverwriteOption(OverwriteModeConverter.getInstance().toDB(chunkData.overwriteOption())); + to.setNormalizedStFN(chunkData.getSURL().normalizedStFN()); + to.setSurlUniqueID(new Integer(chunkData.getSURL().uniqueId())); + to.setClientDN(chunkData.getUser().getDn()); + if (chunkData.getUser() instanceof AbstractGridUser) { + if (((AbstractGridUser) chunkData.getUser()).hasVoms()) { + to.setVomsAttributes(((AbstractGridUser) chunkData.getUser()).getFQANsAsString()); + } + } + dao.update(to); + } + + /** + * Method that returns a Collection of PtPChunkData Objects matching the supplied TRequestToken. + * If any of the data associated to the TRequestToken is not well formed and so does not allow a + * PtPChunkData Object to be created, then that part of the request is dropped, gets logged and an + * attempt is made to write in the DB that the chunk was malformed; the processing continues with + * the next part. Only the valid chunks get returned. If there are no chunks to process then an + * empty Collection is returned, and a message gets logged. NOTE! Chunks in SRM_ABORTED status + * are NOT returned! This is important because this method is intended to be used by the Feeders to + * fetch all chunks in the request, and aborted chunks should not be picked up for processing! + */ + public synchronized Collection lookup(final TRequestToken rt) { + + Collection chunkTOs = dao.find(rt); + log.debug("PtPChunkCatalog: retrieved data {}", chunkTOs); + return buildChunkDataList(chunkTOs); + } + + /** + * Private method used to create a PtPChunkData object, from a PtPChunkDataTO and TRequestToken. + * If a chunk cannot be created, an error message gets logged and an attempt is made to signal in + * the DB that the chunk is malformed. + */ + private PtPPersistentChunkData makeOne(PtPChunkDataTO auxTO, TRequestToken rt) { + + StringBuilder errorSb = new StringBuilder(); + // toSURL + TSURL toSURL = null; + try { + toSURL = TSURL.makeFromStringValidate(auxTO.toSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (auxTO.normalizedStFN() != null) { + toSURL.setNormalizedStFN(auxTO.normalizedStFN()); + } + if (auxTO.surlUniqueID() != null) { + toSURL.setUniqueID(auxTO.surlUniqueID().intValue()); + } + // pinLifetime + TLifeTimeInSeconds pinLifetime = null; + try { + long pinLifeTime = PinLifetimeConverter.getInstance().toStoRM(auxTO.pinLifetime()); + // Check for max value allowed + long max = Configuration.getInstance().getPinLifetimeMaximum(); + if (pinLifeTime > max) { + log.warn( + "PinLifeTime is greater than the max value allowed. 
Drop the " + + "value to the max = {} seconds", + max); + pinLifeTime = max; + } + pinLifetime = TLifeTimeInSeconds.make(pinLifeTime, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // fileLifetime + TLifeTimeInSeconds fileLifetime = null; + try { + fileLifetime = + TLifeTimeInSeconds.make( + FileLifetimeConverter.getInstance().toStoRM(auxTO.fileLifetime()), TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // fileStorageType + TFileStorageType fileStorageType = + FileStorageTypeConverter.getInstance().toSTORM(auxTO.fileStorageType()); + if (fileStorageType == TFileStorageType.EMPTY) { + errorSb.append( + "\nTFileStorageType could not be translated from " + + "its String representation! String: " + + auxTO.fileStorageType()); + // Use the default value defined in Configuration. + fileStorageType = + TFileStorageType.getTFileStorageType( + Configuration.getInstance().getDefaultFileStorageType()); + errorSb.append( + "\nUsed the default TFileStorageType as defined " + + "in StoRM config.: " + + fileStorageType); + } + // expectedFileSize + // + // WARNING! A converter is used because the DB uses 0 for empty, whereas + // StoRM object model does allow a 0 size! Since this is an optional + // field + // in the SRM specs, null must be converted explicitly to Empty + // TSizeInBytes + // because it is indeed well formed! + TSizeInBytes expectedFileSize = null; + TSizeInBytes emptySize = TSizeInBytes.makeEmpty(); + long sizeTranslation = SizeInBytesIntConverter.getInstance().toStoRM(auxTO.expectedFileSize()); + if (emptySize.value() == sizeTranslation) { + expectedFileSize = emptySize; + } else { + try { + expectedFileSize = TSizeInBytes.make(auxTO.expectedFileSize(), SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + } + // spaceToken! + // + // WARNING! A converter is still needed because of DB logic for missing + // SpaceToken makes use of NULL, whereas StoRM object model does not + // allow + // for null! It makes use of a specific Empty type. + // + // Indeed, the SpaceToken field is optional, so a request with a null + // value + // for the SpaceToken field in the DB, _is_ well formed! + TSpaceToken spaceToken = null; + TSpaceToken emptyToken = TSpaceToken.makeEmpty(); + /** convert empty string representation of DPM into StoRM representation; */ + String spaceTokenTranslation = + SpaceTokenStringConverter.getInstance().toStoRM(auxTO.spaceToken()); + if (emptyToken.toString().equals(spaceTokenTranslation)) { + spaceToken = emptyToken; + } else { + try { + spaceToken = TSpaceToken.make(spaceTokenTranslation); + } catch (InvalidTSpaceTokenAttributesException e) { + errorSb.append("\n"); + errorSb.append(e); + } + } + // overwriteOption! + TOverwriteMode overwriteOption = + OverwriteModeConverter.getInstance().toSTORM(auxTO.overwriteOption()); + if (overwriteOption == TOverwriteMode.EMPTY) { + errorSb.append( + "\nTOverwriteMode could not be translated " + + "from its String representation! String: " + + auxTO.overwriteOption()); + overwriteOption = null; + } + // transferProtocols + TURLPrefix transferProtocols = TransferProtocolListConverter.toSTORM(auxTO.protocolList()); + if (transferProtocols.size() == 0) { + errorSb.append( + "\nEmpty list of TransferProtocols " + "or could not translate TransferProtocols!"); + transferProtocols = null; // fail construction of PtPChunkData! 
+ } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(auxTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + auxTO.status()); + } else { + status = new TReturnStatus(code, auxTO.errString()); + } + GridUserInterface gridUser = null; + try { + if (auxTO.vomsAttributes() != null && !auxTO.vomsAttributes().trim().equals("")) { + gridUser = GridUserManager.makeVOMSGridUser(auxTO.clientDN(), auxTO.vomsAttributesArray()); + } else { + gridUser = GridUserManager.makeGridUser(auxTO.clientDN()); + } + + } catch (IllegalArgumentException e) { + log.error( + "Unexpected error on voms grid user creation. " + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + + // transferURL + /** + * whatever is read is just meaningless because PtP will fill it in!!! So create an Empty TTURL + * by default! Vital to avoid problems with unknown DPM NULL/EMPTY logic policy! + */ + TTURL transferURL = TTURL.makeEmpty(); + // make PtPChunkData + PtPPersistentChunkData aux = null; + try { + aux = + new PtPPersistentChunkData( + gridUser, + rt, + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); + aux.setPrimaryKey(auxTO.primaryKey()); + } catch (InvalidPtPPersistentChunkDataAttributesException e) { + dao.signalMalformedPtPChunk(auxTO); + log.warn( + "PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" + + " from persistence. Dropping chunk from request: {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } catch (InvalidPtPDataAttributesException e) { + dao.signalMalformedPtPChunk(auxTO); + log.warn( + "PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" + + " from persistence. Dropping chunk from request: {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } catch (InvalidFileTransferDataAttributesException e) { + dao.signalMalformedPtPChunk(auxTO); + log.warn( + "PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" + + " from persistence. Dropping chunk from request: {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } catch (InvalidSurlRequestDataAttributesException e) { + dao.signalMalformedPtPChunk(auxTO); + log.warn( + "PtP CHUNK CATALOG! Retrieved malformed PtP chunk data" + + " from persistence. Dropping chunk from request: {}", + rt); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... 
+ return aux; + } + + /** + * Adds to the received PtPChunkDataTO the normalized StFN and the SURL unique ID taken from the + * PtPChunkData + * + * @param chunkTO + * @param chunk + */ + private void completeTO(ReducedPtPChunkDataTO chunkTO, final ReducedPtPChunkData chunk) { + + chunkTO.setNormalizedStFN(chunk.toSURL().normalizedStFN()); + chunkTO.setSurlUniqueID(new Integer(chunk.toSURL().uniqueId())); + } + + /** + * Creates a ReducedPtGChunkDataTO from the received PtGChunkDataTO and completes it with the + * normalized StFN and the SURL unique ID taken from the PtGChunkData + * + * @param chunkTO + * @param chunk + * @return + * @throws InvalidReducedPtPChunkDataAttributesException + */ + private ReducedPtPChunkDataTO completeTO( + PtPChunkDataTO chunkTO, final PtPPersistentChunkData chunk) + throws InvalidReducedPtPChunkDataAttributesException { + + ReducedPtPChunkDataTO reducedChunkTO = this.reduce(chunkTO); + this.completeTO(reducedChunkTO, this.reduce(chunk)); + return reducedChunkTO; + } + + /** + * Creates a ReducedPtPChunkData from the data contained in the received PtPChunkData + * + * @param chunk + * @return + * @throws InvalidReducedPtPChunkDataAttributesException + */ + private ReducedPtPChunkData reduce(PtPPersistentChunkData chunk) + throws InvalidReducedPtPChunkDataAttributesException { + + ReducedPtPChunkData reducedChunk = + new ReducedPtPChunkData( + chunk.getSURL(), chunk.getStatus(), chunk.fileStorageType(), chunk.fileLifetime()); + reducedChunk.setPrimaryKey(chunk.getPrimaryKey()); + return reducedChunk; + } + + /** + * Creates a ReducedPtPChunkDataTO from the data contained in the received PtPChunkDataTO + * + * @param chunkTO + * @return + */ + private ReducedPtPChunkDataTO reduce(PtPChunkDataTO chunkTO) { + + ReducedPtPChunkDataTO reducedChunkTO = new ReducedPtPChunkDataTO(); + reducedChunkTO.setPrimaryKey(chunkTO.primaryKey()); + reducedChunkTO.setToSURL(chunkTO.toSURL()); + reducedChunkTO.setNormalizedStFN(chunkTO.normalizedStFN()); + reducedChunkTO.setSurlUniqueID(chunkTO.surlUniqueID()); + reducedChunkTO.setStatus(chunkTO.status()); + reducedChunkTO.setErrString(chunkTO.errString()); + return reducedChunkTO; + } + + /** + * Checks if the received PtPChunkDataTO contains the fields not set by the front end but required + * + * @param chunkTO + * @return + */ + private boolean isComplete(PtPChunkDataTO chunkTO) { + + return (chunkTO.normalizedStFN() != null) && (chunkTO.surlUniqueID() != null); + } + + /** + * Checks if the received ReducedPtGChunkDataTO contains the fields not set by the front end but + * required + * + * @param reducedChunkTO + * @return + */ + private boolean isComplete(ReducedPtPChunkDataTO reducedChunkTO) { + + return (reducedChunkTO.normalizedStFN() != null) && (reducedChunkTO.surlUniqueID() != null); + } + + public Collection lookupReducedPtPChunkData( + TRequestToken requestToken, Collection surls) { + + Collection reducedChunkDataTOs = + dao.findReduced(requestToken.getValue(), surls); + log.debug("PtP CHUNK CATALOG: retrieved data {}", reducedChunkDataTOs); + return buildReducedChunkDataList(reducedChunkDataTOs); + } + + public Collection lookupPtPChunkData(TSURL surl, GridUserInterface user) { + + return lookupPtPChunkData((List) Arrays.asList(new TSURL[] {surl}), user); + } + + private Collection lookupPtPChunkData( + List surls, GridUserInterface user) { + + int[] surlsUniqueIDs = new int[surls.size()]; + String[] surlsArray = new String[surls.size()]; + int index = 0; + for (TSURL tsurl : surls) { + surlsUniqueIDs[index] = 
tsurl.uniqueId(); + surlsArray[index] = tsurl.rawSurl(); + index++; + } + Collection chunkDataTOs = dao.find(surlsUniqueIDs, surlsArray, user.getDn()); + log.debug("PtP CHUNK CATALOG: retrieved data {}", chunkDataTOs); + return buildChunkDataList(chunkDataTOs); + } + + private Collection buildChunkDataList( + Collection chunkDataTOs) { + + ArrayList list = new ArrayList(); + PtPPersistentChunkData chunk; + for (PtPChunkDataTO chunkTO : chunkDataTOs) { + chunk = makeOne(chunkTO); + if (chunk == null) { + continue; + } + list.add(chunk); + if (isComplete(chunkTO)) { + continue; + } + try { + dao.updateIncomplete(completeTO(chunkTO, chunk)); + } catch (InvalidReducedPtPChunkDataAttributesException e) { + log.warn( + "PtP CHUNK CATALOG! unable to add missing information on " + "DB to the request: {}", + e.getMessage()); + } + } + log.debug("PtPChunkCatalog: returning {}\n\n", list); + return list; + } + + private PtPPersistentChunkData makeOne(PtPChunkDataTO chunkTO) { + + try { + return makeOne(chunkTO, new TRequestToken(chunkTO.requestToken(), chunkTO.timeStamp())); + } catch (InvalidTRequestTokenAttributesException e) { + throw new IllegalStateException( + "Unexpected InvalidTRequestTokenAttributesException in TRequestToken: " + e); + } + } + + private Collection buildReducedChunkDataList( + Collection chunkDataTOCollection) { + + ArrayList list = new ArrayList(); + ReducedPtPChunkData reducedChunkData; + for (ReducedPtPChunkDataTO reducedChunkDataTO : chunkDataTOCollection) { + reducedChunkData = makeOneReduced(reducedChunkDataTO); + if (reducedChunkData != null) { + list.add(reducedChunkData); + if (!this.isComplete(reducedChunkDataTO)) { + this.completeTO(reducedChunkDataTO, reducedChunkData); + dao.updateIncomplete(reducedChunkDataTO); + } + } + } + log.debug("PtP CHUNK CATALOG: returning {}", list); + return list; + } + + private ReducedPtPChunkData makeOneReduced(ReducedPtPChunkDataTO reducedChunkDataTO) { + + StringBuilder errorSb = new StringBuilder(); + // fromSURL + TSURL toSURL = null; + try { + toSURL = TSURL.makeFromStringValidate(reducedChunkDataTO.toSURL()); + } catch (InvalidTSURLAttributesException e) { + errorSb.append(e); + } + if (reducedChunkDataTO.normalizedStFN() != null) { + toSURL.setNormalizedStFN(reducedChunkDataTO.normalizedStFN()); + } + if (reducedChunkDataTO.surlUniqueID() != null) { + toSURL.setUniqueID(reducedChunkDataTO.surlUniqueID().intValue()); + } + // status + TReturnStatus status = null; + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(reducedChunkDataTO.status()); + if (code == TStatusCode.EMPTY) { + errorSb.append("\nRetrieved StatusCode was not recognised: " + reducedChunkDataTO.status()); + } else { + status = new TReturnStatus(code, reducedChunkDataTO.errString()); + } + // fileStorageType + TFileStorageType fileStorageType = + FileStorageTypeConverter.getInstance().toSTORM(reducedChunkDataTO.fileStorageType()); + if (fileStorageType == TFileStorageType.EMPTY) { + errorSb.append( + "\nTFileStorageType could not be " + + "translated from its String representation! String: " + + reducedChunkDataTO.fileStorageType()); + // Use the default value defined in Configuration. 
+ fileStorageType = + TFileStorageType.getTFileStorageType( + Configuration.getInstance().getDefaultFileStorageType()); + errorSb.append( + "\nUsed the default TFileStorageType as defined in StoRM config.: " + fileStorageType); + } + // fileLifetime + TLifeTimeInSeconds fileLifetime = null; + try { + fileLifetime = + TLifeTimeInSeconds.make( + FileLifetimeConverter.getInstance().toStoRM(reducedChunkDataTO.fileLifetime()), + TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + errorSb.append("\n"); + errorSb.append(e); + } + // make ReducedPtPChunkData + ReducedPtPChunkData aux = null; + try { + aux = new ReducedPtPChunkData(toSURL, status, fileStorageType, fileLifetime); + aux.setPrimaryKey(reducedChunkDataTO.primaryKey()); + } catch (InvalidReducedPtPChunkDataAttributesException e) { + log.warn( + "PtP CHUNK CATALOG! Retrieved malformed Reduced PtP" + + " chunk data from persistence: dropping reduced chunk..."); + log.warn(e.getMessage(), e); + log.warn(errorSb.toString()); + } + // end... + return aux; + } + + public int updateStatus( + TRequestToken requestToken, TSURL surl, TStatusCode statusCode, String explanation) { + + return dao.updateStatus( + requestToken, + new int[] {surl.uniqueId()}, + new String[] {surl.rawSurl()}, + statusCode, + explanation); + } + + public int updateFromPreviousStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + return dao.updateStatusOnMatchingStatus( + requestToken, expectedStatusCode, newStatusCode, explanation); + } + + public int updateFromPreviousStatus( + TRequestToken requestToken, + List surlList, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) { + + int[] surlsUniqueIDs = new int[surlList.size()]; + String[] surls = new String[surlList.size()]; + int index = 0; + for (TSURL tsurl : surlList) { + surlsUniqueIDs[index] = tsurl.uniqueId(); + surls[index] = tsurl.rawSurl(); + index++; + } + return dao.updateStatusOnMatchingStatus( + requestToken, surlsUniqueIDs, surls, expectedStatusCode, newStatusCode); + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtPChunkDAO.java b/src/main/java/it/grid/storm/catalogs/PtPChunkDAO.java index 388c7853..ff3d5b30 100644 --- a/src/main/java/it/grid/storm/catalogs/PtPChunkDAO.java +++ b/src/main/java/it/grid/storm/catalogs/PtPChunkDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -16,18 +15,12 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import it.grid.storm.config.Configuration; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.naming.SURL; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -41,1301 +34,1408 @@ import java.util.Map; import java.util.Timer; import java.util.TimerTask; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * DAO class for PtPChunkCatalog. This DAO is specifically designed to connect - * to a MySQL DB. 
The raw data found in those tables is pre-treated in order to - * turn it into the Object Model of StoRM. See Method comments for further info. - * BEWARE! DAO Adjusts for extra fields in the DB that are not present in the - * object model. - * + * DAO class for PtPChunkCatalog. This DAO is specifically designed to connect to a MySQL DB. The + * raw data found in those tables is pre-treated in order to turn it into the Object Model of StoRM. + * See Method comments for further info. BEWARE! DAO Adjusts for extra fields in the DB that are not + * present in the object model. + * * @author EGRID ICTP * @version 2.0 * @date June 2005 */ public class PtPChunkDAO { - private static final Logger log = LoggerFactory.getLogger(PtPChunkDAO.class); - - /* String with the name of the class for the DB driver */ - private final String driver = Configuration.getInstance().getDBDriver(); - /* String referring to the URL of the DB */ - private final String url = Configuration.getInstance().getStormDbURL(); - /* String with the password for the DB */ - private final String password = Configuration.getInstance().getDBPassword(); - /* String with the name for the DB */ - private final String name = Configuration.getInstance().getDBUserName(); - /* Connection to DB - WARNING!!! It is kept open all the time! */ - private Connection con = null; - - private static final PtPChunkDAO dao = new PtPChunkDAO(); - - /* timer thread that will run a task to alert when reconnecting is necessary! */ - private Timer clock = null; - /* - * timer task that will update the boolean signaling that a reconnection is - * needed - */ - private TimerTask clockTask = null; - /* milliseconds that must pass before reconnecting to DB */ - private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; - /* initial delay in milliseconds before starting timer */ - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - - /* boolean that tells whether reconnection is needed because of MySQL bug! */ - private boolean reconnect = false; - - private StatusCodeConverter statusCodeConverter = StatusCodeConverter.getInstance(); - - private PtPChunkDAO() { - - setUpConnection(); - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of the PtPChunkDAO. - */ - public static PtPChunkDAO getInstance() { - - return dao; - } - - /** - * Method used to save the changes made to a retrieved PtPChunkDataTO, back - * into the MySQL DB. Only the transferURL, statusCode and explanation, of - * status_Put table get written to the DB. Likewise for the pinLifetime and - * fileLifetime of request_queue. In case of any error, an error messagge gets - * logged but no exception is thrown. - */ - public synchronized void update(PtPChunkDataTO to) { - - if (!checkConnection()) { - log.error("PtP CHUNK DAO: update - unable to get a valid connection!"); - return; - } - PreparedStatement updatePut = null; - try { - // prepare statement... - updatePut = con - .prepareStatement("UPDATE " - + "request_queue rq JOIN (status_Put sp, request_Put rp) ON " - + "(rq.ID=rp.request_queueID AND sp.request_PutID=rp.ID) " - + "SET sp.transferURL=?, sp.statusCode=?, sp.explanation=?, rq.pinLifetime=?, rq.fileLifetime=?, rq.config_FileStorageTypeID=?, rq.config_OverwriteID=?, " - + "rp.normalized_targetSURL_StFN=?, rp.targetSURL_uniqueID=? 
" - + "WHERE rp.ID=?"); - printWarnings(con.getWarnings()); - - updatePut.setString(1, to.transferURL()); - printWarnings(updatePut.getWarnings()); - - updatePut.setInt(2, to.status()); - printWarnings(updatePut.getWarnings()); - - updatePut.setString(3, to.errString()); - printWarnings(updatePut.getWarnings()); - - updatePut.setInt(4, to.pinLifetime()); - printWarnings(updatePut.getWarnings()); - - updatePut.setInt(5, to.fileLifetime()); - printWarnings(updatePut.getWarnings()); - - updatePut.setString(6, to.fileStorageType()); - printWarnings(updatePut.getWarnings()); - - updatePut.setString(7, to.overwriteOption()); - printWarnings(updatePut.getWarnings()); - - updatePut.setString(8, to.normalizedStFN()); - printWarnings(updatePut.getWarnings()); - - updatePut.setInt(9, to.surlUniqueID()); - printWarnings(updatePut.getWarnings()); - - updatePut.setLong(10, to.primaryKey()); - printWarnings(updatePut.getWarnings()); - // run updateStatusPut... - log.trace("PtP CHUNK DAO - update method: {}", updatePut); - updatePut.executeUpdate(); - printWarnings(updatePut.getWarnings()); - } catch (SQLException e) { - log.error("PtP CHUNK DAO: Unable to complete update! {}", e.getMessage(), e); - } finally { - close(updatePut); - } - } - - /** - * Updates the request_Put represented by the received ReducedPtPChunkDataTO - * by setting its normalized_targetSURL_StFN and targetSURL_uniqueID - * - * @param chunkTO - */ - public synchronized void updateIncomplete(ReducedPtPChunkDataTO chunkTO) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: updateIncomplete - unable to get a valid connection!"); - return; - } - String str = "UPDATE request_Put SET normalized_targetSURL_StFN=?, targetSURL_uniqueID=? " - + "WHERE ID=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - stmt.setString(1, chunkTO.normalizedStFN()); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, chunkTO.surlUniqueID()); - printWarnings(stmt.getWarnings()); - - stmt.setLong(3, chunkTO.primaryKey()); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - update incomplete: {}", stmt); - stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - } catch (SQLException e) { - log.error("PtP CHUNK DAO: Unable to complete update incomplete! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method used to refresh the PtPChunkDataTO information from the MySQL DB. - * This method is intended to be used during the srmAbortRequest/File - * operation. In case of any error, an error message gets logged but no - * exception is thrown; a null PtPChunkDataTO is returned. 
- */ - public synchronized PtPChunkDataTO refresh(long id) { - - if (!checkConnection()) { - log.error("PtP CHUNK DAO: refresh - unable to get a valid connection!"); - return null; - } - String prot = "SELECT tp.config_ProtocolsID FROM request_TransferProtocols tp " - + "WHERE tp.request_queueID IN " - + "(SELECT rp.request_queueID FROM request_Put rp WHERE rp.ID=?)"; - - String refresh = "SELECT rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, rq.s_token, rq.r_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode, sp.transferURL " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rq.ID=rp.request_queueID AND sp.request_PutID=rp.ID) " - + "WHERE rp.ID=?"; - - PreparedStatement stmt = null; - ResultSet rs = null; - PtPChunkDataTO chunkDataTO = null; - - try { - // get protocols for the request - stmt = con.prepareStatement(prot); - printWarnings(con.getWarnings()); - - List protocols = Lists.newArrayList(); - stmt.setLong(1, id); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - refresh method: {}", stmt); - rs = stmt.executeQuery(); - printWarnings(stmt.getWarnings()); - while (rs.next()) { - protocols.add(rs.getString("tp.config_ProtocolsID")); - } - close(rs); - close(stmt); - - // get chunk of the request - stmt = con.prepareStatement(refresh); - printWarnings(con.getWarnings()); - - stmt.setLong(1, id); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - refresh method: {}", stmt); - rs = stmt.executeQuery(); - printWarnings(stmt.getWarnings()); - - if (rs.next()) { - chunkDataTO = new PtPChunkDataTO(); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setRequestToken(rs.getString("rq.r_token")); - chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); - chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); - int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); - } - - chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); - chunkDataTO.setProtocolList(protocols); - chunkDataTO.setStatus(rs.getInt("sp.statusCode")); - chunkDataTO.setTransferURL(rs.getString("sp.transferURL")); - chunkDataTO.setClientDN(rs.getString("rq.client_dn")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separated by the "#" char. The proxy is a BLOB, hence it has to be - * properly converted in string. - */ - java.sql.Blob blob = rs.getBlob("rq.proxy"); - if (!rs.wasNull() && blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - chunkDataTO.setVomsAttributes(new String(bdata)); - } - if (rs.next()) { - log.warn("ATTENTION in PtP CHUNK DAO! Possible DB corruption! 
" - + "refresh method invoked for specific chunk with id {}, but found " - + "more than one such chunks!", id); - } - } else { - log.warn("ATTENTION in PtP CHUNK DAO! Possible DB corruption! " - + "refresh method invoked for specific chunk with id {}, but chunk " - + "NOT found in persistence!", id); - } - } catch (SQLException e) { - log.error("PtP CHUNK DAO! Unable to refresh chunk! {}", e.getMessage(), e); - chunkDataTO = null; - } finally { - close(rs); - close(stmt); - } - return chunkDataTO; - } - - /** - * Method that queries the MySQL DB to find all entries matching the supplied - * TRequestToken. The Collection contains the corresponding PtPChunkDataTO - * objects. An initial simple query establishes the list of protocols - * associated with the request. A second complex query establishes all chunks - * associated with the request, by properly joining request_queue, request_Put - * and status_Put. The considered fields are: (1) From status_Put: the ID - * field which becomes the TOs primary key, and statusCode. (2) From - * request_Put: targetSURL and expectedFileSize. (3) From request_queue: - * pinLifetime, fileLifetime, config_FileStorageTypeID, s_token, - * config_OverwriteID. In case of any error, a log gets written and an empty - * collection is returned. No exception is returned. NOTE! Chunks in - * SRM_ABORTED status are NOT returned! This is important because this method - * is intended to be used by the Feeders to fetch all chunks in the request, - * and aborted chunks should not be picked up for processing! - */ - public synchronized Collection find(TRequestToken requestToken) { - - if (!checkConnection()) { - log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); - return null; - } - String strToken = requestToken.toString(); - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - str = "SELECT tp.config_ProtocolsID " - + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " - + "WHERE rq.r_token=?"; - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List protocols = Lists.newArrayList(); - find.setString(1, strToken); - printWarnings(find.getWarnings()); - - log.trace("PtP CHUNK DAO - find method: {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - while (rs.next()) { - protocols.add(rs.getString("tp.config_ProtocolsID")); - } - close(rs); - close(find); - - // get chunks of the request - str = "SELECT rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE rq.r_token=? 
AND sp.statusCode<>?"; - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List list = Lists.newArrayList(); - find.setString(1, strToken); - printWarnings(find.getWarnings()); - - find.setInt(2, - statusCodeConverter.toDB(SRM_ABORTED)); - printWarnings(find.getWarnings()); - - log.trace("PtP CHUNK DAO - find method: {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - PtPChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new PtPChunkDataTO(); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setClientDN(rs.getString("rq.client_dn")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separated by the "#" char. The proxy is a BLOB, hence it has to be - * properly converted in string. - */ - java.sql.Blob blob = rs.getBlob("rq.proxy"); - if (!rs.wasNull() && blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - chunkDataTO.setVomsAttributes(new String(bdata)); - } - chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); - chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - - chunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); - int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); - } - - chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); - chunkDataTO.setProtocolList(protocols); - chunkDataTO.setRequestToken(strToken); - chunkDataTO.setStatus(rs.getInt("sp.statusCode")); - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTP CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return Lists.newArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedPtPChunkDataTO associated to the - * given TRequestToken expressed as String. - */ - public synchronized Collection findReduced( - String reqtoken, Collection surls) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: findReduced - unable to get a valid connection!"); - return Lists.newArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - boolean addInClause = surls != null && !surls.isEmpty(); - try { - // get reduced chunks - String str = "SELECT rq.fileLifetime, rq.config_FileStorageTypeID, rp.ID, rp.targetSURL, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE rq.r_token=?"; - if (addInClause) { - str += " AND rp.targetSURL_uniqueID IN ("; - for (int i=0; i list = Lists.newArrayList(); - find.setString(1, reqtoken); - printWarnings(find.getWarnings()); - if (addInClause) { - Iterator iterator = surls.iterator(); - int start = 2; - while (iterator.hasNext()) { - TSURL surl = iterator.next(); - find.setInt(start++, surl.uniqueId()); - } - } - printWarnings(find.getWarnings()); - log.trace("PtP CHUNK DAO! 
findReduced with request token; {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - ReducedPtPChunkDataTO reducedChunkDataTO = null; - while (rs.next()) { - reducedChunkDataTO = new ReducedPtPChunkDataTO(); - reducedChunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); - reducedChunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - reducedChunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); - reducedChunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - reducedChunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); - int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); - if (!rs.wasNull()) { - reducedChunkDataTO.setSurlUniqueID(uniqueID); - } - - reducedChunkDataTO.setStatus(rs.getInt("sp.statusCode")); - list.add(reducedChunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTP CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return Lists.newArrayList(); - } finally { - close(rs); - close(find); - } - } - - /** - * Method that returns a Collection of ReducedPtPChunkDataTO corresponding to - * the IDs supplied in the given List of Long. If the List is null or empty, - * an empty collection is returned and error messages get logged. - */ - public synchronized Collection findReduced( - List ids) { - - if (ids != null && !ids.isEmpty()) { - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: findReduced - unable to get a valid connection!"); - return Lists.newArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - // get reduced chunks - String str = "SELECT rq.fileLifetime, rq.config_FileStorageTypeID, rp.ID, rp.targetSURL, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE rp.ID IN (" + StringUtils.join(ids.toArray(), ',') + ")"; - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List list = Lists.newArrayList(); - log.trace("PtP CHUNK DAO! fetchReduced; {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - ReducedPtPChunkDataTO reducedChunkDataTO = null; - while (rs.next()) { - reducedChunkDataTO = new ReducedPtPChunkDataTO(); - reducedChunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); - reducedChunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - reducedChunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); - reducedChunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - reducedChunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); - int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); - if (!rs.wasNull()) { - reducedChunkDataTO.setSurlUniqueID(uniqueID); - } - - reducedChunkDataTO.setStatus(rs.getInt("sp.statusCode")); - list.add(reducedChunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTP CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection */ - return Lists.newArrayList(); - } finally { - close(rs); - close(find); - } - } else { - log.warn("ATTENTION in PtP CHUNK DAO! fetchReduced " - + "invoked with null or empty list of IDs!"); - return Lists.newArrayList(); - } - } - - /** - * Method used in extraordinary situations to signal that data retrieved from - * the DB was malformed and could not be translated into the StoRM object - * model. 
This method attempts to change the status of the chunk to - * SRM_FAILURE and record it in the DB, in the status_Put table. This - * operation could potentially fail because the source of the malformed - * problems could be a problematic DB; indeed, initially only log messages - * were recorded. Yet it soon became clear that the source of malformed data - * were actually the clients themselves and/or FE recording in the DB. In - * these circumstances the client would find its request as being in the - * SRM_IN_PROGRESS state for ever. Hence the pressing need to inform it of the - * encountered problems. - */ - public synchronized void signalMalformedPtPChunk(PtPChunkDataTO auxTO) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: signalMalformedPtPChunk - unable to get a valid connection!"); - return; - } - String signalSQL = "UPDATE status_Put sp SET sp.statusCode=" - + statusCodeConverter.toDB(SRM_FAILURE) - + ", sp.explanation=? " + "WHERE sp.request_PutID=" + auxTO.primaryKey(); - PreparedStatement signal = null; - try { - signal = con.prepareStatement(signalSQL); - printWarnings(con.getWarnings()); - /* NB: Prepared statement spares DB-specific String notation! */ - signal.setString(1, "This chunk of the request is malformed!"); - printWarnings(signal.getWarnings()); - - log.trace("PtP CHUNK DAO - signalMalformedPtPChunk method: {}", signal); - signal.executeUpdate(); - printWarnings(signal.getWarnings()); - } catch (SQLException e) { - log.error("PtPChunkDAO! Unable to signal in DB that a chunk of " - + "the request was malformed! Request: {}; Error: {}", auxTO.toString(), - e.getMessage(), e); - } finally { - close(signal); - } - } - - /** - * Method that returns the number of Put requests on the given SURL, that are - * in SRM_SPACE_AVAILABLE state. This method is intended to be used by - * PtPChunkCatalog in the isSRM_SPACE_AVAILABLE method invocation. In case of - * any error, 0 is returned. - */ - public synchronized int numberInSRM_SPACE_AVAILABLE(int surlUniqueID) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: numberInSRM_SPACE_AVAILABLE - unable to get a valid connection!"); - return 0; - } - - String str = "SELECT COUNT(rp.ID) FROM status_Put sp JOIN request_Put rp " - + "ON (sp.request_PutID=rp.ID) " - + "WHERE rp.targetSURL_uniqueID=? AND sp.statusCode=?"; - PreparedStatement stmt = null; - ResultSet rs = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - /* Prepared statement spares DB-specific String notation! */ - stmt.setInt(1, surlUniqueID); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2,statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - numberInSRM_SPACE_AVAILABLE method: {}", stmt); - rs = stmt.executeQuery(); - printWarnings(stmt.getWarnings()); - - int numberSpaceAvailable = 0; - if (rs.next()) { - numberSpaceAvailable = rs.getInt(1); - } - return numberSpaceAvailable; - } catch (SQLException e) { - log.error("PtPChunkDAO! Unable to determine " - + "numberInSRM_SPACE_AVAILABLE! Returning 0! {}", e.getMessage(), e); - return 0; - } finally { - close(rs); - close(stmt); - } - } - - /** - * Method that retrieves all expired requests in SRM_SPACE_AVAILABLE state. 
- * - * @return a Map containing the ID of the request as key and the relative - * SURL as value - */ - public synchronized Map getExpiredSRM_SPACE_AVAILABLE() { - - Map ids = Maps.newHashMap(); - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: getExpiredSRM_SPACE_AVAILABLE - unable to get a valid connection!"); - return ids; - } - - String idsstr = "SELECT rp.ID, rp.targetSURL FROM " - + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "WHERE sp.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - PreparedStatement stmt = null; - ResultSet rs = null; - - try { - stmt = con.prepareStatement(idsstr); - printWarnings(con.getWarnings()); - - stmt.setInt(1, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - getExpiredSRM_SPACE_AVAILABLE: {}", stmt); - - rs = stmt.executeQuery(); - printWarnings(stmt.getWarnings()); - - while (rs.next()) { - ids.put(rs.getLong("rp.ID"), rs.getString("rp.targetSURL")); - } - } catch (SQLException e) { - log.error("PtPChunkDAO! Unable to select expired " - + "SRM_SPACE_AVAILABLE chunks of PtP requests. {}", e.getMessage(), e); - - } finally { - close(rs); - close(stmt); - } - return ids; - } - - /** - * Method that retrieves all ptp requests in SRM_REQUEST_INPROGRESS state which can be - * considered as expired. - * - * @return a Map containing the ID of the request as key and the involved array of SURLs as - * value - */ - public synchronized List getExpiredSRM_REQUEST_INPROGRESS(long expirationTime) { - - List ids = Lists.newArrayList(); - - if (!checkConnection()) { - log.error( - "PtP CHUNK DAO: getExpiredSRM_REQUEST_INPROGRESS - unable to get a valid connection!"); - return ids; - } + private static final Logger log = LoggerFactory.getLogger(PtPChunkDAO.class); + + /* String with the name of the class for the DB driver */ + private final String driver = Configuration.getInstance().getDBDriver(); + /* String referring to the URL of the DB */ + private final String url = Configuration.getInstance().getStormDbURL(); + /* String with the password for the DB */ + private final String password = Configuration.getInstance().getDBPassword(); + /* String with the name for the DB */ + private final String name = Configuration.getInstance().getDBUserName(); + /* Connection to DB - WARNING!!! It is kept open all the time! */ + private Connection con = null; + + private static final PtPChunkDAO dao = new PtPChunkDAO(); + + /* timer thread that will run a task to alert when reconnecting is necessary! */ + private Timer clock = null; + /* + * timer task that will update the boolean signaling that a reconnection is + * needed + */ + private TimerTask clockTask = null; + /* milliseconds that must pass before reconnecting to DB */ + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + /* initial delay in milliseconds before starting timer */ + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + + /* boolean that tells whether reconnection is needed because of MySQL bug! 
*/ + private boolean reconnect = false; + + private StatusCodeConverter statusCodeConverter = StatusCodeConverter.getInstance(); + + private PtPChunkDAO() { + + setUpConnection(); + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of the PtPChunkDAO. */ + public static PtPChunkDAO getInstance() { + + return dao; + } + + /** + * Method used to save the changes made to a retrieved PtPChunkDataTO, back into the MySQL DB. + * Only the transferURL, statusCode and explanation, of status_Put table get written to the DB. + * Likewise for the pinLifetime and fileLifetime of request_queue. In case of any error, an error + * messagge gets logged but no exception is thrown. + */ + public synchronized void update(PtPChunkDataTO to) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: update - unable to get a valid connection!"); + return; + } + PreparedStatement updatePut = null; + try { + // prepare statement... + updatePut = + con.prepareStatement( + "UPDATE " + + "request_queue rq JOIN (status_Put sp, request_Put rp) ON " + + "(rq.ID=rp.request_queueID AND sp.request_PutID=rp.ID) " + + "SET sp.transferURL=?, sp.statusCode=?, sp.explanation=?, rq.pinLifetime=?, rq.fileLifetime=?, rq.config_FileStorageTypeID=?, rq.config_OverwriteID=?, " + + "rp.normalized_targetSURL_StFN=?, rp.targetSURL_uniqueID=? " + + "WHERE rp.ID=?"); + printWarnings(con.getWarnings()); + + updatePut.setString(1, to.transferURL()); + printWarnings(updatePut.getWarnings()); + + updatePut.setInt(2, to.status()); + printWarnings(updatePut.getWarnings()); + + updatePut.setString(3, to.errString()); + printWarnings(updatePut.getWarnings()); + + updatePut.setInt(4, to.pinLifetime()); + printWarnings(updatePut.getWarnings()); + + updatePut.setInt(5, to.fileLifetime()); + printWarnings(updatePut.getWarnings()); + + updatePut.setString(6, to.fileStorageType()); + printWarnings(updatePut.getWarnings()); + + updatePut.setString(7, to.overwriteOption()); + printWarnings(updatePut.getWarnings()); + + updatePut.setString(8, to.normalizedStFN()); + printWarnings(updatePut.getWarnings()); - String query = "SELECT rq.ID FROM request_queue rq, request_Put rp, status_Put sp " - + "WHERE rq.ID = rp.request_queueID and rp.ID = sp.request_PutID " - + "AND rq.status=? AND rq.timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? SECOND)"; + updatePut.setInt(9, to.surlUniqueID()); + printWarnings(updatePut.getWarnings()); - PreparedStatement stmt = null; - ResultSet rs = null; + updatePut.setLong(10, to.primaryKey()); + printWarnings(updatePut.getWarnings()); + // run updateStatusPut... + log.trace("PtP CHUNK DAO - update method: {}", updatePut); + updatePut.executeUpdate(); + printWarnings(updatePut.getWarnings()); + } catch (SQLException e) { + log.error("PtP CHUNK DAO: Unable to complete update! {}", e.getMessage(), e); + } finally { + close(updatePut); + } + } + + /** + * Updates the request_Put represented by the received ReducedPtPChunkDataTO by setting its + * normalized_targetSURL_StFN and targetSURL_uniqueID + * + * @param chunkTO + */ + public synchronized void updateIncomplete(ReducedPtPChunkDataTO chunkTO) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: updateIncomplete - unable to get a valid connection!"); + return; + } + String str = + "UPDATE request_Put SET normalized_targetSURL_StFN=?, targetSURL_uniqueID=? 
" + + "WHERE ID=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + stmt.setString(1, chunkTO.normalizedStFN()); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, chunkTO.surlUniqueID()); + printWarnings(stmt.getWarnings()); + + stmt.setLong(3, chunkTO.primaryKey()); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - update incomplete: {}", stmt); + stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + } catch (SQLException e) { + log.error("PtP CHUNK DAO: Unable to complete update incomplete! {}", e.getMessage(), e); + } finally { + close(stmt); + } + } - try { - stmt = con.prepareStatement(query); - printWarnings(con.getWarnings()); + /** + * Method used to refresh the PtPChunkDataTO information from the MySQL DB. This method is + * intended to be used during the srmAbortRequest/File operation. In case of any error, an error + * message gets logged but no exception is thrown; a null PtPChunkDataTO is returned. + */ + public synchronized PtPChunkDataTO refresh(long id) { - stmt.setLong(1, statusCodeConverter.toDB(SRM_REQUEST_INPROGRESS)); - printWarnings(stmt.getWarnings()); + if (!checkConnection()) { + log.error("PtP CHUNK DAO: refresh - unable to get a valid connection!"); + return null; + } + String prot = + "SELECT tp.config_ProtocolsID FROM request_TransferProtocols tp " + + "WHERE tp.request_queueID IN " + + "(SELECT rp.request_queueID FROM request_Put rp WHERE rp.ID=?)"; + + String refresh = + "SELECT rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, rq.s_token, rq.r_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode, sp.transferURL " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rq.ID=rp.request_queueID AND sp.request_PutID=rp.ID) " + + "WHERE rp.ID=?"; + + PreparedStatement stmt = null; + ResultSet rs = null; + PtPChunkDataTO chunkDataTO = null; - stmt.setLong(2, expirationTime); - printWarnings(stmt.getWarnings()); + try { + // get protocols for the request + stmt = con.prepareStatement(prot); + printWarnings(con.getWarnings()); - log.trace("PtP CHUNK DAO - getExpiredSRM_REQUEST_INPROGRESS: {}", stmt); + List protocols = Lists.newArrayList(); + stmt.setLong(1, id); + printWarnings(stmt.getWarnings()); - rs = stmt.executeQuery(); - printWarnings(stmt.getWarnings()); + log.trace("PtP CHUNK DAO - refresh method: {}", stmt); + rs = stmt.executeQuery(); + printWarnings(stmt.getWarnings()); + while (rs.next()) { + protocols.add(rs.getString("tp.config_ProtocolsID")); + } + close(rs); + close(stmt); + + // get chunk of the request + stmt = con.prepareStatement(refresh); + printWarnings(con.getWarnings()); - while (rs.next()) { - ids.add(rs.getLong("rq.ID")); - } - } catch (SQLException e) { - log.error( - "PtPChunkDAO! Unable to select expired " - + "SRM_REQUEST_INPROGRESS chunks of PtP requests. 
{}", - e.getMessage(), e); + stmt.setLong(1, id); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - refresh method: {}", stmt); + rs = stmt.executeQuery(); + printWarnings(stmt.getWarnings()); + + if (rs.next()) { + chunkDataTO = new PtPChunkDataTO(); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setRequestToken(rs.getString("rq.r_token")); + chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); + chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); + chunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); + int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); + } - } finally { - close(rs); - close(stmt); + chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); + chunkDataTO.setProtocolList(protocols); + chunkDataTO.setStatus(rs.getInt("sp.statusCode")); + chunkDataTO.setTransferURL(rs.getString("sp.transferURL")); + chunkDataTO.setClientDN(rs.getString("rq.client_dn")); + + /** + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separated by the "#" char. The proxy is a BLOB, hence it has + * to be properly converted in string. + */ + java.sql.Blob blob = rs.getBlob("rq.proxy"); + if (!rs.wasNull() && blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + chunkDataTO.setVomsAttributes(new String(bdata)); } - return ids; - } - - /** - * Method that updates chunks in SRM_SPACE_AVAILABLE state, into SRM_SUCCESS. - * An array of long representing the primary key of each chunk is required. - * This is needed when the client invokes srmPutDone() In case of any error - * nothing happens and no exception is thrown, but proper messages get - * logged. - */ - public synchronized void transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS(List ids) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS - unable to get a valid connection!"); - return; - } - - String str = "UPDATE " - + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "SET sp.statusCode=? " + "WHERE sp.statusCode=? AND rp.ID IN (" - + StringUtils.join(ids.toArray(), ',') + ")"; - - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - stmt.setInt(1, - statusCodeConverter.toDB(SRM_SUCCESS)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - " - + "transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS: {}", stmt); - - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - - if (count == 0) { - log.trace("PtPChunkDAO! No chunk of PtP request was " - + "transited from SRM_SPACE_AVAILABLE to SRM_SUCCESS."); - } else { - log.info("PtPChunkDAO! {} chunks of PtP requests were transited " - + "from SRM_SPACE_AVAILABLE to SRM_SUCCESS.", count); - } - } catch (SQLException e) { - log.error("PtPChunkDAO! 
Unable to transit chunks from " - + "SRM_SPACE_AVAILABLE to SRM_SUCCESS! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that updates chunks in SRM_SPACE_AVAILABLE state, into - * SRM_FILE_LIFETIME_EXPIRED. An array of Long representing the primary key - * of each chunk is required. This is needed when the client forgets to invoke - * srmPutDone(). In case of any error or exception, the returned int value - * will be zero or less than the input List size. - * - * @param the list of the request id to update - * - * @return The number of the updated records into the db - */ - public synchronized int transitExpiredSRM_SPACE_AVAILABLEtoSRM_FILE_LIFETIME_EXPIRED(Collection ids) { - - Preconditions.checkNotNull(ids, "Invalid list of id"); - - if (!checkConnection()) { - log.error("Unable to get a valid connection to the database!"); - return 0; + if (rs.next()) { + log.warn( + "ATTENTION in PtP CHUNK DAO! Possible DB corruption! " + + "refresh method invoked for specific chunk with id {}, but found " + + "more than one such chunks!", + id); } + } else { + log.warn( + "ATTENTION in PtP CHUNK DAO! Possible DB corruption! " + + "refresh method invoked for specific chunk with id {}, but chunk " + + "NOT found in persistence!", + id); + } + } catch (SQLException e) { + log.error("PtP CHUNK DAO! Unable to refresh chunk! {}", e.getMessage(), e); + chunkDataTO = null; + } finally { + close(rs); + close(stmt); + } + return chunkDataTO; + } - String querySQL = "UPDATE status_Put sp " - + "JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "SET sp.statusCode=?, sp.explanation=? " - + "WHERE sp.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; - - - if (!ids.isEmpty()) { - querySQL += "AND rp.ID IN (" + StringUtils.join(ids.toArray(), ',') + ")"; + /** + * Method that queries the MySQL DB to find all entries matching the supplied TRequestToken. The + * Collection contains the corresponding PtPChunkDataTO objects. An initial simple query + * establishes the list of protocols associated with the request. A second complex query + * establishes all chunks associated with the request, by properly joining request_queue, + * request_Put and status_Put. The considered fields are: (1) From status_Put: the ID field which + * becomes the TOs primary key, and statusCode. (2) From request_Put: targetSURL and + * expectedFileSize. (3) From request_queue: pinLifetime, fileLifetime, config_FileStorageTypeID, + * s_token, config_OverwriteID. In case of any error, a log gets written and an empty collection + * is returned. No exception is returned. NOTE! Chunks in SRM_ABORTED status are NOT returned! + * This is important because this method is intended to be used by the Feeders to fetch all chunks + * in the request, and aborted chunks should not be picked up for processing! 
+ */ + public synchronized Collection find(TRequestToken requestToken) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); + return null; + } + String strToken = requestToken.toString(); + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + str = + "SELECT tp.config_ProtocolsID " + + "FROM request_TransferProtocols tp JOIN request_queue rq ON tp.request_queueID=rq.ID " + + "WHERE rq.r_token=?"; + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List protocols = Lists.newArrayList(); + find.setString(1, strToken); + printWarnings(find.getWarnings()); + + log.trace("PtP CHUNK DAO - find method: {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + while (rs.next()) { + protocols.add(rs.getString("tp.config_ProtocolsID")); + } + close(rs); + close(find); + + // get chunks of the request + str = + "SELECT rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE rq.r_token=? AND sp.statusCode<>?"; + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List list = Lists.newArrayList(); + find.setString(1, strToken); + printWarnings(find.getWarnings()); + + find.setInt(2, statusCodeConverter.toDB(SRM_ABORTED)); + printWarnings(find.getWarnings()); + + log.trace("PtP CHUNK DAO - find method: {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + PtPChunkDataTO chunkDataTO = null; + while (rs.next()) { + chunkDataTO = new PtPChunkDataTO(); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setClientDN(rs.getString("rq.client_dn")); + + /** + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separated by the "#" char. The proxy is a BLOB, hence it has + * to be properly converted in string. 
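A self-contained sketch of the BLOB-to-String workaround described in the comment above; the helper name and the split into individual FQANs are illustrative only, while the DAO itself just stores the joined string via setVomsAttributes():

    static String[] fqansFromProxyColumn(java.sql.ResultSet rs) throws java.sql.SQLException {
      java.sql.Blob proxy = rs.getBlob("rq.proxy");
      if (rs.wasNull() || proxy == null) {
        return new String[0];
      }
      byte[] raw = proxy.getBytes(1, (int) proxy.length());
      // e.g. "/vo/Role=production#/vo/Role=user" becomes two FQAN strings
      return new String(raw).split("#");
    }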
+ */ + java.sql.Blob blob = rs.getBlob("rq.proxy"); + if (!rs.wasNull() && blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + chunkDataTO.setVomsAttributes(new String(bdata)); } + chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); + chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - PreparedStatement stmt = null; - int count = 0; - try { - stmt = con.prepareStatement(querySQL); - printWarnings(con.getWarnings()); - - stmt.setInt(1, statusCodeConverter.toDB(SRM_FILE_LIFETIME_EXPIRED)); - printWarnings(stmt.getWarnings()); - - stmt.setString(2, "Expired pinLifetime"); - printWarnings(stmt.getWarnings()); - - stmt.setInt(3, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); - printWarnings(stmt.getWarnings()); - - log.trace( - "PtP CHUNK DAO - transit SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED: {}", - stmt); - - count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - - } catch (SQLException e) { - log.error( - "PtPChunkDAO! Unable to transit chunks from " - + "SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED! {}", - e.getMessage(), e); - } finally { - close(stmt); + chunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); + int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); } - log.trace("PtPChunkDAO! {} chunks of PtP requests were transited " - + "from SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED.", count); - return count; + + chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); + chunkDataTO.setProtocolList(protocols); + chunkDataTO.setRequestToken(strToken); + chunkDataTO.setStatus(rs.getInt("sp.statusCode")); + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTP CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return Lists.newArrayList(); + } finally { + close(rs); + close(find); } + } - /** - * Method that updates enqueued requests selected by id into SRM_FAILURE. - * An array of Long representing the id of each request is required. - * - * @param the list of the request id to update - * - * @return The number of the updated records. Zero or less than the input list size in case of errors. - */ - public synchronized int transitExpiredSRM_REQUEST_INPROGRESStoSRM_FAILURE(Collection ids) { + /** + * Method that returns a Collection of ReducedPtPChunkDataTO associated to the given TRequestToken + * expressed as String. + */ + public synchronized Collection findReduced( + String reqtoken, Collection surls) { - Preconditions.checkNotNull(ids, "Invalid list of id"); + if (!checkConnection()) { + log.error("PtP CHUNK DAO: findReduced - unable to get a valid connection!"); + return Lists.newArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + boolean addInClause = surls != null && !surls.isEmpty(); + try { + // get reduced chunks + String str = + "SELECT rq.fileLifetime, rq.config_FileStorageTypeID, rp.ID, rp.targetSURL, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE rq.r_token=?"; + if (addInClause) { + str += " AND rp.targetSURL_uniqueID IN ("; + for (int i = 0; i < surls.size(); i++) { + str += i == 0 ? "?" 
: ", ?"; + } + str += ")"; + } + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); - if (ids.isEmpty()) { - return 0; + List list = Lists.newArrayList(); + find.setString(1, reqtoken); + printWarnings(find.getWarnings()); + if (addInClause) { + Iterator iterator = surls.iterator(); + int start = 2; + while (iterator.hasNext()) { + TSURL surl = iterator.next(); + find.setInt(start++, surl.uniqueId()); + } + } + printWarnings(find.getWarnings()); + log.trace("PtP CHUNK DAO! findReduced with request token; {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + ReducedPtPChunkDataTO reducedChunkDataTO = null; + while (rs.next()) { + reducedChunkDataTO = new ReducedPtPChunkDataTO(); + reducedChunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); + reducedChunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + reducedChunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); + reducedChunkDataTO.setToSURL(rs.getString("rp.targetSURL")); + reducedChunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); + int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); + if (!rs.wasNull()) { + reducedChunkDataTO.setSurlUniqueID(uniqueID); } - if (!checkConnection()) { - log.error("Unable to get a valid connection to the database!"); - return 0; + reducedChunkDataTO.setStatus(rs.getInt("sp.statusCode")); + list.add(reducedChunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTP CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return Lists.newArrayList(); + } finally { + close(rs); + close(find); + } + } + + /** + * Method that returns a Collection of ReducedPtPChunkDataTO corresponding to the IDs supplied in + * the given List of Long. If the List is null or empty, an empty collection is returned and error + * messages get logged. + */ + public synchronized Collection findReduced(List ids) { + + if (ids != null && !ids.isEmpty()) { + if (!checkConnection()) { + log.error("PtP CHUNK DAO: findReduced - unable to get a valid connection!"); + return Lists.newArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + // get reduced chunks + String str = + "SELECT rq.fileLifetime, rq.config_FileStorageTypeID, rp.ID, rp.targetSURL, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE rp.ID IN (" + + StringUtils.join(ids.toArray(), ',') + + ")"; + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List list = Lists.newArrayList(); + log.trace("PtP CHUNK DAO! 
fetchReduced; {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + ReducedPtPChunkDataTO reducedChunkDataTO = null; + while (rs.next()) { + reducedChunkDataTO = new ReducedPtPChunkDataTO(); + reducedChunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); + reducedChunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + reducedChunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); + reducedChunkDataTO.setToSURL(rs.getString("rp.targetSURL")); + reducedChunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); + int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); + if (!rs.wasNull()) { + reducedChunkDataTO.setSurlUniqueID(uniqueID); + } + + reducedChunkDataTO.setStatus(rs.getInt("sp.statusCode")); + list.add(reducedChunkDataTO); } + return list; + } catch (SQLException e) { + log.error("PTP CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection */ + return Lists.newArrayList(); + } finally { + close(rs); + close(find); + } + } else { + log.warn( + "ATTENTION in PtP CHUNK DAO! fetchReduced " + "invoked with null or empty list of IDs!"); + return Lists.newArrayList(); + } + } + + /** + * Method used in extraordinary situations to signal that data retrieved from the DB was malformed + * and could not be translated into the StoRM object model. This method attempts to change the + * status of the chunk to SRM_FAILURE and record it in the DB, in the status_Put table. This + * operation could potentially fail because the source of the malformed problems could be a + * problematic DB; indeed, initially only log messages were recorded. Yet it soon became clear + * that the source of malformed data were actually the clients themselves and/or FE recording in + * the DB. In these circumstances the client would find its request as being in the + * SRM_IN_PROGRESS state for ever. Hence the pressing need to inform it of the encountered + * problems. + */ + public synchronized void signalMalformedPtPChunk(PtPChunkDataTO auxTO) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: signalMalformedPtPChunk - unable to get a valid connection!"); + return; + } + String signalSQL = + "UPDATE status_Put sp SET sp.statusCode=" + + statusCodeConverter.toDB(SRM_FAILURE) + + ", sp.explanation=? " + + "WHERE sp.request_PutID=" + + auxTO.primaryKey(); + PreparedStatement signal = null; + try { + signal = con.prepareStatement(signalSQL); + printWarnings(con.getWarnings()); + /* NB: Prepared statement spares DB-specific String notation! */ + signal.setString(1, "This chunk of the request is malformed!"); + printWarnings(signal.getWarnings()); + + log.trace("PtP CHUNK DAO - signalMalformedPtPChunk method: {}", signal); + signal.executeUpdate(); + printWarnings(signal.getWarnings()); + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to signal in DB that a chunk of " + + "the request was malformed! Request: {}; Error: {}", + auxTO.toString(), + e.getMessage(), + e); + } finally { + close(signal); + } + } + + /** + * Method that returns the number of Put requests on the given SURL, that are in + * SRM_SPACE_AVAILABLE state. This method is intended to be used by PtPChunkCatalog in the + * isSRM_SPACE_AVAILABLE method invocation. In case of any error, 0 is returned. 
+ */ + public synchronized int numberInSRM_SPACE_AVAILABLE(int surlUniqueID) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: numberInSRM_SPACE_AVAILABLE - unable to get a valid connection!"); + return 0; + } + + String str = + "SELECT COUNT(rp.ID) FROM status_Put sp JOIN request_Put rp " + + "ON (sp.request_PutID=rp.ID) " + + "WHERE rp.targetSURL_uniqueID=? AND sp.statusCode=?"; + PreparedStatement stmt = null; + ResultSet rs = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + /* Prepared statement spares DB-specific String notation! */ + stmt.setInt(1, surlUniqueID); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - numberInSRM_SPACE_AVAILABLE method: {}", stmt); + rs = stmt.executeQuery(); + printWarnings(stmt.getWarnings()); + + int numberSpaceAvailable = 0; + if (rs.next()) { + numberSpaceAvailable = rs.getInt(1); + } + return numberSpaceAvailable; + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to determine " + "numberInSRM_SPACE_AVAILABLE! Returning 0! {}", + e.getMessage(), + e); + return 0; + } finally { + close(rs); + close(stmt); + } + } + + /** + * Method that retrieves all expired requests in SRM_SPACE_AVAILABLE state. + * + * @return a Map containing the ID of the request as key and the relative SURL as value + */ + public synchronized Map getExpiredSRM_SPACE_AVAILABLE() { + + Map ids = Maps.newHashMap(); + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: getExpiredSRM_SPACE_AVAILABLE - unable to get a valid connection!"); + return ids; + } + + String idsstr = + "SELECT rp.ID, rp.targetSURL FROM " + + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "WHERE sp.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + PreparedStatement stmt = null; + ResultSet rs = null; + + try { + stmt = con.prepareStatement(idsstr); + printWarnings(con.getWarnings()); + + stmt.setInt(1, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - getExpiredSRM_SPACE_AVAILABLE: {}", stmt); + + rs = stmt.executeQuery(); + printWarnings(stmt.getWarnings()); + + while (rs.next()) { + ids.put(rs.getLong("rp.ID"), rs.getString("rp.targetSURL")); + } + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to select expired " + + "SRM_SPACE_AVAILABLE chunks of PtP requests. {}", + e.getMessage(), + e); + + } finally { + close(rs); + close(stmt); + } + return ids; + } + + /** + * Method that retrieves all ptp requests in SRM_REQUEST_INPROGRESS state which can be considered + * as expired. + * + * @return a Map containing the ID of the request as key and the involved array of SURLs as value + */ + public synchronized List getExpiredSRM_REQUEST_INPROGRESS(long expirationTime) { + + List ids = Lists.newArrayList(); + + if (!checkConnection()) { + log.error( + "PtP CHUNK DAO: getExpiredSRM_REQUEST_INPROGRESS - unable to get a valid connection!"); + return ids; + } + + String query = + "SELECT rq.ID FROM request_queue rq, request_Put rp, status_Put sp " + + "WHERE rq.ID = rp.request_queueID and rp.ID = sp.request_PutID " + + "AND rq.status=? AND rq.timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? 
SECOND)"; - String querySQL = "UPDATE request_queue rq, request_Put rp, status_Put sp " + PreparedStatement stmt = null; + ResultSet rs = null; + + try { + stmt = con.prepareStatement(query); + printWarnings(con.getWarnings()); + + stmt.setLong(1, statusCodeConverter.toDB(SRM_REQUEST_INPROGRESS)); + printWarnings(stmt.getWarnings()); + + stmt.setLong(2, expirationTime); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - getExpiredSRM_REQUEST_INPROGRESS: {}", stmt); + + rs = stmt.executeQuery(); + printWarnings(stmt.getWarnings()); + + while (rs.next()) { + ids.add(rs.getLong("rq.ID")); + } + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to select expired " + + "SRM_REQUEST_INPROGRESS chunks of PtP requests. {}", + e.getMessage(), + e); + + } finally { + close(rs); + close(stmt); + } + return ids; + } + + /** + * Method that updates chunks in SRM_SPACE_AVAILABLE state, into SRM_SUCCESS. An array of long + * representing the primary key of each chunk is required. This is needed when the client invokes + * srmPutDone() In case of any error nothing happens and no exception is thrown, but proper + * messages get logged. + */ + public synchronized void transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS(List ids) { + + if (!checkConnection()) { + log.error( + "PtP CHUNK DAO: transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS - unable to get a valid connection!"); + return; + } + + String str = + "UPDATE " + + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "SET sp.statusCode=? " + + "WHERE sp.statusCode=? AND rp.ID IN (" + + StringUtils.join(ids.toArray(), ',') + + ")"; + + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + stmt.setInt(1, statusCodeConverter.toDB(SRM_SUCCESS)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - " + "transitSRM_SPACE_AVAILABLEtoSRM_SUCCESS: {}", stmt); + + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + + if (count == 0) { + log.trace( + "PtPChunkDAO! No chunk of PtP request was " + + "transited from SRM_SPACE_AVAILABLE to SRM_SUCCESS."); + } else { + log.info( + "PtPChunkDAO! {} chunks of PtP requests were transited " + + "from SRM_SPACE_AVAILABLE to SRM_SUCCESS.", + count); + } + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to transit chunks from " + "SRM_SPACE_AVAILABLE to SRM_SUCCESS! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + /** + * Method that updates chunks in SRM_SPACE_AVAILABLE state, into SRM_FILE_LIFETIME_EXPIRED. An + * array of Long representing the primary key of each chunk is required. This is needed when the + * client forgets to invoke srmPutDone(). In case of any error or exception, the returned int + * value will be zero or less than the input List size. 
+ * + * @param the list of the request id to update + * @return The number of the updated records into the db + */ + public synchronized int transitExpiredSRM_SPACE_AVAILABLEtoSRM_FILE_LIFETIME_EXPIRED( + Collection ids) { + + Preconditions.checkNotNull(ids, "Invalid list of id"); + + if (!checkConnection()) { + log.error("Unable to get a valid connection to the database!"); + return 0; + } + + String querySQL = + "UPDATE status_Put sp " + + "JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "SET sp.statusCode=?, sp.explanation=? " + + "WHERE sp.statusCode=? AND UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(rq.timeStamp) >= rq.pinLifetime "; + + if (!ids.isEmpty()) { + querySQL += "AND rp.ID IN (" + StringUtils.join(ids.toArray(), ',') + ")"; + } + + PreparedStatement stmt = null; + int count = 0; + try { + stmt = con.prepareStatement(querySQL); + printWarnings(con.getWarnings()); + + stmt.setInt(1, statusCodeConverter.toDB(SRM_FILE_LIFETIME_EXPIRED)); + printWarnings(stmt.getWarnings()); + + stmt.setString(2, "Expired pinLifetime"); + printWarnings(stmt.getWarnings()); + + stmt.setInt(3, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); + printWarnings(stmt.getWarnings()); + + log.trace( + "PtP CHUNK DAO - transit SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED: {}", stmt); + + count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to transit chunks from " + + "SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + log.trace( + "PtPChunkDAO! {} chunks of PtP requests were transited " + + "from SRM_SPACE_AVAILABLE to SRM_FILE_LIFETIME_EXPIRED.", + count); + return count; + } + + /** + * Method that updates enqueued requests selected by id into SRM_FAILURE. An array of Long + * representing the id of each request is required. + * + * @param the list of the request id to update + * @return The number of the updated records. Zero or less than the input list size in case of + * errors. + */ + public synchronized int transitExpiredSRM_REQUEST_INPROGRESStoSRM_FAILURE(Collection ids) { + + Preconditions.checkNotNull(ids, "Invalid list of id"); + + if (ids.isEmpty()) { + return 0; + } + + if (!checkConnection()) { + log.error("Unable to get a valid connection to the database!"); + return 0; + } + + String querySQL = + "UPDATE request_queue rq, request_Put rp, status_Put sp " + "SET rq.status=?, sp.statusCode=?, sp.explanation=? " + "WHERE rq.ID = rp.request_queueID and rp.ID = sp.request_PutID " - + "AND rq.status=? AND rq.ID IN (" + buildInClauseForArray(ids.size()) + ")"; - - PreparedStatement stmt = null; - int count = 0; - try { - stmt = con.prepareStatement(querySQL); - printWarnings(con.getWarnings()); - - stmt.setInt(1, statusCodeConverter.toDB(SRM_FAILURE)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, statusCodeConverter.toDB(SRM_FAILURE)); - printWarnings(stmt.getWarnings()); - - stmt.setString(3, "Request expired"); - printWarnings(stmt.getWarnings()); - - stmt.setInt(4, statusCodeConverter.toDB(SRM_REQUEST_INPROGRESS)); - printWarnings(stmt.getWarnings()); - - int i = 5; - for (Long id: ids) { - stmt.setLong(i, id); - printWarnings(stmt.getWarnings()); - i++; - } - - log.trace( - "PtP CHUNK DAO - transit SRM_REQUEST_INPROGRESS to SRM_FAILURE: {}", - stmt); - - count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - - } catch (SQLException e) { - log.error( - "PtPChunkDAO! 
Unable to transit chunks from " - + "SRM_REQUEST_INPROGRESS to SRM_FAILURE! {}", - e.getMessage(), e); - } finally { - close(stmt); - } - log.trace("PtPChunkDAO! {} chunks of PtP requests were transited " - + "from SRM_REQUEST_INPROGRESS to SRM_FAILURE.", count); - return count; - - } - - /** - * Method that transit chunks in SRM_SPACE_AVAILABLE to SRM_ABORTED, for the - * given SURL: the overall request status of the requests containing that - * chunk, is not changed! The TURL is set to null. Beware, that the chunks may - * be part of requests that have finished, or that still have not finished - * because other chunks are still being processed. - */ - public synchronized void transitSRM_SPACE_AVAILABLEtoSRM_ABORTED( - int surlUniqueID, String surl, String explanation) { - - if (!checkConnection()) { - log - .error("PtP CHUNK DAO: transitSRM_SPACE_AVAILABLEtoSRM_ABORTED - unable to get a valid connection!"); - return; - } - String str = "UPDATE " - + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "SET sp.statusCode=?, sp.explanation=?, sp.transferURL=NULL " - + "WHERE sp.statusCode=? AND (rp.targetSURL_uniqueID=? OR rp.targetSURL=?)"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - stmt.setInt(1, statusCodeConverter.toDB(SRM_ABORTED)); - printWarnings(stmt.getWarnings()); - - stmt.setString(2, explanation); - printWarnings(stmt.getWarnings()); - - stmt.setInt(3, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(4, surlUniqueID); - printWarnings(stmt.getWarnings()); - - stmt.setString(5, surl); - printWarnings(stmt.getWarnings()); - - log.trace("PtP CHUNK DAO - " - + "transitSRM_SPACE_AVAILABLEtoSRM_ABORTED: {}", stmt); - int count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - - if (count > 0) { - log.info("PtP CHUNK DAO! {} chunks were transited from " - + "SRM_SPACE_AVAILABLE to SRM_ABORTED.", count); - } else { - log.trace("PtP CHUNK DAO! No chunks " - + "were transited from SRM_SPACE_AVAILABLE to SRM_ABORTED."); - } - } catch (SQLException e) { - log.error("PtP CHUNK DAO! Unable to " - + "transitSRM_SPACE_AVAILABLEtoSRM_ABORTED! {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Auxiliary method used to close a Statement - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("PTP CHUNK DAO! Unable to close Statement {} - Error: {}", - stmt.toString(), e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to close a ResultSet - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("PTP CHUNK DAO! Unable to close ResultSet! Error: {}", - e.getMessage(), e); - } - } - } - - /** - * Auxiliary method that sets up the connection to the DB. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - printWarnings(con.getWarnings()); - response = con.isValid(0); - } catch (ClassNotFoundException | SQLException e) { - log.error("PTP CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. 
- */ - private boolean checkConnection() { - - boolean response = true; - if (reconnect) { - log.debug("PTP CHUNK DAO! Reconnecting to DB! "); - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method that takes down a connection to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - log.error("PTP CHUNK DAO! Exception in takeDownConnection method - " - + "could not close connection! {}", e.getMessage(), e); - } - } - } - - public synchronized int updateStatus(int[] surlsUniqueIDs, String[] surls, - TStatusCode statusCode, String explanation) { - - if (explanation == null) { - throw new IllegalArgumentException("Unable to perform the updateStatus, " - + "invalid arguments: explanation=" + explanation); - } - return doUpdateStatus(null, surlsUniqueIDs, surls, statusCode, explanation, false, - true); - } - - public synchronized int updateStatus(TRequestToken requestToken, - int[] surlsUniqueIDs, String[] surls, TStatusCode statusCode, - String explanation) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || explanation == null) { - throw new IllegalArgumentException("Unable to perform the updateStatus, " - + "invalid arguments: requestToken=" + requestToken + " explanation=" - + explanation); - } - return doUpdateStatus(requestToken, surlsUniqueIDs, surls, statusCode, - explanation, true, true); - } - - private int doUpdateStatus(TRequestToken requestToken, int[] surlsUniqueIDs, - String[] surls, TStatusCode statusCode, String explanation, - boolean withRequestToken, boolean withExplaination) - throws IllegalArgumentException { - - if ((withRequestToken && requestToken == null) - || (withExplaination && explanation == null)) { - throw new IllegalArgumentException("Unable to perform the updateStatus, " - + "invalid arguments: withRequestToken=" + withRequestToken - + " requestToken=" + requestToken + " withExplaination=" - + withExplaination + " explaination=" + explanation); - } - if (!checkConnection()) { - log - .error("PTP CHUNK DAO: updateStatus - unable to get a valid connection!"); - return 0; - } - String str = "UPDATE status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND " - + "rp.request_queueID=rq.ID " + "SET sp.statusCode=? "; - if (withExplaination) { - str += " , " + buildExpainationSet(explanation); - } - str += " WHERE "; - if (withRequestToken) { - str += buildTokenWhereClause(requestToken) + " AND "; - } - str += " ( rp.targetSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) + " AND rp.targetSURL IN " - + makeSurlString(surls) + " ) "; - PreparedStatement stmt = null; - int count = 0; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1, statusCodeConverter.toDB(statusCode)); - printWarnings(stmt.getWarnings()); - - log.trace("PTP CHUNK DAO - updateStatus: {}", stmt); - count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PTP CHUNK DAO! No chunk of PTP request was updated to {}.", - statusCode); - } else { - log.info("PTP CHUNK DAO! {} chunks of PTP requests were updated " - + "to {}.", count, statusCode); - } - } catch (SQLException e) { - log.error("PTP CHUNK DAO! Unable to updated from to {}! 
{}", statusCode, - e.getMessage(), e); - } finally { - close(stmt); - } - return count; - } - - public synchronized int updateStatusOnMatchingStatus( - TRequestToken requestToken, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || explanation == null) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken + " explanation=" - + explanation); - } - return doUpdateStatusOnMatchingStatus(requestToken, null, null, - expectedStatusCode, newStatusCode, explanation, true, false, true); - } - - public synchronized int updateStatusOnMatchingStatus(int[] surlsUniqueIDs, - String[] surls, TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - if (surlsUniqueIDs == null || surls == null || explanation == null - || surlsUniqueIDs.length == 0 || surls.length == 0 - || surlsUniqueIDs.length != surls.length) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs + " surls=" - + surls + " explanation=" + explanation); - } - return doUpdateStatusOnMatchingStatus(null, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, explanation, false, true, true); - } - - public synchronized int updateStatusOnMatchingStatus( - TRequestToken requestToken, int[] surlsUniqueIDs, String[] surls, - TStatusCode expectedStatusCode, TStatusCode newStatusCode) { - - if (requestToken == null || requestToken.getValue().trim().isEmpty() - || surlsUniqueIDs == null || surls == null || surlsUniqueIDs.length == 0 - || surls.length == 0 || surlsUniqueIDs.length != surls.length) { - throw new IllegalArgumentException( - "Unable to perform the updateStatusOnMatchingStatus, " - + "invalid arguments: requestToken=" + requestToken - + "surlsUniqueIDs=" + surlsUniqueIDs + " surls=" + surls); - } - return doUpdateStatusOnMatchingStatus(requestToken, surlsUniqueIDs, surls, - expectedStatusCode, newStatusCode, null, true, true, false); - } - - private int doUpdateStatusOnMatchingStatus(TRequestToken requestToken, - int[] surlsUniqueIDs, String[] surls, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation, boolean withRequestToken, - boolean withSurls, boolean withExplanation) { - - if ((withRequestToken && requestToken == null) - || (withExplanation && explanation == null) - || (withSurls && (surlsUniqueIDs == null || surls == null))) { - throw new IllegalArgumentException( - "Unable to perform the doUpdateStatusOnMatchingStatus, " - + "invalid arguments: withRequestToken=" + withRequestToken - + " requestToken=" + requestToken + " withSurls=" + withSurls - + " surlsUniqueIDs=" + surlsUniqueIDs + " surls=" + surls - + " withExplaination=" + withExplanation + " explanation=" - + explanation); - } - if (!checkConnection()) { - log - .error("PTP CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); - return 0; - } - String str = "UPDATE " - + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "SET sp.statusCode=? "; - if (withExplanation) { - str += " , " + buildExpainationSet(explanation); - } - str += " WHERE sp.statusCode=? 
"; - if (withRequestToken) { - str += " AND " + buildTokenWhereClause(requestToken); - } - if (withSurls) { - str += " AND " + buildSurlsWhereClause(surlsUniqueIDs, surls); - } - - int count = 0; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(str); - printWarnings(con.getWarnings()); - stmt.setInt(1, statusCodeConverter.toDB(newStatusCode)); - printWarnings(stmt.getWarnings()); - - stmt.setInt(2, statusCodeConverter.toDB(expectedStatusCode)); - printWarnings(stmt.getWarnings()); - - log.trace("PTP CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt); - count = stmt.executeUpdate(); - printWarnings(stmt.getWarnings()); - if (count == 0) { - log.trace("PTP CHUNK DAO! No chunk of PTP request was updated " - + "from {} to {}.", expectedStatusCode, newStatusCode); - } else { - log.debug("PTP CHUNK DAO! {} chunks of PTP requests were updated " - + "from {} to {}.", count, expectedStatusCode, newStatusCode); - } - } catch (SQLException e) { - log.error("PTP CHUNK DAO! Unable to updated from {} to {}! Error: {}", - expectedStatusCode, newStatusCode, e.getMessage(), e); - } finally { - close(stmt); - } - return count; - } - - public Collection find(int[] surlsUniqueIDs, String[] surlsArray, String dn) { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0 || dn == null) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " dn=" + dn); - } - return find(surlsUniqueIDs, surlsArray, dn, true); - } - - public Collection find(int[] surlsUniqueIDs, String[] surlsArray) { - - if (surlsUniqueIDs == null || surlsUniqueIDs.length == 0 - || surlsArray == null || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray); - } - return find(surlsUniqueIDs, surlsArray, null, false); - } - - - private List chunkTOfromResultSet(ResultSet rs) - throws SQLException{ - - List results = Lists.newArrayList(); - while (rs.next()) { - - PtPChunkDataTO chunkDataTO = new PtPChunkDataTO(); - - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); + + "AND rq.status=? AND rq.ID IN (" + + buildInClauseForArray(ids.size()) + + ")"; + + PreparedStatement stmt = null; + int count = 0; + try { + stmt = con.prepareStatement(querySQL); + printWarnings(con.getWarnings()); + + stmt.setInt(1, statusCodeConverter.toDB(SRM_FAILURE)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, statusCodeConverter.toDB(SRM_FAILURE)); + printWarnings(stmt.getWarnings()); + + stmt.setString(3, "Request expired"); + printWarnings(stmt.getWarnings()); + + stmt.setInt(4, statusCodeConverter.toDB(SRM_REQUEST_INPROGRESS)); + printWarnings(stmt.getWarnings()); + + int i = 5; + for (Long id : ids) { + stmt.setLong(i, id); + printWarnings(stmt.getWarnings()); + i++; + } + + log.trace("PtP CHUNK DAO - transit SRM_REQUEST_INPROGRESS to SRM_FAILURE: {}", stmt); + + count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + + } catch (SQLException e) { + log.error( + "PtPChunkDAO! Unable to transit chunks from " + + "SRM_REQUEST_INPROGRESS to SRM_FAILURE! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + log.trace( + "PtPChunkDAO! 
{} chunks of PtP requests were transited " + + "from SRM_REQUEST_INPROGRESS to SRM_FAILURE.", + count); + return count; + } + + /** + * Method that transit chunks in SRM_SPACE_AVAILABLE to SRM_ABORTED, for the given SURL: the + * overall request status of the requests containing that chunk, is not changed! The TURL is set + * to null. Beware, that the chunks may be part of requests that have finished, or that still have + * not finished because other chunks are still being processed. + */ + public synchronized void transitSRM_SPACE_AVAILABLEtoSRM_ABORTED( + int surlUniqueID, String surl, String explanation) { + + if (!checkConnection()) { + log.error( + "PtP CHUNK DAO: transitSRM_SPACE_AVAILABLEtoSRM_ABORTED - unable to get a valid connection!"); + return; + } + String str = + "UPDATE " + + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "SET sp.statusCode=?, sp.explanation=?, sp.transferURL=NULL " + + "WHERE sp.statusCode=? AND (rp.targetSURL_uniqueID=? OR rp.targetSURL=?)"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + stmt.setInt(1, statusCodeConverter.toDB(SRM_ABORTED)); + printWarnings(stmt.getWarnings()); + + stmt.setString(2, explanation); + printWarnings(stmt.getWarnings()); + + stmt.setInt(3, statusCodeConverter.toDB(SRM_SPACE_AVAILABLE)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(4, surlUniqueID); + printWarnings(stmt.getWarnings()); + + stmt.setString(5, surl); + printWarnings(stmt.getWarnings()); + + log.trace("PtP CHUNK DAO - " + "transitSRM_SPACE_AVAILABLEtoSRM_ABORTED: {}", stmt); + int count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + + if (count > 0) { + log.info( + "PtP CHUNK DAO! {} chunks were transited from " + "SRM_SPACE_AVAILABLE to SRM_ABORTED.", + count); + } else { + log.trace( + "PtP CHUNK DAO! No chunks " + + "were transited from SRM_SPACE_AVAILABLE to SRM_ABORTED."); + } + } catch (SQLException e) { + log.error( + "PtP CHUNK DAO! Unable to " + "transitSRM_SPACE_AVAILABLEtoSRM_ABORTED! {}", + e.getMessage(), + e); + } finally { + close(stmt); + } + } + + /** Auxiliary method used to close a Statement */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "PTP CHUNK DAO! Unable to close Statement {} - Error: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + /** Auxiliary method used to close a ResultSet */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("PTP CHUNK DAO! Unable to close ResultSet! Error: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method that sets up the connection to the DB. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + printWarnings(con.getWarnings()); + response = con.isValid(0); + } catch (ClassNotFoundException | SQLException e) { + log.error("PTP CHUNK DAO! Exception in setUpConnection! {}", e.getMessage(), e); + } + return response; + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private boolean checkConnection() { + + boolean response = true; + if (reconnect) { + log.debug("PTP CHUNK DAO! Reconnecting to DB! 
"); + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method that takes down a connection to the DB. */ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + log.error( + "PTP CHUNK DAO! Exception in takeDownConnection method - " + + "could not close connection! {}", + e.getMessage(), + e); + } + } + } + + public synchronized int updateStatus( + int[] surlsUniqueIDs, String[] surls, TStatusCode statusCode, String explanation) { + + if (explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatus, " + "invalid arguments: explanation=" + explanation); + } + return doUpdateStatus(null, surlsUniqueIDs, surls, statusCode, explanation, false, true); + } + + public synchronized int updateStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode statusCode, + String explanation) { + + if (requestToken == null || requestToken.getValue().trim().isEmpty() || explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatus, " + + "invalid arguments: requestToken=" + + requestToken + + " explanation=" + + explanation); + } + return doUpdateStatus(requestToken, surlsUniqueIDs, surls, statusCode, explanation, true, true); + } + + private int doUpdateStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode statusCode, + String explanation, + boolean withRequestToken, + boolean withExplaination) + throws IllegalArgumentException { + + if ((withRequestToken && requestToken == null) || (withExplaination && explanation == null)) { + throw new IllegalArgumentException( + "Unable to perform the updateStatus, " + + "invalid arguments: withRequestToken=" + + withRequestToken + + " requestToken=" + + requestToken + + " withExplaination=" + + withExplaination + + " explaination=" + + explanation); + } + if (!checkConnection()) { + log.error("PTP CHUNK DAO: updateStatus - unable to get a valid connection!"); + return 0; + } + String str = + "UPDATE status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND " + + "rp.request_queueID=rq.ID " + + "SET sp.statusCode=? "; + if (withExplaination) { + str += " , " + buildExpainationSet(explanation); + } + str += " WHERE "; + if (withRequestToken) { + str += buildTokenWhereClause(requestToken) + " AND "; + } + str += + " ( rp.targetSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rp.targetSURL IN " + + makeSurlString(surls) + + " ) "; + PreparedStatement stmt = null; + int count = 0; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, statusCodeConverter.toDB(statusCode)); + printWarnings(stmt.getWarnings()); + + log.trace("PTP CHUNK DAO - updateStatus: {}", stmt); + count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace("PTP CHUNK DAO! No chunk of PTP request was updated to {}.", statusCode); + } else { + log.info( + "PTP CHUNK DAO! {} chunks of PTP requests were updated " + "to {}.", count, statusCode); + } + } catch (SQLException e) { + log.error("PTP CHUNK DAO! Unable to updated from to {}! 
{}", statusCode, e.getMessage(), e); + } finally { + close(stmt); + } + return count; + } + + public synchronized int updateStatusOnMatchingStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (requestToken == null || requestToken.getValue().trim().isEmpty() || explanation == null) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + " explanation=" + + explanation); + } + return doUpdateStatusOnMatchingStatus( + requestToken, + null, + null, + expectedStatusCode, + newStatusCode, + explanation, + true, + false, + true); + } + + public synchronized int updateStatusOnMatchingStatus( + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (surlsUniqueIDs == null + || surls == null + || explanation == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls + + " explanation=" + + explanation); + } + return doUpdateStatusOnMatchingStatus( + null, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + explanation, + false, + true, + true); + } + + public synchronized int updateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode) { + + if (requestToken == null + || requestToken.getValue().trim().isEmpty() + || surlsUniqueIDs == null + || surls == null + || surlsUniqueIDs.length == 0 + || surls.length == 0 + || surlsUniqueIDs.length != surls.length) { + throw new IllegalArgumentException( + "Unable to perform the updateStatusOnMatchingStatus, " + + "invalid arguments: requestToken=" + + requestToken + + "surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls); + } + return doUpdateStatusOnMatchingStatus( + requestToken, + surlsUniqueIDs, + surls, + expectedStatusCode, + newStatusCode, + null, + true, + true, + false); + } + + private int doUpdateStatusOnMatchingStatus( + TRequestToken requestToken, + int[] surlsUniqueIDs, + String[] surls, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation, + boolean withRequestToken, + boolean withSurls, + boolean withExplanation) { + + if ((withRequestToken && requestToken == null) + || (withExplanation && explanation == null) + || (withSurls && (surlsUniqueIDs == null || surls == null))) { + throw new IllegalArgumentException( + "Unable to perform the doUpdateStatusOnMatchingStatus, " + + "invalid arguments: withRequestToken=" + + withRequestToken + + " requestToken=" + + requestToken + + " withSurls=" + + withSurls + + " surlsUniqueIDs=" + + surlsUniqueIDs + + " surls=" + + surls + + " withExplaination=" + + withExplanation + + " explanation=" + + explanation); + } + if (!checkConnection()) { + log.error("PTP CHUNK DAO: updateStatusOnMatchingStatus - unable to get a valid connection!"); + return 0; + } + String str = + "UPDATE " + + "status_Put sp JOIN (request_Put rp, request_queue rq) ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "SET sp.statusCode=? "; + if (withExplanation) { + str += " , " + buildExpainationSet(explanation); + } + str += " WHERE sp.statusCode=? 
"; + if (withRequestToken) { + str += " AND " + buildTokenWhereClause(requestToken); + } + if (withSurls) { + str += " AND " + buildSurlsWhereClause(surlsUniqueIDs, surls); + } + + int count = 0; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(str); + printWarnings(con.getWarnings()); + stmt.setInt(1, statusCodeConverter.toDB(newStatusCode)); + printWarnings(stmt.getWarnings()); + + stmt.setInt(2, statusCodeConverter.toDB(expectedStatusCode)); + printWarnings(stmt.getWarnings()); + + log.trace("PTP CHUNK DAO - updateStatusOnMatchingStatus: {}", stmt); + count = stmt.executeUpdate(); + printWarnings(stmt.getWarnings()); + if (count == 0) { + log.trace( + "PTP CHUNK DAO! No chunk of PTP request was updated " + "from {} to {}.", + expectedStatusCode, + newStatusCode); + } else { + log.debug( + "PTP CHUNK DAO! {} chunks of PTP requests were updated " + "from {} to {}.", + count, + expectedStatusCode, + newStatusCode); + } + } catch (SQLException e) { + log.error( + "PTP CHUNK DAO! Unable to updated from {} to {}! Error: {}", + expectedStatusCode, + newStatusCode, + e.getMessage(), + e); + } finally { + close(stmt); + } + return count; + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray, String dn) { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0 + || dn == null) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " dn=" + + dn); + } + return find(surlsUniqueIDs, surlsArray, dn, true); + } + + public Collection find(int[] surlsUniqueIDs, String[] surlsArray) { + + if (surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray); + } + return find(surlsUniqueIDs, surlsArray, null, false); + } + + private List chunkTOfromResultSet(ResultSet rs) throws SQLException { + + List results = Lists.newArrayList(); + while (rs.next()) { + + PtPChunkDataTO chunkDataTO = new PtPChunkDataTO(); + + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); @@ -1344,11 +1444,10 @@ private List chunkTOfromResultSet(ResultSet rs) chunkDataTO.setClientDN(rs.getString("rq.client_dn")); /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separated by the "#" char. The proxy is a BLOB, hence it has to be - * properly converted in string. + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separated by the "#" char. The proxy is a BLOB, hence it has to + * be properly converted in string. 
*/ java.sql.Blob blob = rs.getBlob("rq.proxy"); if (!rs.wasNull() && blob != null) { @@ -1358,88 +1457,88 @@ private List chunkTOfromResultSet(ResultSet rs) chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - chunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); + chunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); if (!rs.wasNull()) { chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); } - chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); + chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); chunkDataTO.setRequestToken(rs.getString("rq.r_token")); chunkDataTO.setStatus(rs.getInt("sp.statusCode")); results.add(chunkDataTO); } - - return results; - } - - - - public synchronized List findActivePtPsOnSURLs(List surls){ - - if (surls == null || surls.isEmpty()){ - throw new IllegalArgumentException("cannot find active active " - + "PtPs for an empty or null list of SURLs!"); - } - - ResultSet rs = null; + + return results; + } + + public synchronized List findActivePtPsOnSURLs(List surls) { + + if (surls == null || surls.isEmpty()) { + throw new IllegalArgumentException( + "cannot find active active " + "PtPs for an empty or null list of SURLs!"); + } + + ResultSet rs = null; PreparedStatement stat = null; - + try { - String query = "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " - + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " - + "sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE ( rp.targetSURL in "+ makeSurlString((String[])surls.toArray()) +" )" - + "AND sp.statusCode = 24"; - - stat = con.prepareStatement(query); - printWarnings(con.getWarnings()); - - rs = stat.executeQuery(); - List results = chunkTOfromResultSet(rs); + String query = + "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " + + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " + + "sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE ( rp.targetSURL in " + + makeSurlString((String[]) surls.toArray()) + + " )" + + "AND sp.statusCode = 24"; + + stat = con.prepareStatement(query); + printWarnings(con.getWarnings()); + + rs = stat.executeQuery(); + List results = chunkTOfromResultSet(rs); + + return results; - return results; - } catch (SQLException e) { - log.error("findActivePtPsOnSURLs(): SQL Error: {}", e.getMessage(),e); + log.error("findActivePtPsOnSURLs(): SQL Error: {}", e.getMessage(), e); return Collections.emptyList(); - + } finally { close(rs); close(stat); } - } - + } public synchronized List findActivePtPsOnSURL(String surl) { return findActivePtPsOnSURL(surl, null); } - public synchronized List findActivePtPsOnSURL(String surl, - String currentRequestToken) { + public synchronized List findActivePtPsOnSURL( + String surl, String currentRequestToken) { if (surl == null || surl.isEmpty()) { - throw new IllegalArgumentException("cannot find active active " - + "PtPs for an empty or null 
SURL!"); + throw new IllegalArgumentException( + "cannot find active active " + "PtPs for an empty or null SURL!"); } - + ResultSet rs = null; PreparedStatement stat = null; - + try { - String query = "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " - + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " - + "sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE ( rp.targetSURL = ? and sp.statusCode=24 )"; - - if (currentRequestToken != null){ + String query = + "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " + + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " + + "sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE ( rp.targetSURL = ? and sp.statusCode=24 )"; + + if (currentRequestToken != null) { query += "AND rq.r_token != ?"; } @@ -1447,224 +1546,230 @@ public synchronized List findActivePtPsOnSURL(String surl, printWarnings(con.getWarnings()); stat.setString(1, surl); - - if (currentRequestToken != null){ + + if (currentRequestToken != null) { stat.setString(2, currentRequestToken); } - + rs = stat.executeQuery(); List results = chunkTOfromResultSet(rs); return results; - + } catch (SQLException e) { - log.error("findActivePtPsOnSURL(): SQL Error: {}", e.getMessage(),e); + log.error("findActivePtPsOnSURL(): SQL Error: {}", e.getMessage(), e); return Collections.emptyList(); - + } finally { close(rs); close(stat); } + } + + private synchronized Collection find( + int[] surlsUniqueIDs, String[] surlsArray, String dn, boolean withDn) + throws IllegalArgumentException { + + if ((withDn && dn == null) + || surlsUniqueIDs == null + || surlsUniqueIDs.length == 0 + || surlsArray == null + || surlsArray.length == 0) { + throw new IllegalArgumentException( + "Unable to perform the find, " + + "invalid arguments: surlsUniqueIDs=" + + surlsUniqueIDs + + " surlsArray=" + + surlsArray + + " withDn=" + + withDn + + " dn=" + + dn); + } + if (!checkConnection()) { + log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); + return Lists.newArrayList(); + } + PreparedStatement find = null; + ResultSet rs = null; + try { + // get chunks of the request + String str = + "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " + + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " + + "sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE ( rp.targetSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rp.targetSURL IN " + + makeSurlString(surlsArray) + + " )"; + + if (withDn) { + str += " AND rq.client_dn=\'" + dn + "\'"; + } + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List list = Lists.newArrayList(); + + log.trace("PtP CHUNK DAO - find method: {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + PtPChunkDataTO chunkDataTO = null; 
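+      // Map each row of the request_queue / request_Put / status_Put join into a PtPChunkDataTO.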
+ while (rs.next()) { + chunkDataTO = new PtPChunkDataTO(); + chunkDataTO.setFileStorageType(rs.getString("rq.config_FileStorageTypeID")); + chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); + chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); + chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); + chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); + chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); + chunkDataTO.setClientDN(rs.getString("rq.client_dn")); + + /** + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separated by the "#" char. The proxy is a BLOB, hence it has + * to be properly converted in string. + */ + java.sql.Blob blob = rs.getBlob("rq.proxy"); + if (!rs.wasNull() && blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + chunkDataTO.setVomsAttributes(new String(bdata)); + } + chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); + chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); + + chunkDataTO.setNormalizedStFN(rs.getString("rp.normalized_targetSURL_StFN")); + int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); + if (!rs.wasNull()) { + chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); + } + + chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); + chunkDataTO.setRequestToken(rs.getString("rq.r_token")); + chunkDataTO.setStatus(rs.getInt("sp.statusCode")); + list.add(chunkDataTO); + } + return list; + } catch (SQLException e) { + log.error("PTP CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return Lists.newArrayList(); + } finally { + close(rs); + close(find); + } + } + + public synchronized List findProtocols(long requestQueueId) { + + if (!checkConnection()) { + log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); + return Lists.newArrayList(); + } + String str = null; + PreparedStatement find = null; + ResultSet rs = null; + try { + str = + "SELECT tp.config_ProtocolsID " + + "FROM request_TransferProtocols tp " + + "WHERE tp.request_queueID=?"; + + find = con.prepareStatement(str); + printWarnings(con.getWarnings()); + + List protocols = Lists.newArrayList(); + find.setLong(1, requestQueueId); + printWarnings(find.getWarnings()); + + log.trace("PtP CHUNK DAO - findProtocols method: {}", find); + rs = find.executeQuery(); + printWarnings(find.getWarnings()); + + while (rs.next()) { + protocols.add(rs.getString("tp.config_ProtocolsID")); + } + + return protocols; + } catch (SQLException e) { + log.error("PTP CHUNK DAO: {}", e.getMessage(), e); + /* return empty Collection! */ + return Lists.newArrayList(); + } finally { + close(rs); + close(find); + } + } + + private String buildExpainationSet(String explanation) { + + return " sp.explanation='" + explanation + "' "; + } + + private String buildTokenWhereClause(TRequestToken requestToken) { + + return " rq.r_token='" + requestToken.toString() + "' "; + } + + private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { + + return " ( rp.targetSURL_uniqueID IN " + + makeSURLUniqueIDWhere(surlsUniqueIDs) + + " AND rp.targetSURL IN " + + makeSurlString(surls) + + " ) "; + } + + /** Method that returns a String containing all Surl's IDs. 
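+   * For example, the IDs {10, 11, 12} are rendered as the SQL fragment "(10,11,12)".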
*/ + private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { + + StringBuilder sb = new StringBuilder("("); + for (int i = 0; i < surlUniqueIDs.length; i++) { + if (i > 0) { + sb.append(","); + } + sb.append(surlUniqueIDs[i]); + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all Surls. */ + private String makeSurlString(String[] surls) { + + StringBuilder sb = new StringBuilder("("); + int n = surls.length; + + for (int i = 0; i < n; i++) { + + SURL requestedSURL; + + try { + requestedSURL = SURL.makeSURLfromString(surls[i]); + } catch (NamespaceException e) { + log.error(e.getMessage(), e); + log.debug("Skip '{}' during query creation", surls[i]); + continue; + } + + sb.append("'"); + sb.append(requestedSURL.getNormalFormAsString()); + sb.append("','"); + sb.append(requestedSURL.getQueryFormAsString()); + sb.append("'"); + + if (i < (n - 1)) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); } - - private synchronized Collection find(int[] surlsUniqueIDs, - String[] surlsArray, String dn, boolean withDn) - throws IllegalArgumentException { - - if ((withDn && dn == null) || surlsUniqueIDs == null - || surlsUniqueIDs.length == 0 || surlsArray == null - || surlsArray.length == 0) { - throw new IllegalArgumentException("Unable to perform the find, " - + "invalid arguments: surlsUniqueIDs=" + surlsUniqueIDs - + " surlsArray=" + surlsArray + " withDn=" + withDn + " dn=" + dn); - } - if (!checkConnection()) { - log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); - return Lists.newArrayList(); - } - PreparedStatement find = null; - ResultSet rs = null; - try { - // get chunks of the request - String str = "SELECT rq.ID, rq.r_token, rq.config_FileStorageTypeID, rq.config_OverwriteID, rq.timeStamp, rq.pinLifetime, rq.fileLifetime, " - + "rq.s_token, rq.client_dn, rq.proxy, rp.ID, rp.targetSURL, rp.expectedFileSize, rp.normalized_targetSURL_StFN, rp.targetSURL_uniqueID, " - + "sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE ( rp.targetSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) - + " AND rp.targetSURL IN " - + makeSurlString(surlsArray) + " )"; - - if (withDn) { - str += " AND rq.client_dn=\'" + dn + "\'"; - } - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List list = Lists.newArrayList(); - - log.trace("PtP CHUNK DAO - find method: {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - PtPChunkDataTO chunkDataTO = null; - while (rs.next()) { - chunkDataTO = new PtPChunkDataTO(); - chunkDataTO.setFileStorageType(rs - .getString("rq.config_FileStorageTypeID")); - chunkDataTO.setOverwriteOption(rs.getString("rq.config_OverwriteID")); - chunkDataTO.setTimeStamp(rs.getTimestamp("rq.timeStamp")); - chunkDataTO.setPinLifetime(rs.getInt("rq.pinLifetime")); - chunkDataTO.setFileLifetime(rs.getInt("rq.fileLifetime")); - chunkDataTO.setSpaceToken(rs.getString("rq.s_token")); - chunkDataTO.setClientDN(rs.getString("rq.client_dn")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separated by the "#" char. The proxy is a BLOB, hence it has to be - * properly converted in string. 
- */ - java.sql.Blob blob = rs.getBlob("rq.proxy"); - if (!rs.wasNull() && blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - chunkDataTO.setVomsAttributes(new String(bdata)); - } - chunkDataTO.setPrimaryKey(rs.getLong("rp.ID")); - chunkDataTO.setToSURL(rs.getString("rp.targetSURL")); - - chunkDataTO.setNormalizedStFN(rs - .getString("rp.normalized_targetSURL_StFN")); - int uniqueID = rs.getInt("rp.targetSURL_uniqueID"); - if (!rs.wasNull()) { - chunkDataTO.setSurlUniqueID(Integer.valueOf(uniqueID)); - } - - chunkDataTO.setExpectedFileSize(rs.getLong("rp.expectedFileSize")); - chunkDataTO.setRequestToken(rs.getString("rq.r_token")); - chunkDataTO.setStatus(rs.getInt("sp.statusCode")); - list.add(chunkDataTO); - } - return list; - } catch (SQLException e) { - log.error("PTP CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return Lists.newArrayList(); - } finally { - close(rs); - close(find); - } - } - - public synchronized List findProtocols(long requestQueueId) { - - if (!checkConnection()) { - log.error("PtP CHUNK DAO: find - unable to get a valid connection!"); - return Lists.newArrayList(); - } - String str = null; - PreparedStatement find = null; - ResultSet rs = null; - try { - str = "SELECT tp.config_ProtocolsID " - + "FROM request_TransferProtocols tp " + "WHERE tp.request_queueID=?"; - - find = con.prepareStatement(str); - printWarnings(con.getWarnings()); - - List protocols = Lists.newArrayList(); - find.setLong(1, requestQueueId); - printWarnings(find.getWarnings()); - - log.trace("PtP CHUNK DAO - findProtocols method: {}", find); - rs = find.executeQuery(); - printWarnings(find.getWarnings()); - - while (rs.next()) { - protocols.add(rs.getString("tp.config_ProtocolsID")); - } - - return protocols; - } catch (SQLException e) { - log.error("PTP CHUNK DAO: {}", e.getMessage(), e); - /* return empty Collection! */ - return Lists.newArrayList(); - } finally { - close(rs); - close(find); - } - } - - private String buildExpainationSet(String explanation) { - - return " sp.explanation='" + explanation + "' "; - } - - private String buildTokenWhereClause(TRequestToken requestToken) { - - return " rq.r_token='" + requestToken.toString() + "' "; - } - - private String buildSurlsWhereClause(int[] surlsUniqueIDs, String[] surls) { - - return " ( rp.targetSURL_uniqueID IN " - + makeSURLUniqueIDWhere(surlsUniqueIDs) + " AND rp.targetSURL IN " - + makeSurlString(surls) + " ) "; - } - - /** - * Method that returns a String containing all Surl's IDs. - */ - private String makeSURLUniqueIDWhere(int[] surlUniqueIDs) { - - StringBuilder sb = new StringBuilder("("); - for (int i = 0; i < surlUniqueIDs.length; i++) { - if (i > 0) { - sb.append(","); - } - sb.append(surlUniqueIDs[i]); - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all Surls. 
- */ - private String makeSurlString(String[] surls) { - - StringBuilder sb = new StringBuilder("("); - int n = surls.length; - - for (int i = 0; i < n; i++) { - - SURL requestedSURL; - - try { - requestedSURL = SURL.makeSURLfromString(surls[i]); - } catch (NamespaceException e) { - log.error(e.getMessage(), e); - log.debug("Skip '{}' during query creation", surls[i]); - continue; - } - - sb.append("'"); - sb.append(requestedSURL.getNormalFormAsString()); - sb.append("','"); - sb.append(requestedSURL.getQueryFormAsString()); - sb.append("'"); - - if (i < (n - 1)) { - sb.append(","); - } - } - - sb.append(")"); - return sb.toString(); - } - -} \ No newline at end of file +} diff --git a/src/main/java/it/grid/storm/catalogs/PtPChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/PtPChunkDataTO.java index 754718d5..e665ae99 100644 --- a/src/main/java/it/grid/storm/catalogs/PtPChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/PtPChunkDataTO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -10,320 +9,301 @@ import it.grid.storm.srm.types.TFileStorageType; import it.grid.storm.srm.types.TOverwriteMode; import it.grid.storm.srm.types.TStatusCode; - import java.sql.Timestamp; import java.util.List; /** - * Class that represents a row in the Persistence Layer: this is all raw data - * referring to the PtPChunkData proper, that is, String and primitive types. - * - * Each field is initialized with default values as per SRM 2.2 specification: - * protocolList GSIFTP fileStorageType VOLATILE overwriteMode NEVER status - * SRM_REQUEST_QUEUED - * - * All other fields are 0 if int, or a white space if String. - * + * Class that represents a row in the Persistence Layer: this is all raw data referring to the + * PtPChunkData proper, that is, String and primitive types. + * + *
<p>
Each field is initialized with default values as per SRM 2.2 specification: protocolList + * GSIFTP fileStorageType VOLATILE overwriteMode NEVER status SRM_REQUEST_QUEUED + * + *
<p>
All other fields are 0 if int, or a white space if String. + * * @author EGRID ICTP * @version 2.0 * @date June 2005 */ public class PtPChunkDataTO { - private static final String FQAN_SEPARATOR = "#"; - /* Database table request_Get fields BEGIN */ - private long primaryKey = -1; // ID primary key of status_Put record in DB - private String toSURL = " "; - private long expectedFileSize = 0; - private String normalizedStFN = null; - private Integer surlUniqueID = null; - /* Database table request_Get fields END */ - - private String requestToken = " "; - private int pinLifetime = -1; - private int fileLifetime = -1; - private String fileStorageType = null; // initialised in constructor - private String spaceToken = " "; - private List protocolList = null; // initialised in constructor - private String overwriteOption = null; // initialised in constructor - private int status; // initialised in constructor - private String errString = " "; - private String turl = " "; - private Timestamp timeStamp = null; - - private String clientDN = null; - private String vomsAttributes = null; - + private static final String FQAN_SEPARATOR = "#"; + /* Database table request_Get fields BEGIN */ + private long primaryKey = -1; // ID primary key of status_Put record in DB + private String toSURL = " "; + private long expectedFileSize = 0; + private String normalizedStFN = null; + private Integer surlUniqueID = null; + /* Database table request_Get fields END */ - public PtPChunkDataTO() { + private String requestToken = " "; + private int pinLifetime = -1; + private int fileLifetime = -1; + private String fileStorageType = null; // initialised in constructor + private String spaceToken = " "; + private List protocolList = null; // initialised in constructor + private String overwriteOption = null; // initialised in constructor + private int status; // initialised in constructor + private String errString = " "; + private String turl = " "; + private Timestamp timeStamp = null; - this.fileStorageType = FileStorageTypeConverter.getInstance().toDB( - TFileStorageType.getTFileStorageType(Configuration.getInstance() - .getDefaultFileStorageType())); - TURLPrefix protocolPreferences = new TURLPrefix(); - protocolPreferences.addProtocol(Protocol.GSIFTP); - this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); - this.overwriteOption = OverwriteModeConverter.getInstance().toDB( - TOverwriteMode.NEVER); - this.status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - } + private String clientDN = null; + private String vomsAttributes = null; - public long primaryKey() { + public PtPChunkDataTO() { - return primaryKey; - } + this.fileStorageType = + FileStorageTypeConverter.getInstance() + .toDB( + TFileStorageType.getTFileStorageType( + Configuration.getInstance().getDefaultFileStorageType())); + TURLPrefix protocolPreferences = new TURLPrefix(); + protocolPreferences.addProtocol(Protocol.GSIFTP); + this.protocolList = TransferProtocolListConverter.toDB(protocolPreferences); + this.overwriteOption = OverwriteModeConverter.getInstance().toDB(TOverwriteMode.NEVER); + this.status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + } - public void setPrimaryKey(long n) { + public long primaryKey() { - primaryKey = n; - } + return primaryKey; + } - public String requestToken() { + public void setPrimaryKey(long n) { - return requestToken; - } + primaryKey = n; + } - public void setRequestToken(String s) { + public String requestToken() { - 
requestToken = s; - } + return requestToken; + } - public Timestamp timeStamp() { + public void setRequestToken(String s) { - return timeStamp; - } + requestToken = s; + } - public void setTimeStamp(Timestamp timeStamp) { + public Timestamp timeStamp() { - this.timeStamp = timeStamp; - } + return timeStamp; + } - public String toSURL() { + public void setTimeStamp(Timestamp timeStamp) { - return toSURL; - } + this.timeStamp = timeStamp; + } - public void setToSURL(String s) { + public String toSURL() { - toSURL = s; - } + return toSURL; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + public void setToSURL(String s) { - return normalizedStFN; - } + toSURL = s; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + /** @return the normalizedStFN */ + public String normalizedStFN() { - this.normalizedStFN = normalizedStFN; - } + return normalizedStFN; + } - /** - * @return the surlUniqueID - */ - public Integer surlUniqueID() { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - return surlUniqueID; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @param surlUniqueID - * the surlUniqueID to set - */ - public void setSurlUniqueID(Integer surlUniqueID) { + /** @return the surlUniqueID */ + public Integer surlUniqueID() { - this.surlUniqueID = surlUniqueID; - } + return surlUniqueID; + } - public int pinLifetime() { + /** @param surlUniqueID the surlUniqueID to set */ + public void setSurlUniqueID(Integer surlUniqueID) { - return pinLifetime; - } + this.surlUniqueID = surlUniqueID; + } - public void setPinLifetime(int n) { + public int pinLifetime() { - pinLifetime = n; - } + return pinLifetime; + } - public int fileLifetime() { + public void setPinLifetime(int n) { - return fileLifetime; - } + pinLifetime = n; + } - public void setFileLifetime(int n) { + public int fileLifetime() { - fileLifetime = n; - } + return fileLifetime; + } - public String fileStorageType() { + public void setFileLifetime(int n) { - return fileStorageType; - } + fileLifetime = n; + } - /** - * Method that sets the FileStorageType: if it is null nothing gets set. The - * deafult value is Permanent. - */ - public void setFileStorageType(String s) { + public String fileStorageType() { - if (s != null) - fileStorageType = s; - } + return fileStorageType; + } - public String spaceToken() { + /** + * Method that sets the FileStorageType: if it is null nothing gets set. The deafult value is + * Permanent. 
+ */ + public void setFileStorageType(String s) { - return spaceToken; - } + if (s != null) fileStorageType = s; + } - public void setSpaceToken(String s) { + public String spaceToken() { - spaceToken = s; - } + return spaceToken; + } - public long expectedFileSize() { + public void setSpaceToken(String s) { - return expectedFileSize; - } + spaceToken = s; + } - public void setExpectedFileSize(long l) { + public long expectedFileSize() { - expectedFileSize = l; - } + return expectedFileSize; + } - public List protocolList() { + public void setExpectedFileSize(long l) { - return protocolList; - } + expectedFileSize = l; + } - public void setProtocolList(List l) { + public List protocolList() { - if ((l != null) && (!l.isEmpty())) - protocolList = l; - } + return protocolList; + } - public String overwriteOption() { + public void setProtocolList(List l) { - return overwriteOption; - } + if ((l != null) && (!l.isEmpty())) protocolList = l; + } - /** - * Method that sets the OverwriteMode: if it is null nothing gets set. The - * deafult value is Never. - */ - public void setOverwriteOption(String s) { + public String overwriteOption() { - if (s != null) - overwriteOption = s; - } + return overwriteOption; + } - public int status() { + /** + * Method that sets the OverwriteMode: if it is null nothing gets set. The deafult value is Never. + */ + public void setOverwriteOption(String s) { - return status; - } + if (s != null) overwriteOption = s; + } - public void setStatus(int n) { + public int status() { - status = n; - } + return status; + } - public String errString() { + public void setStatus(int n) { - return errString; - } + status = n; + } - public void setErrString(String s) { + public String errString() { - errString = s; - } + return errString; + } - public String transferURL() { + public void setErrString(String s) { - return turl; - } + errString = s; + } - public void setTransferURL(String s) { + public String transferURL() { - turl = s; - } + return turl; + } - public String clientDN() { + public void setTransferURL(String s) { - return clientDN; - } + turl = s; + } - public void setClientDN(String s) { + public String clientDN() { - clientDN = s; - } + return clientDN; + } - public String vomsAttributes() { + public void setClientDN(String s) { - return vomsAttributes; - } + clientDN = s; + } - public void setVomsAttributes(String s) { + public String vomsAttributes() { - vomsAttributes = s; - } + return vomsAttributes; + } - public void setVomsAttributes(String[] fqaNsAsString) { + public void setVomsAttributes(String s) { - vomsAttributes = ""; - for (int i = 0; i < fqaNsAsString.length; i++) { - vomsAttributes += fqaNsAsString[i]; - if (i < fqaNsAsString.length - 1) { - vomsAttributes += FQAN_SEPARATOR; - } - } + vomsAttributes = s; + } - } + public void setVomsAttributes(String[] fqaNsAsString) { - public String[] vomsAttributesArray() { + vomsAttributes = ""; + for (int i = 0; i < fqaNsAsString.length; i++) { + vomsAttributes += fqaNsAsString[i]; + if (i < fqaNsAsString.length - 1) { + vomsAttributes += FQAN_SEPARATOR; + } + } + } - return vomsAttributes.split(FQAN_SEPARATOR); - } + public String[] vomsAttributesArray() { - public String toString() { + return vomsAttributes.split(FQAN_SEPARATOR); + } - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(requestToken); - sb.append(" "); - sb.append(toSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - sb.append(surlUniqueID); - sb.append(" "); - 
sb.append(pinLifetime); - sb.append(" "); - sb.append(fileLifetime); - sb.append(" "); - sb.append(fileStorageType); - sb.append(" "); - sb.append(spaceToken); - sb.append(" "); - sb.append(expectedFileSize); - sb.append(" "); - sb.append(protocolList); - sb.append(" "); - sb.append(overwriteOption); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - sb.append(turl); - return sb.toString(); - } + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(requestToken); + sb.append(" "); + sb.append(toSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(pinLifetime); + sb.append(" "); + sb.append(fileLifetime); + sb.append(" "); + sb.append(fileStorageType); + sb.append(" "); + sb.append(spaceToken); + sb.append(" "); + sb.append(expectedFileSize); + sb.append(" "); + sb.append(protocolList); + sb.append(" "); + sb.append(overwriteOption); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + sb.append(turl); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/PtPData.java b/src/main/java/it/grid/storm/catalogs/PtPData.java index 3a2cd109..d7783517 100644 --- a/src/main/java/it/grid/storm/catalogs/PtPData.java +++ b/src/main/java/it/grid/storm/catalogs/PtPData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -12,52 +11,35 @@ public interface PtPData extends FileTransferData { - /** - * Method that returns the space token supplied for this chunk of the srm - * request. - */ - public TSpaceToken getSpaceToken(); - - /** - * Method that returns the requested pin life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds pinLifetime(); - - /** - * Method that returns the requested file life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds fileLifetime(); - - /** - * Method that returns the fileStorageType for this chunk of the srm request. - */ - public TFileStorageType fileStorageType(); - - /** - * Method that returns the knownSizeOfThisFile supplied with this chunk of the - * srm request. - */ - public TSizeInBytes expectedFileSize(); - - /** - * Method that returns the overwriteOption specified in the srm request. - */ - public TOverwriteMode overwriteOption(); - - /** - * Method that sets the status of this request to SRM_SPACE_AVAILABLE; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_SPACE_AVAILABLE(String explanation); - - /** - * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_DUPLICATION_ERROR(String explanation); + /** Method that returns the space token supplied for this chunk of the srm request. */ + public TSpaceToken getSpaceToken(); + /** Method that returns the requested pin life time for this chunk of the srm request. 
*/ + public TLifeTimeInSeconds pinLifetime(); + + /** Method that returns the requested file life time for this chunk of the srm request. */ + public TLifeTimeInSeconds fileLifetime(); + + /** Method that returns the fileStorageType for this chunk of the srm request. */ + public TFileStorageType fileStorageType(); + + /** Method that returns the knownSizeOfThisFile supplied with this chunk of the srm request. */ + public TSizeInBytes expectedFileSize(); + + /** Method that returns the overwriteOption specified in the srm request. */ + public TOverwriteMode overwriteOption(); + + /** + * Method that sets the status of this request to SRM_SPACE_AVAILABLE; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_SPACE_AVAILABLE(String explanation); + + /** + * Method that sets the status of this request to SRM_DUPLICATION_ERROR; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_DUPLICATION_ERROR(String explanation); } diff --git a/src/main/java/it/grid/storm/catalogs/PtPPersistentChunkData.java b/src/main/java/it/grid/storm/catalogs/PtPPersistentChunkData.java index b981f3ab..992dfcd3 100644 --- a/src/main/java/it/grid/storm/catalogs/PtPPersistentChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/PtPPersistentChunkData.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TFileStorageType; @@ -18,175 +14,182 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TTURL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * This class represents a PrepareToPutChunkData, that is part of a multifile - * PrepareToPut srm request. It contains data about: the requestToken, the - * toSURL, the requested lifeTime of pinning, the requested lifetime of - * volatile, the requested fileStorageType and any available spaceToken, the - * expectedFileSize, the desired transferProtocols in order of preference, the - * overwriteOption to be applied in case the file already exists, the - * transferURL for the supplied SURL. - * + * This class represents a PrepareToPutChunkData, that is part of a multifile PrepareToPut srm + * request. It contains data about: the requestToken, the toSURL, the requested lifeTime of pinning, + * the requested lifetime of volatile, the requested fileStorageType and any available spaceToken, + * the expectedFileSize, the desired transferProtocols in order of preference, the overwriteOption + * to be applied in case the file already exists, the transferURL for the supplied SURL. 
+ * * @author EGRID - ICTP Trieste * @date June, 2005 * @version 2.0 */ -public class PtPPersistentChunkData extends IdentityPtPData implements - PersistentChunkData { - - private static final Logger log = LoggerFactory - .getLogger(PtPPersistentChunkData.class); - - /** - * long representing the primary key for the persistence layer, in the - * status_Put table - */ - private long primaryKey = -1; - - /** - * This is the requestToken of the multifile srm request to which this chunk - * belongs - */ - private final TRequestToken requestToken; - - public PtPPersistentChunkData(GridUserInterface auth, - TRequestToken requestToken, TSURL toSURL, TLifeTimeInSeconds pinLifetime, - TLifeTimeInSeconds fileLifetime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TSizeInBytes expectedFileSize, - TURLPrefix transferProtocols, TOverwriteMode overwriteOption, - TReturnStatus status, TTURL transferURL) - throws InvalidPtPPersistentChunkDataAttributesException, - InvalidPtPDataAttributesException, - InvalidFileTransferDataAttributesException, - InvalidSurlRequestDataAttributesException { - - super(auth, toSURL, pinLifetime, fileLifetime, fileStorageType, spaceToken, - expectedFileSize, transferProtocols, overwriteOption, status, transferURL); - if (requestToken == null) { - log.debug("PtPPersistentChunkData: requestToken is null!"); - throw new InvalidPtPPersistentChunkDataAttributesException(requestToken, - toSURL, pinLifetime, fileLifetime, fileStorageType, spaceToken, - expectedFileSize, transferProtocols, overwriteOption, status, - transferURL); - } - this.requestToken = requestToken; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - @Override - public long getPrimaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the requestToken of the srm request to which this chunk - * belongs. - */ - @Override - public TRequestToken getRequestToken() { - - return requestToken; - } - - @Override - public long getIdentifier() { - - return getPrimaryKey(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = super.hashCode(); - result = prime * result + (int) (primaryKey ^ (primaryKey >>> 32)); - result = prime * result - + ((requestToken == null) ? 
0 : requestToken.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PtPPersistentChunkData other = (PtPPersistentChunkData) obj; - if (primaryKey != other.primaryKey) { - return false; - } - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - return true; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("PtPPersistentChunkData [primaryKey="); - builder.append(primaryKey); - builder.append(", requestToken="); - builder.append(requestToken); - builder.append(", spaceToken="); - builder.append(spaceToken); - builder.append(", pinLifetime="); - builder.append(pinLifetime); - builder.append(", fileLifetime="); - builder.append(fileLifetime); - builder.append(", fileStorageType="); - builder.append(fileStorageType); - builder.append(", overwriteOption="); - builder.append(overwriteOption); - builder.append(", expectedFileSize="); - builder.append(expectedFileSize); - builder.append(", transferProtocols="); - builder.append(transferProtocols); - builder.append(", SURL="); - builder.append(SURL); - builder.append(", status="); - builder.append(status); - builder.append(", transferURL="); - builder.append(transferURL); - builder.append("]"); - return builder.toString(); - } +public class PtPPersistentChunkData extends IdentityPtPData implements PersistentChunkData { + + private static final Logger log = LoggerFactory.getLogger(PtPPersistentChunkData.class); + + /** long representing the primary key for the persistence layer, in the status_Put table */ + private long primaryKey = -1; + + /** This is the requestToken of the multifile srm request to which this chunk belongs */ + private final TRequestToken requestToken; + + public PtPPersistentChunkData( + GridUserInterface auth, + TRequestToken requestToken, + TSURL toSURL, + TLifeTimeInSeconds pinLifetime, + TLifeTimeInSeconds fileLifetime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TSizeInBytes expectedFileSize, + TURLPrefix transferProtocols, + TOverwriteMode overwriteOption, + TReturnStatus status, + TTURL transferURL) + throws InvalidPtPPersistentChunkDataAttributesException, InvalidPtPDataAttributesException, + InvalidFileTransferDataAttributesException, InvalidSurlRequestDataAttributesException { + + super( + auth, + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); + if (requestToken == null) { + log.debug("PtPPersistentChunkData: requestToken is null!"); + throw new InvalidPtPPersistentChunkDataAttributesException( + requestToken, + toSURL, + pinLifetime, + fileLifetime, + fileStorageType, + spaceToken, + expectedFileSize, + transferProtocols, + overwriteOption, + status, + transferURL); + } + this.requestToken = requestToken; + } + + /** Method used to get the primary key used in the persistence layer! */ + @Override + public long getPrimaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! 
*/ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the requestToken of the srm request to which this chunk belongs. */ + @Override + public TRequestToken getRequestToken() { + + return requestToken; + } + + @Override + public long getIdentifier() { + + return getPrimaryKey(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = super.hashCode(); + result = prime * result + (int) (primaryKey ^ (primaryKey >>> 32)); + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PtPPersistentChunkData other = (PtPPersistentChunkData) obj; + if (primaryKey != other.primaryKey) { + return false; + } + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + return true; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("PtPPersistentChunkData [primaryKey="); + builder.append(primaryKey); + builder.append(", requestToken="); + builder.append(requestToken); + builder.append(", spaceToken="); + builder.append(spaceToken); + builder.append(", pinLifetime="); + builder.append(pinLifetime); + builder.append(", fileLifetime="); + builder.append(fileLifetime); + builder.append(", fileStorageType="); + builder.append(fileStorageType); + builder.append(", overwriteOption="); + builder.append(overwriteOption); + builder.append(", expectedFileSize="); + builder.append(expectedFileSize); + builder.append(", transferProtocols="); + builder.append(transferProtocols); + builder.append(", SURL="); + builder.append(SURL); + builder.append(", status="); + builder.append(status); + builder.append(", transferURL="); + builder.append(transferURL); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkData.java b/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkData.java index d7b3c0e8..3bdd4be0 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkData.java @@ -1,130 +1,118 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a ReducedBringOnLineChunkData, that is part of a - * multifile PrepareToGet srm request. It is closely related to BoLChunkData but - * it is called Reduced because it only contains the fromSURL, the current - * TReturnStatus, and the primary key of the request. - * - * This class is intended to be used by srmReleaseFiles, where only a limited - * amunt of information is needed instead of full blown BoLChunkData. 
- + * This class represents a ReducedBringOnLineChunkData, that is part of a multifile BringOnLine srm + * request. It is closely related to BoLChunkData but it is called Reduced because it only contains + * the fromSURL, the current TReturnStatus, and the primary key of the request. + * + * <p>
This class is intended to be used by srmReleaseFiles, where only a limited amunt of + * information is needed instead of full blown BoLChunkData. + * * @author CNAF * @date Aug 2009 * @version 1.0 */ public class ReducedBoLChunkData implements ReducedChunkData { - @SuppressWarnings("unused") - private static final Logger log = LoggerFactory - .getLogger(ReducedBoLChunkData.class); - - private long primaryKey = -1; // long representing the primary key for the - // persistence layer! - private TSURL fromSURL; // SURL that the srm command wants to get - private TReturnStatus status; // return status for this chunk of request - - public ReducedBoLChunkData(TSURL fromSURL, TReturnStatus status) - throws InvalidReducedBoLChunkDataAttributesException { - - boolean ok = status != null && fromSURL != null; - if (!ok) { - throw new InvalidReducedBoLChunkDataAttributesException(fromSURL, status); - } - this.fromSURL = fromSURL; - this.status = status; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof ReducedBoLChunkData)) { - return false; - } - ReducedBoLChunkData cd = (ReducedBoLChunkData) o; - return (primaryKey == cd.primaryKey) && fromSURL.equals(cd.fromSURL) - && status.equals(cd.status); - } - - /** - * Method that returns the fromSURL of the srm request to which this chunk - * belongs. - */ - public TSURL fromSURL() { - - return fromSURL; - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + new Long(primaryKey).hashCode(); - hash = 37 * hash + fromSURL.hashCode(); - hash = 37 * hash + status.hashCode(); - return hash; - } - - public boolean isPinned() { - - if (status.getStatusCode() == TStatusCode.SRM_SUCCESS) { - return true; - } - return false; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - public long primaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the status for this chunk of the srm request. - */ - public TReturnStatus status() { - - return status; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("ReducedBoLChunkData\n"); - sb.append("primaryKey="); - sb.append(primaryKey); - sb.append("; "); - sb.append("fromSURL="); - sb.append(fromSURL); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append("."); - return sb.toString(); - } + @SuppressWarnings("unused") + private static final Logger log = LoggerFactory.getLogger(ReducedBoLChunkData.class); + + private long primaryKey = -1; // long representing the primary key for the + // persistence layer! 
+ private TSURL fromSURL; // SURL that the srm command wants to get + private TReturnStatus status; // return status for this chunk of request + + public ReducedBoLChunkData(TSURL fromSURL, TReturnStatus status) + throws InvalidReducedBoLChunkDataAttributesException { + + boolean ok = status != null && fromSURL != null; + if (!ok) { + throw new InvalidReducedBoLChunkDataAttributesException(fromSURL, status); + } + this.fromSURL = fromSURL; + this.status = status; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof ReducedBoLChunkData)) { + return false; + } + ReducedBoLChunkData cd = (ReducedBoLChunkData) o; + return (primaryKey == cd.primaryKey) + && fromSURL.equals(cd.fromSURL) + && status.equals(cd.status); + } + + /** Method that returns the fromSURL of the srm request to which this chunk belongs. */ + public TSURL fromSURL() { + + return fromSURL; + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + new Long(primaryKey).hashCode(); + hash = 37 * hash + fromSURL.hashCode(); + hash = 37 * hash + status.hashCode(); + return hash; + } + + public boolean isPinned() { + + if (status.getStatusCode() == TStatusCode.SRM_SUCCESS) { + return true; + } + return false; + } + + /** Method used to get the primary key used in the persistence layer! */ + public long primaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the status for this chunk of the srm request. */ + public TReturnStatus status() { + + return status; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("ReducedBoLChunkData\n"); + sb.append("primaryKey="); + sb.append(primaryKey); + sb.append("; "); + sb.append("fromSURL="); + sb.append(fromSURL); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkDataTO.java index 4d1d3c7a..0bb653e0 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedBoLChunkDataTO.java @@ -1,120 +1,107 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TStatusCode; /** - * Class that represents some of the fields in a row in the Persistence Layer: - * this is all raw data referring to the ReducedBoLChunkData proper, that is - * String and primitive types. - * + * Class that represents some of the fields in a row in the Persistence Layer: this is all raw data + * referring to the ReducedBoLChunkData proper, that is String and primitive types. 
+ * * @author EGRID ICTP * @version 1.0 * @date November, 2006 */ public class ReducedBoLChunkDataTO { - private long primaryKey = -1; // ID primary key of record in DB - private String fromSURL = " "; - private String normalizedStFN = null; - private Integer surlUniqueID = null; + private long primaryKey = -1; // ID primary key of record in DB + private String fromSURL = " "; + private String normalizedStFN = null; + private Integer surlUniqueID = null; - private int status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - private String errString = " "; + private int status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + private String errString = " "; - public String errString() { + public String errString() { - return errString; - } + return errString; + } - public String fromSURL() { + public String fromSURL() { - return fromSURL; - } + return fromSURL; + } - public long primaryKey() { + public long primaryKey() { - return primaryKey; - } + return primaryKey; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } + errString = s; + } - public void setFromSURL(String s) { + public void setFromSURL(String s) { - fromSURL = s; - } + fromSURL = s; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - public int status() { + public int status() { - return status; - } + return status; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - this.normalizedStFN = normalizedStFN; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + /** @return the normalizedStFN */ + public String normalizedStFN() { - return normalizedStFN; - } + return normalizedStFN; + } - /** - * @param surlUniqueID - * the sURLUniqueID to set - */ - public void setSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the sURLUniqueID to set */ + public void setSurlUniqueID(Integer surlUniqueID) { - this.surlUniqueID = surlUniqueID; - } + this.surlUniqueID = surlUniqueID; + } - /** - * @return the sURLUniqueID - */ - public Integer surlUniqueID() { + /** @return the sURLUniqueID */ + public Integer surlUniqueID() { - return surlUniqueID; - } + return surlUniqueID; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - sb.append(surlUniqueID); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedChunkData.java b/src/main/java/it/grid/storm/catalogs/ReducedChunkData.java index 79b04e75..a75c0892 100644 --- 
a/src/main/java/it/grid/storm/catalogs/ReducedChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -9,14 +8,13 @@ public interface ReducedChunkData { - public TSURL fromSURL(); + public TSURL fromSURL(); - public boolean isPinned(); + public boolean isPinned(); - public long primaryKey(); + public long primaryKey(); - public void setPrimaryKey(long l); - - public TReturnStatus status(); + public void setPrimaryKey(long l); + public TReturnStatus status(); } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkData.java b/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkData.java index b2c25c40..dbecd1ee 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -8,121 +7,110 @@ import it.grid.storm.srm.types.TSURL; /** - * This class represents a ReducedCopyChunkData, that is part of a multifile - * Copy srm request. It contains data about: the requestToken, the fromSURL, the - * toSURL, return status of the file together with its error string. - * + * This class represents a ReducedCopyChunkData, that is part of a multifile Copy srm request. It + * contains data about: the requestToken, the fromSURL, the toSURL, return status of the file + * together with its error string. + * * @author Michele Dibenedetto */ public class ReducedCopyChunkData { - /* long representing the primary key for the persistence layer! */ - private long primaryKey = -1; - /* SURL from which the srmCopy will get the file */ - private TSURL fromSURL; - /* SURL to which the srmCopy will put the file */ - private TSURL toSURL; - /* Return status for this chunk of request */ - private TReturnStatus status; - - public ReducedCopyChunkData(TSURL fromSURL, TSURL toSURL, TReturnStatus status) - throws InvalidReducedCopyChunkDataAttributesException { - - if (fromSURL == null || toSURL == null || status == null) { - throw new InvalidReducedCopyChunkDataAttributesException(fromSURL, - toSURL, status); - } - - this.fromSURL = fromSURL; - this.toSURL = toSURL; - this.status = status; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - public long primaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the fromSURL of the srm request to which this chunk - * belongs. - */ - public TSURL fromSURL() { - - return fromSURL; - } - - /** - * Method that returns the toSURL of the srm request to which this chunk - * belongs. - */ - public TSURL toSURL() { - - return toSURL; - } - - /** - * Method that returns the status for this chunk of the srm request. 
- */ - public TReturnStatus status() { - - return status; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("CopyChunkData\n"); - sb.append("primaryKey="); - sb.append(primaryKey); - sb.append("; "); - sb.append("RequestToken="); - sb.append("fromSURL="); - sb.append(fromSURL); - sb.append("; "); - sb.append("toSURL="); - sb.append(toSURL); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append("; "); - return sb.toString(); - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + new Long(primaryKey).hashCode(); - hash = 37 * hash + fromSURL.hashCode(); - hash = 37 * hash + toSURL.hashCode(); - hash = 37 * hash + status.hashCode(); - return hash; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof ReducedCopyChunkData)) { - return false; - } - ReducedCopyChunkData cd = (ReducedCopyChunkData) o; - return (primaryKey == cd.primaryKey) && fromSURL.equals(cd.fromSURL) - && toSURL.equals(cd.toSURL) && status.equals(cd.status); - } + /* long representing the primary key for the persistence layer! */ + private long primaryKey = -1; + /* SURL from which the srmCopy will get the file */ + private TSURL fromSURL; + /* SURL to which the srmCopy will put the file */ + private TSURL toSURL; + /* Return status for this chunk of request */ + private TReturnStatus status; + + public ReducedCopyChunkData(TSURL fromSURL, TSURL toSURL, TReturnStatus status) + throws InvalidReducedCopyChunkDataAttributesException { + + if (fromSURL == null || toSURL == null || status == null) { + throw new InvalidReducedCopyChunkDataAttributesException(fromSURL, toSURL, status); + } + + this.fromSURL = fromSURL; + this.toSURL = toSURL; + this.status = status; + } + + /** Method used to get the primary key used in the persistence layer! */ + public long primaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the fromSURL of the srm request to which this chunk belongs. */ + public TSURL fromSURL() { + + return fromSURL; + } + + /** Method that returns the toSURL of the srm request to which this chunk belongs. */ + public TSURL toSURL() { + + return toSURL; + } + + /** Method that returns the status for this chunk of the srm request. 
*/ + public TReturnStatus status() { + + return status; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("CopyChunkData\n"); + sb.append("primaryKey="); + sb.append(primaryKey); + sb.append("; "); + sb.append("RequestToken="); + sb.append("fromSURL="); + sb.append(fromSURL); + sb.append("; "); + sb.append("toSURL="); + sb.append(toSURL); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append("; "); + return sb.toString(); + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + new Long(primaryKey).hashCode(); + hash = 37 * hash + fromSURL.hashCode(); + hash = 37 * hash + toSURL.hashCode(); + hash = 37 * hash + status.hashCode(); + return hash; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof ReducedCopyChunkData)) { + return false; + } + ReducedCopyChunkData cd = (ReducedCopyChunkData) o; + return (primaryKey == cd.primaryKey) + && fromSURL.equals(cd.fromSURL) + && toSURL.equals(cd.toSURL) + && status.equals(cd.status); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkDataTO.java index 8fe6bc5b..d2a11e52 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedCopyChunkDataTO.java @@ -1,175 +1,152 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TStatusCode; /** - * Class that represents some of the fields in a row in the Persistence Layer: - * this is all raw data referring to the ReducedCopyChunkData proper, that is - * String and primitive types. - * - * All other fields are 0 if int, or a white space if String. - * + * Class that represents some of the fields in a row in the Persistence Layer: this is all raw data + * referring to the ReducedCopyChunkData proper, that is String and primitive types. + * + *
<p>
All other fields are 0 if int, or a white space if String. + * * @author Michele Dibenedetto */ public class ReducedCopyChunkDataTO { - /* Database table request_Get fields BEGIN */ - private long primaryKey = -1; // ID primary key of record in DB - private String fromSURL = " "; - private String normalizedSourceStFN = null; - private Integer sourceSurlUniqueID = null; - private String toSURL = " "; - private String normalizedTargetStFN = null; - private Integer targetSurlUniqueID = null; - /* Database table request_Get fields END */ + /* Database table request_Get fields BEGIN */ + private long primaryKey = -1; // ID primary key of record in DB + private String fromSURL = " "; + private String normalizedSourceStFN = null; + private Integer sourceSurlUniqueID = null; + private String toSURL = " "; + private String normalizedTargetStFN = null; + private Integer targetSurlUniqueID = null; + /* Database table request_Get fields END */ - private int status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - private String errString = " "; + private int status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + private String errString = " "; - public long primaryKey() { + public long primaryKey() { - return primaryKey; - } + return primaryKey; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public String fromSURL() { + public String fromSURL() { - return fromSURL; - } + return fromSURL; + } - public void setFromSURL(String s) { + public void setFromSURL(String s) { - fromSURL = s; - } + fromSURL = s; + } - /** - * @return the normalizedStFN - */ - public String normalizedSourceStFN() { + /** @return the normalizedStFN */ + public String normalizedSourceStFN() { - return normalizedSourceStFN; - } + return normalizedSourceStFN; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedSourceStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedSourceStFN(String normalizedStFN) { - this.normalizedSourceStFN = normalizedStFN; - } + this.normalizedSourceStFN = normalizedStFN; + } - /** - * @return the surlUniqueID - */ - public Integer sourceSurlUniqueID() { + /** @return the surlUniqueID */ + public Integer sourceSurlUniqueID() { - return sourceSurlUniqueID; - } + return sourceSurlUniqueID; + } - /** - * @param surlUniqueID - * the surlUniqueID to set - */ - public void setSourceSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the surlUniqueID to set */ + public void setSourceSurlUniqueID(Integer surlUniqueID) { - this.sourceSurlUniqueID = surlUniqueID; - } + this.sourceSurlUniqueID = surlUniqueID; + } - public String toSURL() { + public String toSURL() { - return toSURL; - } + return toSURL; + } - public void setToSURL(String s) { + public void setToSURL(String s) { - toSURL = s; - } + toSURL = s; + } - /** - * @return the normalizedStFN - */ - public String normalizedTargetStFN() { + /** @return the normalizedStFN */ + public String normalizedTargetStFN() { - return normalizedTargetStFN; - } + return normalizedTargetStFN; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedTargetStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedTargetStFN(String normalizedStFN) { - this.normalizedTargetStFN = normalizedStFN; - } + this.normalizedTargetStFN = 
normalizedStFN; + } - /** - * @return the surlUniqueID - */ - public Integer targetSurlUniqueID() { + /** @return the surlUniqueID */ + public Integer targetSurlUniqueID() { - return targetSurlUniqueID; - } + return targetSurlUniqueID; + } - /** - * @param surlUniqueID - * the surlUniqueID to set - */ - public void setTargetSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the surlUniqueID to set */ + public void setTargetSurlUniqueID(Integer surlUniqueID) { - this.targetSurlUniqueID = surlUniqueID; - } + this.targetSurlUniqueID = surlUniqueID; + } - public int status() { + public int status() { - return status; - } + return status; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - public String errString() { + public String errString() { - return errString; - } + return errString; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } - - public String toString() { + errString = s; + } - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedSourceStFN); - sb.append(" "); - sb.append(sourceSurlUniqueID); - sb.append(" "); - sb.append(toSURL); - sb.append(" "); - sb.append(normalizedTargetStFN); - sb.append(" "); - sb.append(targetSurlUniqueID); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - return sb.toString(); - } + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedSourceStFN); + sb.append(" "); + sb.append(sourceSurlUniqueID); + sb.append(" "); + sb.append(toSURL); + sb.append(" "); + sb.append(normalizedTargetStFN); + sb.append(" "); + sb.append(targetSurlUniqueID); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkData.java b/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkData.java index 2ced971d..b2d63dd1 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkData.java @@ -1,129 +1,117 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a ReducedPrepareToGetChunkData, that is part of a - * multifile PrepareToGet srm request. It is closely related to PtGChunkData but - * it is called Reduced because it only contains the fromSURL, the current - * TReturnStatus, and the primary key of the request. - * - * This class is intended to be used by srmReleaseFiles, where only a limited - * amunt of information is needed instead of full blown PtGChunkData. - * + * This class represents a ReducedPrepareToGetChunkData, that is part of a multifile PrepareToGet + * srm request. It is closely related to PtGChunkData but it is called Reduced because it only + * contains the fromSURL, the current TReturnStatus, and the primary key of the request. + * + *
<p>
This class is intended to be used by srmReleaseFiles, where only a limited amunt of + * information is needed instead of full blown PtGChunkData. + * * @author EGRID - ICTP Trieste * @date November, 2006 * @version 1.0 */ public class ReducedPtGChunkData implements ReducedChunkData { - @SuppressWarnings("unused") - private static final Logger log = LoggerFactory - .getLogger(ReducedPtGChunkData.class); - - private long primaryKey = -1; // long representing the primary key for the - // persistence layer! - private TSURL fromSURL; // SURL that the srm command wants to get - private TReturnStatus status; // return status for this chunk of request - - public ReducedPtGChunkData(TSURL fromSURL, TReturnStatus status) - throws InvalidReducedPtGChunkDataAttributesException { - - if (status == null || fromSURL == null) { - throw new InvalidReducedPtGChunkDataAttributesException(fromSURL, status); - } - this.fromSURL = fromSURL; - this.status = status; - } - - /** - * Method that returns the fromSURL of the srm request to which this chunk - * belongs. - */ - public TSURL fromSURL() { - - return fromSURL; - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + new Long(primaryKey).hashCode(); - hash = 37 * hash + fromSURL.hashCode(); - hash = 37 * hash + status.hashCode(); - return hash; - } - - public boolean isPinned() { - - if (status.getStatusCode() == TStatusCode.SRM_FILE_PINNED) { - return true; - } - return false; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - public long primaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the status for this chunk of the srm request. - */ - public TReturnStatus status() { - - return status; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("ReducedPtGChunkData\n"); - sb.append("primaryKey="); - sb.append(primaryKey); - sb.append("; "); - sb.append("fromSURL="); - sb.append(fromSURL); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append("."); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof ReducedPtGChunkData)) { - return false; - } - ReducedPtGChunkData cd = (ReducedPtGChunkData) o; - return (primaryKey == cd.primaryKey) && fromSURL.equals(cd.fromSURL) - && status.equals(cd.status); - } + @SuppressWarnings("unused") + private static final Logger log = LoggerFactory.getLogger(ReducedPtGChunkData.class); + + private long primaryKey = -1; // long representing the primary key for the + // persistence layer! + private TSURL fromSURL; // SURL that the srm command wants to get + private TReturnStatus status; // return status for this chunk of request + + public ReducedPtGChunkData(TSURL fromSURL, TReturnStatus status) + throws InvalidReducedPtGChunkDataAttributesException { + + if (status == null || fromSURL == null) { + throw new InvalidReducedPtGChunkDataAttributesException(fromSURL, status); + } + this.fromSURL = fromSURL; + this.status = status; + } + + /** Method that returns the fromSURL of the srm request to which this chunk belongs. 
*/ + public TSURL fromSURL() { + + return fromSURL; + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + new Long(primaryKey).hashCode(); + hash = 37 * hash + fromSURL.hashCode(); + hash = 37 * hash + status.hashCode(); + return hash; + } + + public boolean isPinned() { + + if (status.getStatusCode() == TStatusCode.SRM_FILE_PINNED) { + return true; + } + return false; + } + + /** Method used to get the primary key used in the persistence layer! */ + public long primaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the status for this chunk of the srm request. */ + public TReturnStatus status() { + + return status; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("ReducedPtGChunkData\n"); + sb.append("primaryKey="); + sb.append(primaryKey); + sb.append("; "); + sb.append("fromSURL="); + sb.append(fromSURL); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append("."); + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof ReducedPtGChunkData)) { + return false; + } + ReducedPtGChunkData cd = (ReducedPtGChunkData) o; + return (primaryKey == cd.primaryKey) + && fromSURL.equals(cd.fromSURL) + && status.equals(cd.status); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkDataTO.java index f3c6af0f..6f50ec9f 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedPtGChunkDataTO.java @@ -1,120 +1,107 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TStatusCode; /** - * Class that represents some of the fileds in a row in the Persistence Layer: - * this is all raw data referring to the ReducedPtGChunkData proper, that is - * String and primitive types. - * + * Class that represents some of the fileds in a row in the Persistence Layer: this is all raw data + * referring to the ReducedPtGChunkData proper, that is String and primitive types. 
+ * * @author EGRID ICTP * @version 1.0 * @date November, 2006 */ public class ReducedPtGChunkDataTO { - private long primaryKey = -1; // ID primary key of record in DB - private String fromSURL = " "; - private String normalizedStFN = null; - private Integer surlUniqueID = null; + private long primaryKey = -1; // ID primary key of record in DB + private String fromSURL = " "; + private String normalizedStFN = null; + private Integer surlUniqueID = null; - private int status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - private String errString = " "; + private int status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + private String errString = " "; - public long primaryKey() { + public long primaryKey() { - return primaryKey; - } + return primaryKey; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public String fromSURL() { + public String fromSURL() { - return fromSURL; - } + return fromSURL; + } - public void setFromSURL(String s) { + public void setFromSURL(String s) { - fromSURL = s; - } + fromSURL = s; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - this.normalizedStFN = normalizedStFN; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + /** @return the normalizedStFN */ + public String normalizedStFN() { - return normalizedStFN; - } + return normalizedStFN; + } - /** - * @param surlUniqueID - * the sURLUniqueID to set - */ - public void setSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the sURLUniqueID to set */ + public void setSurlUniqueID(Integer surlUniqueID) { - this.surlUniqueID = surlUniqueID; - } + this.surlUniqueID = surlUniqueID; + } - /** - * @return the sURLUniqueID - */ - public Integer surlUniqueID() { + /** @return the sURLUniqueID */ + public Integer surlUniqueID() { - return surlUniqueID; - } + return surlUniqueID; + } - public int status() { + public int status() { - return status; - } + return status; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - public String errString() { + public String errString() { - return errString; - } + return errString; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } + errString = s; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(fromSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - sb.append(surlUniqueID); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(fromSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkData.java b/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkData.java index 0006a86d..273e6d44 100644 --- 
a/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkData.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -8,155 +7,139 @@ import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a ReducedPrepareToPutChunkData, that is part of a - * multifile PrepareToPut srm request. It is closely related to PtPChunkData but - * it is called Reduced because it only contains the toSURL, the current - * TReturnStatus, the TFileStorageType, the FileLifeTime in case of Volatile, - * the VomsGridUser limited to the DN, and the primary key of the request. - * - * This class is intended to be used by srmPutDone, where only a limited amount - * of information is needed instead of full blown PtPChunkData. It is also used - * by the automatic handlnig of non invoked srmPutDone, during transition to - * SRM_FILE_LIFETIME_EXPIRED. - * + * This class represents a ReducedPrepareToPutChunkData, that is part of a multifile PrepareToPut + * srm request. It is closely related to PtPChunkData but it is called Reduced because it only + * contains the toSURL, the current TReturnStatus, the TFileStorageType, the FileLifeTime in case of + * Volatile, the VomsGridUser limited to the DN, and the primary key of the request. + * + *
<p>
This class is intended to be used by srmPutDone, where only a limited amount of information is + * needed instead of full blown PtPChunkData. It is also used by the automatic handlnig of non + * invoked srmPutDone, during transition to SRM_FILE_LIFETIME_EXPIRED. + * * @author EGRID - ICTP Trieste * @date January, 2007 * @version 2.0 */ public class ReducedPtPChunkData { - @SuppressWarnings("unused") - private static final Logger log = LoggerFactory - .getLogger(ReducedPtPChunkData.class); - - private long primaryKey = -1; // long representing the primary key for the - // persistence layer! - private TSURL toSURL; // SURL that the srm command wants to get - private TReturnStatus status; // return status for this chunk of request - private TFileStorageType fileStorageType; // fileStorageType of this shunk of - // the request - private TLifeTimeInSeconds fileLifetime; // requested lifetime for SURL in - // case of Volatile entry. - - public ReducedPtPChunkData(TSURL toSURL, TReturnStatus status, - TFileStorageType fileStorageType, TLifeTimeInSeconds fileLifetime) - throws InvalidReducedPtPChunkDataAttributesException { - - if (status == null || toSURL == null || fileStorageType == null - || fileLifetime == null) { - throw new InvalidReducedPtPChunkDataAttributesException(toSURL, status, - fileStorageType, fileLifetime); - } - this.toSURL = toSURL; - this.status = status; - this.fileStorageType = fileStorageType; - this.fileLifetime = fileLifetime; - } - - /** - * Method used to get the primary key used in the persistence layer! - */ - public long primaryKey() { - - return primaryKey; - } - - /** - * Method used to set the primary key to be used in the persistence layer! - */ - public void setPrimaryKey(long l) { - - primaryKey = l; - } - - /** - * Method that returns the toSURL of the srm request to which this chunk - * belongs. - */ - public TSURL toSURL() { - - return toSURL; - } - - /** - * Method that returns the status for this chunk of the srm request. - */ - public TReturnStatus status() { - - return status; - } - - /** - * Method that returns the TFileStorageType of the srm request to which this - * chunk belongs. - */ - public TFileStorageType fileStorageType() { - - return fileStorageType; - } - - /** - * Method that returns the fileLifetime of the srm request to which this chunk - * belongs. 
- */ - public TLifeTimeInSeconds fileLifetime() { - - return fileLifetime; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("ReducedPtPChunkData\n"); - sb.append("primaryKey="); - sb.append(primaryKey); - sb.append("; "); - sb.append("toSURL="); - sb.append(toSURL); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append(";"); - sb.append("fileStorageType="); - sb.append(fileStorageType); - sb.append(";"); - sb.append("fileLifetime="); - sb.append(fileLifetime); - sb.append("."); - return sb.toString(); - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + new Long(primaryKey).hashCode(); - hash = 37 * hash + toSURL.hashCode(); - hash = 37 * hash + status.hashCode(); - hash = 37 * hash + fileStorageType.hashCode(); - hash = 37 * hash + fileLifetime.hashCode(); - return hash; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof ReducedPtPChunkData)) { - return false; - } - ReducedPtPChunkData cd = (ReducedPtPChunkData) o; - return (primaryKey == cd.primaryKey) && toSURL.equals(cd.toSURL) - && status.equals(cd.status) && fileStorageType.equals(cd.fileStorageType) - && fileLifetime.equals(cd.fileLifetime); - } - + @SuppressWarnings("unused") + private static final Logger log = LoggerFactory.getLogger(ReducedPtPChunkData.class); + + private long primaryKey = -1; // long representing the primary key for the + // persistence layer! + private TSURL toSURL; // SURL that the srm command wants to get + private TReturnStatus status; // return status for this chunk of request + private TFileStorageType fileStorageType; // fileStorageType of this shunk of + // the request + private TLifeTimeInSeconds fileLifetime; // requested lifetime for SURL in + // case of Volatile entry. + + public ReducedPtPChunkData( + TSURL toSURL, + TReturnStatus status, + TFileStorageType fileStorageType, + TLifeTimeInSeconds fileLifetime) + throws InvalidReducedPtPChunkDataAttributesException { + + if (status == null || toSURL == null || fileStorageType == null || fileLifetime == null) { + throw new InvalidReducedPtPChunkDataAttributesException( + toSURL, status, fileStorageType, fileLifetime); + } + this.toSURL = toSURL; + this.status = status; + this.fileStorageType = fileStorageType; + this.fileLifetime = fileLifetime; + } + + /** Method used to get the primary key used in the persistence layer! */ + public long primaryKey() { + + return primaryKey; + } + + /** Method used to set the primary key to be used in the persistence layer! */ + public void setPrimaryKey(long l) { + + primaryKey = l; + } + + /** Method that returns the toSURL of the srm request to which this chunk belongs. */ + public TSURL toSURL() { + + return toSURL; + } + + /** Method that returns the status for this chunk of the srm request. */ + public TReturnStatus status() { + + return status; + } + + /** Method that returns the TFileStorageType of the srm request to which this chunk belongs. */ + public TFileStorageType fileStorageType() { + + return fileStorageType; + } + + /** Method that returns the fileLifetime of the srm request to which this chunk belongs. 
*/ + public TLifeTimeInSeconds fileLifetime() { + + return fileLifetime; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("ReducedPtPChunkData\n"); + sb.append("primaryKey="); + sb.append(primaryKey); + sb.append("; "); + sb.append("toSURL="); + sb.append(toSURL); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append(";"); + sb.append("fileStorageType="); + sb.append(fileStorageType); + sb.append(";"); + sb.append("fileLifetime="); + sb.append(fileLifetime); + sb.append("."); + return sb.toString(); + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + new Long(primaryKey).hashCode(); + hash = 37 * hash + toSURL.hashCode(); + hash = 37 * hash + status.hashCode(); + hash = 37 * hash + fileStorageType.hashCode(); + hash = 37 * hash + fileLifetime.hashCode(); + return hash; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof ReducedPtPChunkData)) { + return false; + } + ReducedPtPChunkData cd = (ReducedPtPChunkData) o; + return (primaryKey == cd.primaryKey) + && toSURL.equals(cd.toSURL) + && status.equals(cd.status) + && fileStorageType.equals(cd.fileStorageType) + && fileLifetime.equals(cd.fileLifetime); + } } diff --git a/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkDataTO.java b/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkDataTO.java index f6881c76..7da4d724 100644 --- a/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/ReducedPtPChunkDataTO.java @@ -1,151 +1,137 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TFileStorageType; +import it.grid.storm.srm.types.TStatusCode; /** - * Class that represents some of the fields in a row in the Persistence Layer: - * this is all raw data referring to the ReducedPtPChunkData proper, that is - * String and primitive types. - * + * Class that represents some of the fields in a row in the Persistence Layer: this is all raw data + * referring to the ReducedPtPChunkData proper, that is String and primitive types. 
+ * * @author EGRID ICTP * @version 1.0 * @date January, 2007 */ public class ReducedPtPChunkDataTO { - private long primaryKey = -1; // ID primary key of record in DB - private String toSURL = " "; - private String normalizedStFN = null; - private Integer surlUniqueID = null; + private long primaryKey = -1; // ID primary key of record in DB + private String toSURL = " "; + private String normalizedStFN = null; + private Integer surlUniqueID = null; - private int status = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_QUEUED); - private String errString = " "; - private String fileStorageType = FileStorageTypeConverter.getInstance().toDB( - TFileStorageType.VOLATILE); - private int fileLifetime = -1; + private int status = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED); + private String errString = " "; + private String fileStorageType = + FileStorageTypeConverter.getInstance().toDB(TFileStorageType.VOLATILE); + private int fileLifetime = -1; - public long primaryKey() { + public long primaryKey() { - return primaryKey; - } + return primaryKey; + } - public void setPrimaryKey(long n) { + public void setPrimaryKey(long n) { - primaryKey = n; - } + primaryKey = n; + } - public String toSURL() { + public String toSURL() { - return toSURL; - } + return toSURL; + } - public void setToSURL(String s) { + public void setToSURL(String s) { - toSURL = s; - } + toSURL = s; + } - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { - this.normalizedStFN = normalizedStFN; - } + this.normalizedStFN = normalizedStFN; + } - /** - * @return the normalizedStFN - */ - public String normalizedStFN() { + /** @return the normalizedStFN */ + public String normalizedStFN() { - return normalizedStFN; - } + return normalizedStFN; + } - /** - * @param surlUniqueID - * the sURLUniqueID to set - */ - public void setSurlUniqueID(Integer surlUniqueID) { + /** @param surlUniqueID the sURLUniqueID to set */ + public void setSurlUniqueID(Integer surlUniqueID) { - this.surlUniqueID = surlUniqueID; - } + this.surlUniqueID = surlUniqueID; + } - /** - * @return the sURLUniqueID - */ - public Integer surlUniqueID() { + /** @return the sURLUniqueID */ + public Integer surlUniqueID() { - return surlUniqueID; - } + return surlUniqueID; + } - public int status() { + public int status() { - return status; - } + return status; + } - public void setStatus(int n) { + public void setStatus(int n) { - status = n; - } + status = n; + } - public String errString() { + public String errString() { - return errString; - } + return errString; + } - public void setErrString(String s) { + public void setErrString(String s) { - errString = s; - } + errString = s; + } - public String fileStorageType() { + public String fileStorageType() { - return fileStorageType; - } + return fileStorageType; + } - /** - * Method that sets the FileStorageType: if it is null nothing gets set. The - * deafult value is Volatile. - */ - public void setFileStorageType(String s) { + /** + * Method that sets the FileStorageType: if it is null nothing gets set. The deafult value is + * Volatile. 
+ */ + public void setFileStorageType(String s) { - if (s != null) - fileStorageType = s; - } + if (s != null) fileStorageType = s; + } - public int fileLifetime() { + public int fileLifetime() { - return fileLifetime; - } + return fileLifetime; + } - public void setFileLifetime(int n) { + public void setFileLifetime(int n) { - fileLifetime = n; - } + fileLifetime = n; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(primaryKey); - sb.append(" "); - sb.append(toSURL); - sb.append(" "); - sb.append(normalizedStFN); - sb.append(" "); - sb.append(surlUniqueID); - sb.append(" "); - sb.append(status); - sb.append(" "); - sb.append(errString); - sb.append(" "); - sb.append(fileStorageType); - sb.append(" "); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(primaryKey); + sb.append(" "); + sb.append(toSURL); + sb.append(" "); + sb.append(normalizedStFN); + sb.append(" "); + sb.append(surlUniqueID); + sb.append(" "); + sb.append(status); + sb.append(" "); + sb.append(errString); + sb.append(" "); + sb.append(fileStorageType); + sb.append(" "); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/RequestData.java b/src/main/java/it/grid/storm/catalogs/RequestData.java index 4bae65ab..c52c60b5 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestData.java +++ b/src/main/java/it/grid/storm/catalogs/RequestData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -10,9 +9,9 @@ import it.grid.storm.synchcall.data.InputData; /** - * Class that represents a generic chunk. It provides only one method which is - * the primary key associated ot the chunk in persistence. - * + * Class that represents a generic chunk. It provides only one method which is the primary key + * associated ot the chunk in persistence. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2006 diff --git a/src/main/java/it/grid/storm/catalogs/RequestSummaryCatalog.java b/src/main/java/it/grid/storm/catalogs/RequestSummaryCatalog.java index fd4e0e0d..9793d745 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestSummaryCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/RequestSummaryCatalog.java @@ -1,9 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import com.google.common.collect.Lists; import it.grid.storm.catalogs.timertasks.RequestsGarbageCollector; import it.grid.storm.common.types.TimeUnit; import it.grid.storm.config.Configuration; @@ -17,412 +17,416 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Timer; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - /** - * Class that represents the RequestSummaryCatalog of StoRM. The rows in the - * catalog are called RequestSummaryData. 
Methods are provided to: look up newly - * added requests as identified by their SRM_REQUEST_QUEUED status, to update - * the global status of the request, and to fail a request with SRM_FAILURE. - * + * Class that represents the RequestSummaryCatalog of StoRM. The rows in the catalog are called + * RequestSummaryData. Methods are provided to: look up newly added requests as identified by their + * SRM_REQUEST_QUEUED status, to update the global status of the request, and to fail a request with + * SRM_FAILURE. + * * @author EGRID - ICTP Trieste * @version 2.0 * @date April 26th, 2005 */ public class RequestSummaryCatalog { - private static final Logger log = LoggerFactory - .getLogger(RequestSummaryCatalog.class); - /** Only instance of RequestSummaryCatalog for StoRM! */ - private static RequestSummaryCatalog cat = new RequestSummaryCatalog(); - /** WARNING!!! TO BE MODIFIED WITH FACTORY!!! */ - private final RequestSummaryDAO dao = RequestSummaryDAO.getInstance(); - /** timer thread that will run a task to clean */ - private Timer clock = null; - /** configuration instance **/ - private final Configuration config = Configuration.getInstance(); - - private RequestSummaryCatalog() { - - clock = new Timer(); - - clock.schedule( - new RequestsGarbageCollector(clock, - config.getRequestPurgerPeriod() * 1000), - config.getRequestPurgerDelay() * 1000); - } - - /** - * Method that returns the only instance of RequestSummaryCatalog present in - * StoRM. - */ - public static RequestSummaryCatalog getInstance() { - - return RequestSummaryCatalog.cat; - } - - /** - * Method in charge of retrieving RequestSummaryData associated to new - * requests, that is those found in SRM_REQUETS_QUEUED global status; such - * requests then transit into SRM_SUCCESS. The actual number of fetched - * requests depends on the configured ceiling. - * - * If no new request is found, an empty Collection is returned. if a request - * is malformed, then that request is failed and an attempt is made to signal - * such occurrence in the DB. Only correctly formed requests are returned. - */ - synchronized public Collection fetchNewRequests( - int capacity) { - - List list = Lists.newArrayList(); - - Collection c = dao.findNew(capacity); - if (c == null || c.isEmpty()) { - return list; - } - int fetched = c.size(); - log.debug("REQUEST SUMMARY CATALOG: {} new requests picked up.", fetched); - for (RequestSummaryDataTO auxTO : c) { - RequestSummaryData aux = null; - try { - aux = makeOne(auxTO); - } catch (IllegalArgumentException e) { - log.error("REQUEST SUMMARY CATALOG: Failure while performing makeOne " - + "operation. IllegalArgumentException: {}", e.getMessage(), e); - continue; - } - if (aux != null) { - log.debug("REQUEST SUMMARY CATALOG: {} associated to {} included " - + "for processing", aux.requestToken(), aux.gridUser().getDn()); - list.add(aux); - } - } - int ret = list.size(); - if (ret < fetched) { - log.warn("REQUEST SUMMARY CATALOG: including {} requests for processing, " - + "since the dropped ones were malformed!", ret); - } else { - log.debug("REQUEST SUMMARY CATALOG: including for processing all {} " - + "requests.", ret); - } - if (!list.isEmpty()) { - log.debug("REQUEST SUMMARY CATALOG: returning {}\n\n", list); - } - return list; - } - - /** - * Private method used to create a RequestSummaryData object, from a - * RequestSummaryDataTO. If a chunk cannot be created, an error messagge gets - * logged and an attempt is made to signal in the DB that the request is - * malformed. 
- */ - private RequestSummaryData makeOne(RequestSummaryDataTO to) - throws IllegalArgumentException { - - TRequestType auxrtype = RequestTypeConverter.getInstance().toSTORM( - to.requestType()); - if (auxrtype == TRequestType.EMPTY) { - StringBuilder sb = new StringBuilder(); - sb.append("TRequestType could not be created from its String representation "); - sb.append(to.requestType()); - sb.append("\n"); - log.warn(sb.toString()); - throw new IllegalArgumentException( - "Invalid TRequestType in the provided RequestSummaryDataTO"); - } - TRequestToken auxrtoken; - try { - auxrtoken = new TRequestToken(to.requestToken(), to.timestamp()); - } catch (InvalidTRequestTokenAttributesException e) { - log.warn("Unable to create TRequestToken from RequestSummaryDataTO. " - + "InvalidTRequestTokenAttributesException: {}", e.getMessage()); - throw new IllegalArgumentException( - "Unable to create TRequestToken from RequestSummaryDataTO."); - } - GridUserInterface auxgu; - - try { - auxgu = loadVomsGridUser(to.clientDN(), to.vomsAttributes()); - } catch (MalformedGridUserException e) { - StringBuilder sb = new StringBuilder(); - sb.append("VomsGridUser could not be created from DN String "); - sb.append(to.clientDN()); - sb.append(" voms attributes String "); - sb.append(to.vomsAttributes()); - sb.append(" and from request token String "); - sb.append(to.requestToken()); - log.warn("{}. MalformedGridUserException: {}", sb.toString(), e.getMessage()); - throw new IllegalArgumentException( - "Unable to load Voms Grid User from RequestSummaryDataTO. " - + "MalformedGridUserException: " + e.getMessage()); - } - RequestSummaryData data = null; - try { - data = new RequestSummaryData(auxrtype, auxrtoken, auxgu); - data.setPrimaryKey(to.primaryKey()); - } catch (InvalidRequestSummaryDataAttributesException e) { - dao.failRequest(to.primaryKey(), "The request data is malformed!"); - log.warn("REQUEST SUMMARY CATALOG! Unable to create RequestSummaryData. " - + "InvalidRequestSummaryDataAttributesException: {}", e.getMessage(), e); - throw new IllegalArgumentException("Unable to reate RequestSummaryData"); - } - TReturnStatus status = null; - if (to.getStatus() != null) { - TStatusCode code = StatusCodeConverter.getInstance().toSTORM(to.getStatus()); - if (code == TStatusCode.EMPTY) { - log.warn("RequestSummaryDataTO retrieved StatusCode was not " - + "recognised: {}", to.getStatus()); - } else { - status = new TReturnStatus(code, to.getErrstring()); - } - } - data.setUserToken(to.getUserToken()); - data.setRetrytime(to.getRetrytime()); - if (to.getPinLifetime() != null) { - data.setPinLifetime(TLifeTimeInSeconds.make(PinLifetimeConverter - .getInstance().toStoRM(to.getPinLifetime()), TimeUnit.SECONDS)); - } - data.setSpaceToken(to.getSpaceToken()); - data.setStatus(status); - data.setErrstring(to.getErrstring()); - data.setRemainingTotalTime(to.getRemainingTotalTime()); - data.setNbreqfiles(to.getNbreqfiles()); - data.setNumOfCompleted(to.getNumOfCompleted()); - if (to.getFileLifetime() != null) { - data.setFileLifetime(TLifeTimeInSeconds.make(to.getFileLifetime(), - TimeUnit.SECONDS)); - } - - data.setDeferredStartTime(to.getDeferredStartTime()); - data.setNumOfWaiting(to.getNumOfWaiting()); - data.setNumOfFailed(to.getNumOfFailed()); - data.setRemainingDeferredStartTime(to.getRemainingDeferredStartTime()); - return data; - } - - /** - * Private method that holds the logic for creating a VomsGridUser from - * persistence and to load any available Proxy. 
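For reference, the FQAN handling described above reduces to splitting the single '#'-separated string that the FrontEnd stores in the proxy column; a minimal standalone sketch of that step, using plain Strings in place of the StoRM FQAN type (illustrative only, not part of this patch):

// FqanSplitSketch.java - illustrative only, not part of this patch.
public class FqanSplitSketch {
  public static void main(String[] args) {
    // A made-up '#'-separated FQAN string, as stored by the FrontEnd in request_queue.proxy.
    String fqansString = "/testers.eu-emi.eu/Role=NULL#/testers.eu-emi.eu/higgs";
    // Same split-and-fill shape used by loadVomsGridUser, with String instead of FQAN.
    String[] fqans = new String[fqansString.split("#").length];
    int i = 0;
    for (String fqan : fqansString.split("#")) {
      fqans[i++] = fqan;
    }
    for (String fqan : fqans) {
      System.out.println("FQAN: " + fqan);
    }
  }
}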
For the moment the VOMS - * attributes present in persistence are NOT loaded! - */ - private GridUserInterface loadVomsGridUser(String dn, String fqansString) throws MalformedGridUserException { - - log.debug("load VomsGridUser for dn='{}' and fqansString='{}'", dn, fqansString); - - if (dn == null) { - throw new MalformedGridUserException("Invalid null DN"); - } - if (fqansString == null || fqansString.isEmpty()) { - return GridUserManager.makeGridUser(dn); - } - - FQAN[] fqans = new FQAN[fqansString.split("#").length]; - int i = 0; - for (String fqan: fqansString.split("#")) { - fqans[i++] = new FQAN(fqan); - } - try { - return GridUserManager.makeVOMSGridUser(dn, fqans); - } catch (IllegalArgumentException e) { - log.error("Unexpected error on voms grid user creation. " - + "IllegalArgumentException: {}", e.getMessage(), e); - throw new MalformedGridUserException(e.getMessage()); - } - } - - /** - * Method used to update the global status of a request identified by - * TRequestToken, to the supplied TReturnStatus. In case of any exception - * nothing happens. - */ - synchronized public void updateGlobalStatus(TRequestToken rt, - TReturnStatus status) { - - dao.updateGlobalStatus(rt.toString(), StatusCodeConverter.getInstance() - .toDB(status.getStatusCode()), status.getExplanation()); - } - - public void updateFromPreviousGlobalStatus(TRequestToken requestToken, - TStatusCode expectedStatusCode, TStatusCode newStatusCode, - String explanation) { - - dao.updateGlobalStatusOnMatchingGlobalStatus(requestToken, - expectedStatusCode, newStatusCode, explanation); - } - - /** - * Method used to update the global status of a request identified by - * TRequestToken, to the supplied TReturnStatus. The pin lifetime and the file - * lifetime are updated in order to start the countdown from the moment the - * status is updated. In case of any exception nothing happens. - */ - synchronized public void updateGlobalStatusPinFileLifetime(TRequestToken rt, - TReturnStatus status) { - - dao.updateGlobalStatusPinFileLifetime(rt.toString(), StatusCodeConverter - .getInstance().toDB(status.getStatusCode()), status.getExplanation()); - } - - /** - * Method used to change the global status of the supplied request to - * SRM_FAILURE, as well as that of each single chunk in the request. If the - * request type is not supported by the logic, only the global status is - * updated and an error log gets written warning of the unsupported business - * logic. - * - * If the supplied RequestSummaryData is null, nothing gets done; if any DB - * error occurs, no exception gets thrown but proper messagges get logged. - */ - synchronized public void failRequest(RequestSummaryData rsd, - String explanation) { - - if (rsd != null) { - TRequestType rtype = rsd.requestType(); - if (rtype == TRequestType.PREPARE_TO_GET) { - dao.failPtGRequest(rsd.primaryKey(), explanation); - } else if (rtype == TRequestType.PREPARE_TO_PUT) { - dao.failPtPRequest(rsd.primaryKey(), explanation); - } else if (rtype == TRequestType.COPY) { - dao.failCopyRequest(rsd.primaryKey(), explanation); - } else { - dao.failRequest(rsd.primaryKey(), explanation); - } - } - } - - /** - * Method used to abort a request that has not yet been fetched for - * processing; if the status of the request associated to the supplied request - * token tok is different from SRM_REQUEST_QUEUED, then nothing takes place; - * likewise if the supplied token does not correspond to any request, or if it - * is null. 
- */ - synchronized public void abortRequest(TRequestToken rt) { - - if (rt != null) { - dao.abortRequest(rt.toString()); - } - } - - /** - * Method used to abort a request that has not yet been fetched for - * processing; abort is only applied to those SURLs of the request specified - * in the Collection; if the status of the request associated to the supplied - * request token is different from SRM_REQUEST_QUEUED, then nothing takes - * place; likewise if the supplied token does not correspond to any request, - * if it is null, if the Collection is null, or the Collection does not - * contain TSURLs. - */ - synchronized public void abortChunksOfRequest(TRequestToken rt, - Collection c) { - - if ((rt != null) && (c != null) && (!c.isEmpty())) { - try { - ArrayList aux = new ArrayList(); - for (TSURL tsurl : c) { - aux.add(tsurl.toString()); - } - dao.abortChunksOfRequest(rt.toString(), aux); - } catch (ClassCastException e) { - log.error("REQUEST SUMMARY CATALOG! Unexpected error in " - + "abortChunksOfRequest: the supplied Collection did not contain " - + "TSURLs! Error: {}", e.getMessage(), e); - } - } - } - - /** - * Method used to abort a request that HAS been fetched for processing; abort - * is only applied to those SURLs of the request specified in the Collection; - * if the status of the request associated to the supplied request token is - * different from SRM_REQUEST_INPROGRESS, then nothing takes place; likewise - * if the supplied token does not correspond to any request, if it is null, if - * the Collection is null, or the Collection does not contain TSURLs. - */ - synchronized public void abortChunksOfInProgressRequest(TRequestToken rt, - Collection tsurls) { - - if ((rt != null) && (tsurls != null) && (!tsurls.isEmpty())) { - try { - List aux = new ArrayList(); - for (TSURL tsurl : tsurls) { - aux.add(tsurl.toString()); - } - dao.abortChunksOfInProgressRequest(rt.toString(), aux); - } catch (ClassCastException e) { - log.error("REQUEST SUMMARY CATALOG! Unexpected error in " - + "abortChunksOfInProgressRequest: the supplied Collection did not " - + "contain TSURLs! Error: {}", e.getMessage()); - } - } - } - - synchronized public RequestSummaryData find(TRequestToken requestToken) - throws IllegalArgumentException { - - if (requestToken == null || requestToken.toString().trim().isEmpty()) { - throw new IllegalArgumentException( - "Unable to perform find, illegal arguments: requestToken=" - + requestToken); - } - RequestSummaryDataTO to = dao.find(requestToken.toString()); - if (to != null) { - try { - RequestSummaryData data = makeOne(to); - if (data != null) { - log.debug("REQUEST SUMMARY CATALOG: {} associated to {} retrieved", - data.requestToken(), data.gridUser().getDn()); - return data; - } - } catch (IllegalArgumentException e) { - log.error("REQUEST SUMMARY CATALOG; Failure performing makeOne operation. " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - } else { - log.debug("REQUEST SUMMARY CATALOG: {} token not found", requestToken); - } - return null; - } - - /** - * Method that returns the TRequestType associated to the request with the - * supplied TRequestToken. If no request exists with that token, or the type - * cannot be established from the DB, or the supplied token is null, then an - * EMPTY TRequestType is returned. 
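As noted above, typeOf never returns null: a missing or null token yields TRequestType.EMPTY, so callers branch on the enum value. A hedged usage sketch (assumes the StoRM types referenced in this patch are on the classpath; the wrapper class and method names are invented for illustration):

// TypeOfSketch.java - illustrative only, not part of this patch.
import it.grid.storm.catalogs.RequestSummaryCatalog;
import it.grid.storm.srm.types.TRequestToken;
import it.grid.storm.srm.types.TRequestType;

public class TypeOfSketch {
  public static String describe(TRequestToken token) {
    TRequestType type = RequestSummaryCatalog.getInstance().typeOf(token);
    if (type == TRequestType.EMPTY) {
      return "token unknown, or request type not resolvable from the DB";
    }
    return "request type: " + type;
  }
}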
- */ - synchronized public TRequestType typeOf(TRequestToken rt) { - - TRequestType result = TRequestType.EMPTY; - String type = null; - if (rt != null) { - type = dao.typeOf(rt.toString()); - if (type != null && !type.isEmpty()) - result = RequestTypeConverter.getInstance().toSTORM(type); - } - return result; - } - - /** - * Method used to abort a request that HAS been fetched for processing; if the - * status of the request associated to the supplied request token tok is - * different from SRM_REQUEST_INPROGRESS, then nothing takes place; likewise - * if the supplied token does not correspond to any request, or if it is null. - */ - synchronized public void abortInProgressRequest(TRequestToken rt) { - - if (rt != null) { - dao.abortInProgressRequest(rt.toString()); - } - } - + private static final Logger log = LoggerFactory.getLogger(RequestSummaryCatalog.class); + /** Only instance of RequestSummaryCatalog for StoRM! */ + private static RequestSummaryCatalog cat = new RequestSummaryCatalog(); + /** WARNING!!! TO BE MODIFIED WITH FACTORY!!! */ + private final RequestSummaryDAO dao = RequestSummaryDAO.getInstance(); + /** timer thread that will run a task to clean */ + private Timer clock = null; + /** configuration instance * */ + private final Configuration config = Configuration.getInstance(); + + private RequestSummaryCatalog() { + + clock = new Timer(); + + clock.schedule( + new RequestsGarbageCollector(clock, config.getRequestPurgerPeriod() * 1000), + config.getRequestPurgerDelay() * 1000); + } + + /** Method that returns the only instance of RequestSummaryCatalog present in StoRM. */ + public static RequestSummaryCatalog getInstance() { + + return RequestSummaryCatalog.cat; + } + + /** + * Method in charge of retrieving RequestSummaryData associated to new requests, that is those + * found in SRM_REQUETS_QUEUED global status; such requests then transit into SRM_SUCCESS. The + * actual number of fetched requests depends on the configured ceiling. + * + *
<p>
If no new request is found, an empty Collection is returned. if a request is malformed, then + * that request is failed and an attempt is made to signal such occurrence in the DB. Only + * correctly formed requests are returned. + */ + public synchronized Collection fetchNewRequests(int capacity) { + + List list = Lists.newArrayList(); + + Collection c = dao.findNew(capacity); + if (c == null || c.isEmpty()) { + return list; + } + int fetched = c.size(); + log.debug("REQUEST SUMMARY CATALOG: {} new requests picked up.", fetched); + for (RequestSummaryDataTO auxTO : c) { + RequestSummaryData aux = null; + try { + aux = makeOne(auxTO); + } catch (IllegalArgumentException e) { + log.error( + "REQUEST SUMMARY CATALOG: Failure while performing makeOne " + + "operation. IllegalArgumentException: {}", + e.getMessage(), + e); + continue; + } + if (aux != null) { + log.debug( + "REQUEST SUMMARY CATALOG: {} associated to {} included " + "for processing", + aux.requestToken(), + aux.gridUser().getDn()); + list.add(aux); + } + } + int ret = list.size(); + if (ret < fetched) { + log.warn( + "REQUEST SUMMARY CATALOG: including {} requests for processing, " + + "since the dropped ones were malformed!", + ret); + } else { + log.debug("REQUEST SUMMARY CATALOG: including for processing all {} " + "requests.", ret); + } + if (!list.isEmpty()) { + log.debug("REQUEST SUMMARY CATALOG: returning {}\n\n", list); + } + return list; + } + + /** + * Private method used to create a RequestSummaryData object, from a RequestSummaryDataTO. If a + * chunk cannot be created, an error messagge gets logged and an attempt is made to signal in the + * DB that the request is malformed. + */ + private RequestSummaryData makeOne(RequestSummaryDataTO to) throws IllegalArgumentException { + + TRequestType auxrtype = RequestTypeConverter.getInstance().toSTORM(to.requestType()); + if (auxrtype == TRequestType.EMPTY) { + StringBuilder sb = new StringBuilder(); + sb.append("TRequestType could not be created from its String representation "); + sb.append(to.requestType()); + sb.append("\n"); + log.warn(sb.toString()); + throw new IllegalArgumentException( + "Invalid TRequestType in the provided RequestSummaryDataTO"); + } + TRequestToken auxrtoken; + try { + auxrtoken = new TRequestToken(to.requestToken(), to.timestamp()); + } catch (InvalidTRequestTokenAttributesException e) { + log.warn( + "Unable to create TRequestToken from RequestSummaryDataTO. " + + "InvalidTRequestTokenAttributesException: {}", + e.getMessage()); + throw new IllegalArgumentException( + "Unable to create TRequestToken from RequestSummaryDataTO."); + } + GridUserInterface auxgu; + + try { + auxgu = loadVomsGridUser(to.clientDN(), to.vomsAttributes()); + } catch (MalformedGridUserException e) { + StringBuilder sb = new StringBuilder(); + sb.append("VomsGridUser could not be created from DN String "); + sb.append(to.clientDN()); + sb.append(" voms attributes String "); + sb.append(to.vomsAttributes()); + sb.append(" and from request token String "); + sb.append(to.requestToken()); + log.warn("{}. MalformedGridUserException: {}", sb.toString(), e.getMessage()); + throw new IllegalArgumentException( + "Unable to load Voms Grid User from RequestSummaryDataTO. 
" + + "MalformedGridUserException: " + + e.getMessage()); + } + RequestSummaryData data = null; + try { + data = new RequestSummaryData(auxrtype, auxrtoken, auxgu); + data.setPrimaryKey(to.primaryKey()); + } catch (InvalidRequestSummaryDataAttributesException e) { + dao.failRequest(to.primaryKey(), "The request data is malformed!"); + log.warn( + "REQUEST SUMMARY CATALOG! Unable to create RequestSummaryData. " + + "InvalidRequestSummaryDataAttributesException: {}", + e.getMessage(), + e); + throw new IllegalArgumentException("Unable to reate RequestSummaryData"); + } + TReturnStatus status = null; + if (to.getStatus() != null) { + TStatusCode code = StatusCodeConverter.getInstance().toSTORM(to.getStatus()); + if (code == TStatusCode.EMPTY) { + log.warn( + "RequestSummaryDataTO retrieved StatusCode was not " + "recognised: {}", + to.getStatus()); + } else { + status = new TReturnStatus(code, to.getErrstring()); + } + } + data.setUserToken(to.getUserToken()); + data.setRetrytime(to.getRetrytime()); + if (to.getPinLifetime() != null) { + data.setPinLifetime( + TLifeTimeInSeconds.make( + PinLifetimeConverter.getInstance().toStoRM(to.getPinLifetime()), TimeUnit.SECONDS)); + } + data.setSpaceToken(to.getSpaceToken()); + data.setStatus(status); + data.setErrstring(to.getErrstring()); + data.setRemainingTotalTime(to.getRemainingTotalTime()); + data.setNbreqfiles(to.getNbreqfiles()); + data.setNumOfCompleted(to.getNumOfCompleted()); + if (to.getFileLifetime() != null) { + data.setFileLifetime(TLifeTimeInSeconds.make(to.getFileLifetime(), TimeUnit.SECONDS)); + } + + data.setDeferredStartTime(to.getDeferredStartTime()); + data.setNumOfWaiting(to.getNumOfWaiting()); + data.setNumOfFailed(to.getNumOfFailed()); + data.setRemainingDeferredStartTime(to.getRemainingDeferredStartTime()); + return data; + } + + /** + * Private method that holds the logic for creating a VomsGridUser from persistence and to load + * any available Proxy. For the moment the VOMS attributes present in persistence are NOT loaded! + */ + private GridUserInterface loadVomsGridUser(String dn, String fqansString) + throws MalformedGridUserException { + + log.debug("load VomsGridUser for dn='{}' and fqansString='{}'", dn, fqansString); + + if (dn == null) { + throw new MalformedGridUserException("Invalid null DN"); + } + if (fqansString == null || fqansString.isEmpty()) { + return GridUserManager.makeGridUser(dn); + } + + FQAN[] fqans = new FQAN[fqansString.split("#").length]; + int i = 0; + for (String fqan : fqansString.split("#")) { + fqans[i++] = new FQAN(fqan); + } + try { + return GridUserManager.makeVOMSGridUser(dn, fqans); + } catch (IllegalArgumentException e) { + log.error( + "Unexpected error on voms grid user creation. " + "IllegalArgumentException: {}", + e.getMessage(), + e); + throw new MalformedGridUserException(e.getMessage()); + } + } + + /** + * Method used to update the global status of a request identified by TRequestToken, to the + * supplied TReturnStatus. In case of any exception nothing happens. 
+ */ + public synchronized void updateGlobalStatus(TRequestToken rt, TReturnStatus status) { + + dao.updateGlobalStatus( + rt.toString(), + StatusCodeConverter.getInstance().toDB(status.getStatusCode()), + status.getExplanation()); + } + + public void updateFromPreviousGlobalStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + dao.updateGlobalStatusOnMatchingGlobalStatus( + requestToken, expectedStatusCode, newStatusCode, explanation); + } + + /** + * Method used to update the global status of a request identified by TRequestToken, to the + * supplied TReturnStatus. The pin lifetime and the file lifetime are updated in order to start + * the countdown from the moment the status is updated. In case of any exception nothing happens. + */ + public synchronized void updateGlobalStatusPinFileLifetime( + TRequestToken rt, TReturnStatus status) { + + dao.updateGlobalStatusPinFileLifetime( + rt.toString(), + StatusCodeConverter.getInstance().toDB(status.getStatusCode()), + status.getExplanation()); + } + + /** + * Method used to change the global status of the supplied request to SRM_FAILURE, as well as that + * of each single chunk in the request. If the request type is not supported by the logic, only + * the global status is updated and an error log gets written warning of the unsupported business + * logic. + * + *
<p>
If the supplied RequestSummaryData is null, nothing gets done; if any DB error occurs, no + * exception gets thrown but proper messagges get logged. + */ + public synchronized void failRequest(RequestSummaryData rsd, String explanation) { + + if (rsd != null) { + TRequestType rtype = rsd.requestType(); + if (rtype == TRequestType.PREPARE_TO_GET) { + dao.failPtGRequest(rsd.primaryKey(), explanation); + } else if (rtype == TRequestType.PREPARE_TO_PUT) { + dao.failPtPRequest(rsd.primaryKey(), explanation); + } else if (rtype == TRequestType.COPY) { + dao.failCopyRequest(rsd.primaryKey(), explanation); + } else { + dao.failRequest(rsd.primaryKey(), explanation); + } + } + } + + /** + * Method used to abort a request that has not yet been fetched for processing; if the status of + * the request associated to the supplied request token tok is different from SRM_REQUEST_QUEUED, + * then nothing takes place; likewise if the supplied token does not correspond to any request, or + * if it is null. + */ + public synchronized void abortRequest(TRequestToken rt) { + + if (rt != null) { + dao.abortRequest(rt.toString()); + } + } + + /** + * Method used to abort a request that has not yet been fetched for processing; abort is only + * applied to those SURLs of the request specified in the Collection; if the status of the request + * associated to the supplied request token is different from SRM_REQUEST_QUEUED, then nothing + * takes place; likewise if the supplied token does not correspond to any request, if it is null, + * if the Collection is null, or the Collection does not contain TSURLs. + */ + public synchronized void abortChunksOfRequest(TRequestToken rt, Collection c) { + + if ((rt != null) && (c != null) && (!c.isEmpty())) { + try { + ArrayList aux = new ArrayList(); + for (TSURL tsurl : c) { + aux.add(tsurl.toString()); + } + dao.abortChunksOfRequest(rt.toString(), aux); + } catch (ClassCastException e) { + log.error( + "REQUEST SUMMARY CATALOG! Unexpected error in " + + "abortChunksOfRequest: the supplied Collection did not contain " + + "TSURLs! Error: {}", + e.getMessage(), + e); + } + } + } + + /** + * Method used to abort a request that HAS been fetched for processing; abort is only applied to + * those SURLs of the request specified in the Collection; if the status of the request associated + * to the supplied request token is different from SRM_REQUEST_INPROGRESS, then nothing takes + * place; likewise if the supplied token does not correspond to any request, if it is null, if the + * Collection is null, or the Collection does not contain TSURLs. + */ + public synchronized void abortChunksOfInProgressRequest( + TRequestToken rt, Collection tsurls) { + + if ((rt != null) && (tsurls != null) && (!tsurls.isEmpty())) { + try { + List aux = new ArrayList(); + for (TSURL tsurl : tsurls) { + aux.add(tsurl.toString()); + } + dao.abortChunksOfInProgressRequest(rt.toString(), aux); + } catch (ClassCastException e) { + log.error( + "REQUEST SUMMARY CATALOG! Unexpected error in " + + "abortChunksOfInProgressRequest: the supplied Collection did not " + + "contain TSURLs! 
Error: {}", + e.getMessage()); + } + } + } + + public synchronized RequestSummaryData find(TRequestToken requestToken) + throws IllegalArgumentException { + + if (requestToken == null || requestToken.toString().trim().isEmpty()) { + throw new IllegalArgumentException( + "Unable to perform find, illegal arguments: requestToken=" + requestToken); + } + RequestSummaryDataTO to = dao.find(requestToken.toString()); + if (to != null) { + try { + RequestSummaryData data = makeOne(to); + if (data != null) { + log.debug( + "REQUEST SUMMARY CATALOG: {} associated to {} retrieved", + data.requestToken(), + data.gridUser().getDn()); + return data; + } + } catch (IllegalArgumentException e) { + log.error( + "REQUEST SUMMARY CATALOG; Failure performing makeOne operation. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + } else { + log.debug("REQUEST SUMMARY CATALOG: {} token not found", requestToken); + } + return null; + } + + /** + * Method that returns the TRequestType associated to the request with the supplied TRequestToken. + * If no request exists with that token, or the type cannot be established from the DB, or the + * supplied token is null, then an EMPTY TRequestType is returned. + */ + public synchronized TRequestType typeOf(TRequestToken rt) { + + TRequestType result = TRequestType.EMPTY; + String type = null; + if (rt != null) { + type = dao.typeOf(rt.toString()); + if (type != null && !type.isEmpty()) + result = RequestTypeConverter.getInstance().toSTORM(type); + } + return result; + } + + /** + * Method used to abort a request that HAS been fetched for processing; if the status of the + * request associated to the supplied request token tok is different from SRM_REQUEST_INPROGRESS, + * then nothing takes place; likewise if the supplied token does not correspond to any request, or + * if it is null. + */ + public synchronized void abortInProgressRequest(TRequestToken rt) { + + if (rt != null) { + dao.abortInProgressRequest(rt.toString()); + } + } } diff --git a/src/main/java/it/grid/storm/catalogs/RequestSummaryDAO.java b/src/main/java/it/grid/storm/catalogs/RequestSummaryDAO.java index ad52e5f7..618c4c12 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestSummaryDAO.java +++ b/src/main/java/it/grid/storm/catalogs/RequestSummaryDAO.java @@ -1,16 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import com.google.common.collect.Lists; - import it.grid.storm.config.Configuration; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TRequestType; import it.grid.storm.srm.types.TStatusCode; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -24,1354 +21,1366 @@ import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * DAO class for RequestSummaryCatalog. This DAO is specifically designed to - * connect to a MySQL DB. - * + * DAO class for RequestSummaryCatalog. This DAO is specifically designed to connect to a MySQL DB. 
+ * * @author EGRID ICTP * @version 3.0 * @date May 2005 */ public class RequestSummaryDAO { - private static final Logger log = LoggerFactory - .getLogger(RequestSummaryDAO.class); - - /** String with the name of the class for the DB driver */ - private final String driver = Configuration.getInstance().getDBDriver(); - /** String referring to the URL of the DB */ - private final String url = Configuration.getInstance().getStormDbURL(); - /** String with the password for the DB */ - private final String password = Configuration.getInstance().getDBPassword(); - /** String with the name for the DB */ - private final String name = Configuration.getInstance().getDBUserName(); - /** maximum number of requests that will be retrieved */ - private int limit; - /** Connection to DB - WARNING!!! It is kept open all the time! */ - private Connection con = null; - - /** milliseconds that must pass before reconnecting to DB */ - private final long period = Configuration.getInstance() - .getDBReconnectPeriod() * 1000; - /** initial delay in milliseconds before starting timer */ - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - /** timer thread that will run a task to alert when reconnecting is necessary! */ - private Timer clock = null; - /** - * timer task that will update the boolean signaling that a reconnection is - * needed! - */ - private TimerTask clockTask = null; - /** boolean that tells whether reconnection is needed because of MySQL bug! */ - private boolean reconnect = false; - - private static final RequestSummaryDAO dao = new RequestSummaryDAO(); - - private RequestSummaryDAO() { - - int aux = Configuration.getInstance().getPickingMaxBatchSize(); - if (aux > 1) { - limit = aux; - } else { - limit = 1; - } - setUpConnection(); - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of the RequestSummaryDAO. - */ - public static RequestSummaryDAO getInstance() { - - return dao; - } - - /** - * Method that retrieves requests in the SRM_REQUEST_QUEUED status: retrieved - * requests are limited to the number specified by the Configuration method - * getPicker2MaxBatchSize. All retrieved requests get their global status - * transited to SRM_REQUEST_INPROGRESS. A Collection of RequestSummaryDataTO - * is returned: if none are found, an empty collection is returned. - */ - public Collection findNew(int freeSlot) { - - PreparedStatement stmt = null; - ResultSet rs = null; - List list = Lists.newArrayList(); - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - findNew: unable to get a valid connection!"); - return list; - } - // RequestSummaryDataTO - try { - // start transaction - con.setAutoCommit(false); - - int howMuch = -1; - if (freeSlot > limit) { - howMuch = limit; - } else { - howMuch = freeSlot; - } - - String query = "SELECT ID, config_RequestTypeID, r_token, timeStamp, " - + "client_dn, proxy FROM request_queue WHERE status=? 
LIMIT ?"; - - // get id, request type, request token and client_DN of newly added - // requests, which must be in SRM_REQUEST_QUEUED state - stmt = con.prepareStatement(query); - logWarnings(con.getWarnings()); - - stmt.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); - stmt.setInt(2, howMuch); - - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - - List rowids = new ArrayList(); // arraylist with selected ids - RequestSummaryDataTO aux = null; // RequestSummaryDataTO made from - // retrieved row - long auxid; // primary key of retrieved row - while (rs.next()) { - auxid = rs.getLong("ID"); - rowids.add(Long.valueOf(auxid)); - aux = new RequestSummaryDataTO(); - aux.setPrimaryKey(auxid); - aux.setRequestType(rs.getString("config_RequestTypeID")); - aux.setRequestToken(rs.getString("r_token")); - aux.setClientDN(rs.getString("client_dn")); - aux.setTimestamp(rs.getTimestamp("timeStamp")); - - /** - * This code is only for the 1.3.18. This is a workaround to get FQANs - * using the proxy field on request_queue. The FE use the proxy field of - * request_queue to insert a single FQAN string containing all FQAN - * separated by the "#" char. The proxy is a BLOB, hence it has to be - * properly converted in string. - */ - java.sql.Blob blob = rs.getBlob("proxy"); - if (blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - aux.setVomsAttributes(new String(bdata)); - } - - list.add(aux); - } - close(rs); - close(stmt); - - // transit state from SRM_REQUEST_QUEUED to SRM_REQUEST_INPROGRESS - if (!list.isEmpty()) { - logWarnings(con.getWarnings()); - String where = makeWhereString(rowids); - String update = "UPDATE request_queue SET status=" - + StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_REQUEST_INPROGRESS) + ", errstring=?" - + " WHERE ID IN " + where; - stmt = con.prepareStatement(update); - logWarnings(stmt.getWarnings()); - stmt.setString(1, "Request handled!"); - logWarnings(stmt.getWarnings()); - log.trace("REQUEST SUMMARY DAO - findNew: executing {}", stmt); - stmt.executeUpdate(); - close(stmt); - } - - // commit and finish transaction - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - findNew: Unable to complete picking. " - + "Error: {}. Rolling back!", e.getMessage(), e); - } finally { - close(rs); - close(stmt); - } - // return collection of requests - if (!list.isEmpty()) { - log.debug("REQUEST SUMMARY DAO - findNew: returning {}", list); - } - return list; - } - - /** - * Method used to signal in the DB that a request failed: the status of the - * request identified by the primary key index is transited to SRM_FAILURE, - * with the supplied explanation String. The supplied index is the primary key - * of the global request. In case of any error, nothing gets done and no - * exception is thrown, but proper error messages get logged. - */ - public void failRequest(long index, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - failRequest: unable to get a valid connection!"); - return; - } - String signalSQL = "UPDATE request_queue r " + "SET r.status=" - + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) - + ", r.errstring=? 
" + "WHERE r.ID=?"; - PreparedStatement signal = null; - try { - signal = con.prepareStatement(signalSQL); - logWarnings(con.getWarnings()); - signal.setString(1, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(signal.getWarnings()); - signal.setLong(2, index); - logWarnings(signal.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failRequest executing: {}", signal); - signal.executeUpdate(); - logWarnings(signal.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO! Unable to transit request identified by " - + "ID {} to SRM_FAILURE! Error: {}", index, e.getMessage(), e); - } finally { - close(signal); - } - } - - /** - * Method used to signal in the DB that a PtGRequest failed. The global status - * transits to SRM_FAILURE, as well as that of each chunk associated to the - * request. The supplied explanation string is used both for the global status - * as well as for each individual chunk. The supplied index is the primary key - * of the global request. In case of any error, nothing gets done and no - * exception is thrown, but proper error messages get logged. - */ - public void failPtGRequest(long index, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - failPtGRequest: unable to get a valid connection!"); - return; - } - String requestSQL = "UPDATE request_queue r " - + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; - String chunkSQL = "UPDATE " - + "status_Get s JOIN (request_queue r, request_Get g) ON s.request_GetID=g.ID AND g.request_queueID=r.ID " - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=?"; - PreparedStatement request = null; - PreparedStatement chunk = null; - int failCode = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_FAILURE); - try { - // start transaction - con.setAutoCommit(false); - - // update global status - request = con.prepareStatement(requestSQL); - logWarnings(con.getWarnings()); - request.setInt(1, failCode); - logWarnings(request.getWarnings()); - request.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(request.getWarnings()); - request.setLong(3, index); - logWarnings(request.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failPtGRequest executing: {}", request); - request.executeUpdate(); - logWarnings(request.getWarnings()); - - // update each chunk status - chunk = con.prepareStatement(chunkSQL); - logWarnings(con.getWarnings()); - chunk.setInt(1, failCode); - logWarnings(chunk.getWarnings()); - chunk.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(chunk.getWarnings()); - chunk.setLong(3, index); - logWarnings(chunk.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failPtGRequest executing: {}", chunk); - chunk.executeUpdate(); - logWarnings(chunk.getWarnings()); - - // commit and finish transaction - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO! Unable to transit PtG request identified " - + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", index, - e.getMessage(), e); - rollback(con); - } finally { - close(request); - close(chunk); - } - } - - /** - * Method used to signal in the DB that a PtPRequest failed. The global status - * transits to SRM_FAILURE, as well as that of each chunk associated to the - * request. 
The supplied explanation string is used both for the global status - * as well as for each individual chunk. The supplied index is the primary key - * of the global request. In case of any error, nothing gets done and no - * exception is thrown, but proper error messagges get logged. - */ - public void failPtPRequest(long index, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - failPtPRequest: unable to get a valid connection!"); - return; - } - String requestSQL = "UPDATE request_queue r " - + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; - String chunkSQL = "UPDATE " - + "status_Put s JOIN (request_queue r, request_Put p) ON s.request_PutID=p.ID AND p.request_queueID=r.ID " - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=?"; - PreparedStatement request = null; - PreparedStatement chunk = null; - int failCode = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_FAILURE); - try { - // start transaction - con.setAutoCommit(false); - - // update global status - request = con.prepareStatement(requestSQL); - logWarnings(con.getWarnings()); - request.setInt(1, failCode); - logWarnings(request.getWarnings()); - request.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(request.getWarnings()); - request.setLong(3, index); - logWarnings(request.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failPtPRequest executing: {}", request); - request.executeUpdate(); - logWarnings(request.getWarnings()); - - // update each chunk status - chunk = con.prepareStatement(chunkSQL); - logWarnings(con.getWarnings()); - chunk.setInt(1, failCode); - logWarnings(chunk.getWarnings()); - chunk.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(chunk.getWarnings()); - chunk.setLong(3, index); - logWarnings(chunk.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failPtPRequest executing: {}", chunk); - chunk.executeUpdate(); - logWarnings(chunk.getWarnings()); - - // commit and finish transaction - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO! Unable to transit PtP request identified " - + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", index, - e.getMessage(), e); - rollback(con); - } finally { - close(request); - close(chunk); - } - } - - /** - * Method used to signal in the DB that a CopyRequest failed. The global - * status transits to SRM_FAILURE, as well as that of each chunk associated to - * the request. The supplied explanation string is used both for the global - * status as well as for each individual chunk. The supplied index is the - * primary key of the global request. In case of any error, nothing gets done - * and no exception is thrown, but proper error messagges get logged. - */ - public void failCopyRequest(long index, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - failCopyRequest: unable to get a valid connection!"); - return; - } - String requestSQL = "UPDATE request_queue r " - + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; - String chunkSQL = "UPDATE " - + "status_Copy s JOIN (request_queue r, request_Copy c) ON s.request_CopyID=c.ID AND c.request_queueID=r.ID " - + "SET s.statusCode=?, s.explanation=? 
" + "WHERE r.ID=?"; - PreparedStatement request = null; - PreparedStatement chunk = null; - int failCode = StatusCodeConverter.getInstance().toDB( - TStatusCode.SRM_FAILURE); - try { - // start transaction - con.setAutoCommit(false); - - // update global status - request = con.prepareStatement(requestSQL); - logWarnings(con.getWarnings()); - request.setInt(1, failCode); - logWarnings(request.getWarnings()); - request.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(request.getWarnings()); - request.setLong(3, index); - logWarnings(request.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failCopyRequest executing: {}", request); - request.executeUpdate(); - logWarnings(request.getWarnings()); - - // update each chunk status - chunk = con.prepareStatement(chunkSQL); - logWarnings(con.getWarnings()); - chunk.setInt(1, failCode); - logWarnings(chunk.getWarnings()); - chunk.setString(2, explanation); // Prepared statement spares - // DB-specific String notation! - logWarnings(chunk.getWarnings()); - chunk.setLong(3, index); - logWarnings(chunk.getWarnings()); - log.trace("REQUEST SUMMARY DAO! failCopyRequest executing: {}", chunk); - chunk.executeUpdate(); - logWarnings(chunk.getWarnings()); - - // commit and finish transaction - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO! Unable to transit Copy request identified " - + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", index, - e.getMessage(), e); - rollback(con); - } finally { - close(request); - close(chunk); - } - } - - /** - * Method used to update the global status of the request identified by the - * RequestToken rt. It gets updated the supplied status, with the supplied - * explanation String. If the supplied request token does not exist, nothing - * happens. - */ - public void updateGlobalStatus(String rt, int status, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - updateGlobalStatus: unable to get a valid connection!"); - return; - } - PreparedStatement update = null; - try { - update = con - .prepareStatement("UPDATE request_queue SET status=?, errstring=? WHERE r_token=?"); - logWarnings(con.getWarnings()); - update.setInt(1, status); - logWarnings(update.getWarnings()); - update.setString(2, explanation); - logWarnings(update.getWarnings()); - update.setString(3, rt); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - updateGlobalStatus: executing {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); - } finally { - close(update); - } - } - - public void updateGlobalStatusOnMatchingGlobalStatus( - TRequestToken requestToken, TStatusCode expectedStatusCode, - TStatusCode newStatusCode, String explanation) { - - if (!checkConnection()) { - log - .error("REQUEST SUMMARY DAO - updateGlobalStatusOnMatchingGlobalStatus: " - + "unable to get a valid connection!"); - return; - } - PreparedStatement update = null; - try { - update = con - .prepareStatement("UPDATE request_queue SET status=?, errstring=? WHERE r_token=? 
AND status=?"); - logWarnings(con.getWarnings()); - update.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); - logWarnings(update.getWarnings()); - update.setString(2, explanation); - logWarnings(update.getWarnings()); - update.setString(3, requestToken.toString()); - logWarnings(update.getWarnings()); - update.setInt(4, - StatusCodeConverter.getInstance().toDB(expectedStatusCode)); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - updateGlobalStatusOnMatchingGlobalStatus: " - + "executing {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); - } finally { - close(update); - } - } - - /** - * Method used to update the global status of the request identified by the - * RequestToken rt. It gets updated the supplied status, with the supplied - * explanation String and pin and file lifetimes are updated in order to start - * the countdown from now. If the supplied request token does not exist, - * nothing happens. - */ - public void updateGlobalStatusPinFileLifetime(String rt, int status, - String explanation) { - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - updateGlobalStatusPinFileLifetime: " - + "unable to get a valid connection!"); - return; - } - PreparedStatement update = null; - - String query = "UPDATE request_queue SET status=?, errstring=?, " - + "pinLifetime=pinLifetime+(UNIX_TIMESTAMP()-UNIX_TIMESTAMP(timeStamp)) " - + "WHERE r_token=?"; - - try { - update = con.prepareStatement(query); - logWarnings(con.getWarnings()); - - update.setInt(1, status); - logWarnings(update.getWarnings()); - - update.setString(2, explanation); - logWarnings(update.getWarnings()); - - update.setString(3, rt); - logWarnings(update.getWarnings()); - - log.trace("REQUEST SUMMARY DAO - updateGlobalStatus: executing {}", update); - - update.executeUpdate(); - logWarnings(update.getWarnings()); - - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); - } finally { - close(update); - } - } - - /** - * Method used to transit the status of a request that is in - * SRM_REQUEST_QUEUED state, to SRM_ABORTED. All files associated with the - * request will also get their status changed to SRM_ABORTED. If the supplied - * token is null, or not found, or not in the SRM_REQUEST_QUEUED state, then - * nothing happens. - */ - public void abortRequest(String rt) { - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - abortRequest: unable to get a valid connection!"); - return; - } - PreparedStatement update = null; - PreparedStatement query = null; - ResultSet rs = null; - try { - query = con - .prepareStatement("SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - logWarnings(query.getWarnings()); - query.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); - logWarnings(query.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortRequest - {}", query); - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (rs.next()) { - long id = rs.getLong("ID"); - String type = rs.getString("config_RequestTypeID"); - update = con - .prepareStatement("UPDATE request_queue SET status=?, errstring=? 
WHERE ID=?"); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortRequest - {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - close(update); - // update single chunk file statuses - TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); - String statusTable = null; - String requestTable = null; - String joinColumn = null; - if (rtyp != TRequestType.EMPTY) { - if (rtyp == TRequestType.PREPARE_TO_GET) { - statusTable = "status_Get"; - requestTable = "request_Get"; - joinColumn = "request_GetID"; - } else if (rtyp == TRequestType.PREPARE_TO_PUT) { - requestTable = "request_Put"; - statusTable = "status_Put"; - joinColumn = "request_PutID"; - } else if (rtyp == TRequestType.COPY) { - requestTable = "request_Copy"; - statusTable = "status_Copy"; - joinColumn = "request_CopyID"; - } else { - requestTable = "request_BoL"; - statusTable = "status_BoL"; - joinColumn = "request_BoLID"; - } - String auxstr = "UPDATE " + statusTable - + " s JOIN (request_queue r, " + requestTable + " t) ON (s." - + joinColumn + "=t.ID AND t.request_queueID=r.ID) " - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=?"; - update = con.prepareStatement(auxstr); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortRequest - {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } else { - log.error("REQUEST SUMMARY DAO - Unable to complete abortRequest: " - + "could not update file statuses because the request type could " - + "not be translated from the DB!"); - } - } - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - abortRequest: {}", e.getMessage(), e); - } finally { - close(rs); - close(query); - close(update); - } - } - - /** - * Method used to transit the status of a request that is in - * SRM_REQUEST_INPROGRESS state, to SRM_ABORTED. All files associated with the - * request will also get their status changed to SRM_ABORTED. If the supplied - * token is null, or not found, or not in the SRM_REQUEST_INPROGRESS state, - * then nothing happens. - */ - public void abortInProgressRequest(String rt) { - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - abortInProgressRequest: unable to get " - + "a valid connection!"); - return; - } - PreparedStatement update = null; - PreparedStatement query = null; - ResultSet rs = null; - try { - query = con - .prepareStatement("SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - logWarnings(query.getWarnings()); - query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); - logWarnings(query.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", query); - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (rs.next()) { - // token found... 
- // get ID - long id = rs.getLong("ID"); - String type = rs.getString("config_RequestTypeID"); - // update global request status - update = con - .prepareStatement("UPDATE request_queue SET status=?, errstring=? WHERE ID=?"); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - close(update); - // update single chunk file statuses - TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); - String statusTable = null; - String requestTable = null; - String joinColumn = null; - if (rtyp != TRequestType.EMPTY) { - if (rtyp == TRequestType.PREPARE_TO_GET) { - requestTable = "request_Get"; - statusTable = "status_Get"; - joinColumn = "request_GetID"; - } else if (rtyp == TRequestType.PREPARE_TO_PUT) { - requestTable = "request_Put"; - statusTable = "status_Put"; - joinColumn = "request_PutID"; - } else if (rtyp == TRequestType.COPY) { - requestTable = "request_Copy"; - statusTable = "status_Copy"; - joinColumn = "request_CopyID"; - } else { - requestTable = "request_BoL"; - statusTable = "status_BoL"; - joinColumn = "request_BoLID"; - } - String auxstr = "UPDATE " + statusTable - + " s JOIN (request_queue r, " + requestTable + " t ON s." - + joinColumn + "=t.ID AND t.request_queueID=r.ID )" - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=?"; - update = con.prepareStatement(auxstr); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } else { - log.error("REQUEST SUMMARY DAO - Unable to complete " - + "abortInProgressRequest: could not update file statuses because " - + "the request type could not be translated from the DB!"); - } - } - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - abortInProgressRequest: {}", - e.getMessage(), e); - } finally { - close(rs); - close(query); - close(update); - } - } - - /** - * Method used to transit the status of chunks of a request that is in - * SRM_REQUEST_QUEUED state, to SRM_ABORTED. If the supplied token is null, or - * not found, or not in the SRM_REQUEST_QUEUED state, then nothing happens. - */ - public void abortChunksOfRequest(String rt, Collection surls) { - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - abortChunksOfRequest: unable to get a valid connection!"); - return; - } - PreparedStatement update = null; - PreparedStatement query = null; - ResultSet rs = null; - try { - query = con - .prepareStatement("SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? 
AND status=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - logWarnings(query.getWarnings()); - query.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); - logWarnings(query.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortChunksOfRequest - {}", query); - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (rs.next()) { - long id = rs.getLong("ID"); - String type = rs.getString("config_RequestTypeID"); - // update single chunk file statuses - TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); - String statusTable = null; - String requestTable = null; - String joinColumn = null; - String surlColumn = null; - if (rtyp != TRequestType.EMPTY) { - if (rtyp == TRequestType.PREPARE_TO_GET) { - requestTable = "request_Get"; - statusTable = "status_Get"; - joinColumn = "request_GetID"; - surlColumn = "sourceSURL"; - } else if (rtyp == TRequestType.PREPARE_TO_PUT) { - requestTable = "request_Put"; - statusTable = "status_Put"; - joinColumn = "request_PutID"; - surlColumn = "targetSURL"; - } else if (rtyp == TRequestType.COPY) { - requestTable = "request_Copy"; - statusTable = "status_Copy"; - joinColumn = "request_CopyID"; - surlColumn = "targetSURL"; - } else { - requestTable = "request_BoL"; - statusTable = "status_BoL"; - joinColumn = "request_BoLID"; - surlColumn = "sourceSURL"; - } - String auxstr = "UPDATE " + statusTable - + " s JOIN (request_queue r, " + requestTable + " t ON s." - + joinColumn + "=t.ID AND t.request_queueID=r.ID " - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=? AND " - + surlColumn + " IN " + makeInString(surls); - update = con.prepareStatement(auxstr); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortChunksOfRequest - {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } else { - log.error("REQUEST SUMMARY DAO - Unable to complete " - + "abortChunksOfRequest: could not update file statuses because " - + "the request type could not be translated from the DB!"); - } - } - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - abortChunksOfRequest: {}", - e.getMessage(), e); - } finally { - close(rs); - close(query); - close(update); - } - } - - /** - * Method used to transit the status of chunks of a request that is in - * SRM_REQUEST_INPROGRESS state, to SRM_ABORTED. If the supplied token is - * null, or not found, or not in the SRM_REQUEST_INPROGRESS state, then - * nothing happens. - */ - public void abortChunksOfInProgressRequest(String rt, Collection surls) { - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest: unable " - + "to get a valid connection!"); - return; - } - PreparedStatement update = null; - PreparedStatement query = null; - ResultSet rs = null; - try { - query = con - .prepareStatement("SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? 
AND status=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - logWarnings(query.getWarnings()); - query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); - logWarnings(query.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest - {}", query); - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (rs.next()) { - long id = rs.getLong("ID"); - String type = rs.getString("config_RequestTypeID"); - // update single chunk file statuses - TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); - String statusTable = null; - String requestTable = null; - String joinColumn = null; - String surlColumn = null; - if (rtyp != TRequestType.EMPTY) { - if (rtyp == TRequestType.PREPARE_TO_GET) { - requestTable = "request_Get"; - statusTable = "status_Get"; - joinColumn = "request_GetID"; - surlColumn = "sourceSURL"; - } else if (rtyp == TRequestType.PREPARE_TO_PUT) { - requestTable = "request_Put"; - statusTable = "status_Put"; - joinColumn = "request_PutID"; - surlColumn = "targetSURL"; - } else if (rtyp == TRequestType.COPY) { - requestTable = "request_Copy"; - statusTable = "status_Copy"; - joinColumn = "request_CopyID"; - surlColumn = "targetSURL"; - } else { - requestTable = "request_BoL"; - statusTable = "status_BoL"; - joinColumn = "request_BoLID"; - surlColumn = "sourceSURL"; - } - String auxstr = "UPDATE " + statusTable - + " s JOIN (request_queue r, " + requestTable + " t ON s." - + joinColumn + "=t.ID AND t.request_queueID=r.ID " - + "SET s.statusCode=?, s.explanation=? " + "WHERE r.ID=? AND " - + surlColumn + " IN " + makeInString(surls); - update = con.prepareStatement(auxstr); - logWarnings(con.getWarnings()); - update.setInt(1, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); - logWarnings(update.getWarnings()); - update.setString(2, "User aborted request!"); - logWarnings(update.getWarnings()); - update.setLong(3, id); - logWarnings(update.getWarnings()); - log.trace("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest " - + "- {}", update); - update.executeUpdate(); - logWarnings(update.getWarnings()); - } else { - log.error("REQUEST SUMMARY DAO - Unable to complete " - + "abortChunksOfInProgressRequest: could not update file statuses " - + "because the request type could not be translated from the DB!"); - } - } - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest: {}", - e.getMessage(), e); - } finally { - close(rs); - close(query); - close(update); - } - } - - /** - * Private method that returns a String of all SURLS in the collection of - * String. - */ - private String makeInString(Collection c) { - - StringBuilder sb = new StringBuilder("("); - for (Iterator i = c.iterator(); i.hasNext();) { - sb.append(i.next()); - if (i.hasNext()) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns the config_RequestTypeID field present in request_queue - * table, for the request with the specified request token rt. In case of any - * error, the empty String "" is returned. 
- */ - public String typeOf(String rt) { - - PreparedStatement query = null; - ResultSet rs = null; - String result = ""; - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - typeOf: unable to get a valid connection!"); - return result; - } - try { - query = con - .prepareStatement("SELECT config_RequestTypeID from request_queue WHERE r_token=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - logWarnings(query.getWarnings()); - log.trace("REQUEST SUMMARY DAO - typeOf - {}", query); - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (rs.next()) { - result = rs.getString("config_RequestTypeID"); - } - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - typeOf - {}", e.getMessage(), e); - } finally { - close(rs); - close(query); - } - return result; - } - - /** - * Method that returns the config_RequestTypeID field present in request_queue - * table, for the request with the specified request token rt. In case of any - * error, the empty String "" is returned. - */ - public RequestSummaryDataTO find(String rt) { - - PreparedStatement query = null; - ResultSet rs = null; - RequestSummaryDataTO to = null; - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - find: unable to get a valid connection!"); - return null; - } - try { - query = con - .prepareStatement("SELECT * from request_queue WHERE r_token=?"); - logWarnings(con.getWarnings()); - query.setString(1, rt); - con.setAutoCommit(false); - - rs = query.executeQuery(); - logWarnings(query.getWarnings()); - if (!rs.first()) { - log.debug("No requests found with token {}", rt); - return null; - } - to = new RequestSummaryDataTO(); - to.setPrimaryKey(rs.getLong("ID")); - to.setRequestType(rs.getString("config_RequestTypeID")); - to.setClientDN(rs.getString("client_dn")); - to.setUserToken(rs.getString("u_token")); - to.setRetrytime(rs.getInt("retrytime")); - to.setPinLifetime(rs.getInt("pinLifetime")); - to.setSpaceToken(rs.getString("s_token")); - to.setStatus(rs.getInt("status")); - to.setErrstring(rs.getString("errstring")); - to.setRequestToken(rs.getString("r_token")); - to.setRemainingTotalTime(rs.getInt("remainingTotalTime")); - to.setFileLifetime(rs.getInt("fileLifetime")); - to.setNbreqfiles(rs.getInt("nbreqfiles")); - to.setNumOfCompleted(rs.getInt("numOfCompleted")); - to.setNumOfWaiting(rs.getInt("numOfWaiting")); - to.setNumOfFailed(rs.getInt("numOfFailed")); - to.setTimestamp(rs.getTimestamp("timeStamp")); - - - java.sql.Blob blob = rs.getBlob("proxy"); - if (blob != null) { - byte[] bdata = blob.getBytes(1, (int) blob.length()); - to.setVomsAttributes(new String(bdata)); - } - to.setDeferredStartTime(rs.getInt("deferredStartTime")); - to.setRemainingDeferredStartTime(rs.getInt("remainingDeferredStartTime")); - - if (rs.next()) { - log.warn("More than a row matches token {}", rt); - } - close(rs); - close(query); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - find - {}", e.getMessage(), e); - } finally { - close(rs); - close(query); - } - return to; - } - - /** - * Method that purges expired requests: it only removes up to a fixed value of - * expired requests at a time. The value is configured and obtained from the - * configuration property getPurgeBatchSize. A List of Strings with the - * request tokens removed is returned. In order to completely remove all - * expired requests, simply keep invoking this method until an empty List is - * returned. 
This batch processing is needed because there could be millions - * of expired requests which are likely to result in out-of-memory problems. - * Notice that in case of errors only error messages get logged. An empty List - * is also returned. - */ - public List purgeExpiredRequests(long expiredRequestTime, int purgeSize) { - - PreparedStatement ps = null; - ResultSet rs = null; - List requestTokens = Lists.newArrayList(); - List ids = Lists.newArrayList(); - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - purgeExpiredRequests: unable to get a " - + "valid connection!"); - return requestTokens; - } - - try { - // start transaction - con.setAutoCommit(false); - String stmt = "SELECT ID, r_token FROM request_queue WHERE UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(timeStamp) > ? AND status <> ? AND status <> ? LIMIT ?"; - ps = con.prepareStatement(stmt); - ps.setLong(1, expiredRequestTime); - ps.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); - ps.setInt(3, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); - ps.setInt(4, purgeSize); - logWarnings(con.getWarnings()); - log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", ps); - - rs = ps.executeQuery(); - logWarnings(ps.getWarnings()); - - while (rs.next()) { - requestTokens.add(rs.getString("r_token")); - ids.add(new Long(rs.getLong("ID"))); - } - - close(rs); - close(ps); - - if (!ids.isEmpty()) { - // REMOVE BATCH OF EXPIRED REQUESTS! - stmt = "DELETE FROM request_queue WHERE ID in " + makeWhereString(ids); - - ps = con.prepareStatement(stmt); - logWarnings(con.getWarnings()); - log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", stmt); - - int deleted = ps.executeUpdate(); - logWarnings(ps.getWarnings()); - if (deleted > 0) { - log.info("REQUEST SUMMARY DAO - purgeExpiredRequests - Deleted {} " - + "expired requests.", deleted); - } else { - log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - No deleted " - + "expired requests."); - } - - close(ps); - - stmt = "DELETE request_DirOption FROM request_DirOption " - + " LEFT JOIN request_Get ON request_DirOption.ID = request_Get.request_DirOptionID" - + " LEFT JOIN request_BoL ON request_DirOption.ID = request_BoL.request_DirOptionID " - + " LEFT JOIN request_Copy ON request_DirOption.ID = request_Copy.request_DirOptionID" - + " WHERE request_Copy.request_DirOptionID IS NULL AND" - + " request_Get.request_DirOptionID IS NULL AND" - + " request_BoL.request_DirOptionID IS NULL;"; - - ps = con.prepareStatement(stmt); - logWarnings(con.getWarnings()); - log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", stmt); - deleted = ps.executeUpdate(); - logWarnings(ps.getWarnings()); - - if (deleted > 0) { - log.info("REQUEST SUMMARY DAO - purgeExpiredRequests - Deleted {} " - + "DirOption related to expired requests.", deleted); - } else { - log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - No Deleted " - + "DirOption related to expired requests."); - } - close(ps); - - } - // commit and finish transaction - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); - logWarnings(con.getWarnings()); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - purgeExpiredRequests - Rolling back " - + "because of error: {}", e.getMessage(), e); - rollback(con); - } finally { - close(rs); - close(ps); - } - return requestTokens; - } - - /** - * Retrieve the total number of expired requests. 
- * - * @return - */ - public int getNumberExpired() { - - int rowCount = 0; - - if (!checkConnection()) { - log.error("REQUEST SUMMARY DAO - getNumberExpired: unable to get a " - + "valid connection!"); - return 0; - } - - PreparedStatement ps = null; - ResultSet rs = null; - - try { - // start transaction - con.setAutoCommit(false); - - String stmt = "SELECT count(*) FROM request_queue WHERE UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(timeStamp) > ? AND status <> ? AND status <> ? "; - ps = con.prepareStatement(stmt); - ps.setLong(1, Configuration.getInstance().getExpiredRequestTime()); - ps.setInt(2, - StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); - ps.setInt(3, StatusCodeConverter.getInstance() - .toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); - - logWarnings(con.getWarnings()); - log.trace("REQUEST SUMMARY DAO - Number of expired requests: {}", ps); - rs = ps.executeQuery(); - logWarnings(ps.getWarnings()); - - // Get the number of rows from the result set - rs.next(); - rowCount = rs.getInt(1); - log.debug("Nr of expired requests is: {}", rowCount); - - close(rs); - close(ps); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO - purgeExpiredRequests - Rolling back " - + "because of error: {}", e.getMessage(), e); - rollback(con); - } finally { - close(rs); - close(ps); - } - - return rowCount; - - } - - /** - * Private method that returns a String of all IDs retrieved by the last - * SELECT. - */ - private String makeWhereString(List rowids) { - - StringBuilder sb = new StringBuilder("("); - for (Iterator i = rowids.iterator(); i.hasNext();) { - sb.append(i.next()); - if (i.hasNext()) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Auxiliary method that sets up the connection to the DB, as well as the - * prepared statement. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - logWarnings(con.getWarnings()); - response = con.isValid(0); - } catch (ClassNotFoundException | SQLException e) { - log.error("REQUEST SUMMARY DAO! Exception in setUpConnection! {}", e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that tales down a connection to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - log.error("REQUEST SUMMARY DAO! Exception in takeDownConnection " - + "method: {}", e.getMessage(), e); - } - } - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. - */ - private boolean checkConnection() { - - boolean response = true; - if (reconnect) { - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method used to close a Statement - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("REQUEST SUMMARY DAO! Unable to close Statement {} - " - + "Error: {}", stmt.toString(), e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to close a ResultSet - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("REQUEST SUMMARY DAO! Unable to close ResultSet! 
Error: {}", - e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to roll back a transaction - */ - private void rollback(Connection con) { - - if (con != null) { - try { - con.rollback(); - logWarnings(con.getWarnings()); - log.error("PICKER2: roll back successful!"); - } catch (SQLException e2) { - log.error("PICKER2: roll back failed! {}", e2.getMessage(), e2); - } - } - } - - /** - * Private auxiliary method used to log SQLWarnings. - */ - private void logWarnings(SQLWarning warning) { - - if (warning != null) { - log.debug("REQUEST SUMMARY DAO: {}", warning); - while ((warning = warning.getNextWarning()) != null) { - log.debug("REQUEST SUMMARY DAO: {}", warning); - } - } - } - + private static final Logger log = LoggerFactory.getLogger(RequestSummaryDAO.class); + + /** String with the name of the class for the DB driver */ + private final String driver = Configuration.getInstance().getDBDriver(); + /** String referring to the URL of the DB */ + private final String url = Configuration.getInstance().getStormDbURL(); + /** String with the password for the DB */ + private final String password = Configuration.getInstance().getDBPassword(); + /** String with the name for the DB */ + private final String name = Configuration.getInstance().getDBUserName(); + /** maximum number of requests that will be retrieved */ + private int limit; + /** Connection to DB - WARNING!!! It is kept open all the time! */ + private Connection con = null; + + /** milliseconds that must pass before reconnecting to DB */ + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + /** initial delay in milliseconds before starting timer */ + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + /** timer thread that will run a task to alert when reconnecting is necessary! */ + private Timer clock = null; + /** timer task that will update the boolean signaling that a reconnection is needed! */ + private TimerTask clockTask = null; + /** boolean that tells whether reconnection is needed because of MySQL bug! */ + private boolean reconnect = false; + + private static final RequestSummaryDAO dao = new RequestSummaryDAO(); + + private RequestSummaryDAO() { + + int aux = Configuration.getInstance().getPickingMaxBatchSize(); + if (aux > 1) { + limit = aux; + } else { + limit = 1; + } + setUpConnection(); + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of the RequestSummaryDAO. */ + public static RequestSummaryDAO getInstance() { + + return dao; + } + + /** + * Method that retrieves requests in the SRM_REQUEST_QUEUED status: retrieved requests are limited + * to the number specified by the Configuration method getPicker2MaxBatchSize. All retrieved + * requests get their global status transited to SRM_REQUEST_INPROGRESS. A Collection of + * RequestSummaryDataTO is returned: if none are found, an empty collection is returned. 
+ */ + public Collection findNew(int freeSlot) { + + PreparedStatement stmt = null; + ResultSet rs = null; + List list = Lists.newArrayList(); + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - findNew: unable to get a valid connection!"); + return list; + } + // RequestSummaryDataTO + try { + // start transaction + con.setAutoCommit(false); + + int howMuch = -1; + if (freeSlot > limit) { + howMuch = limit; + } else { + howMuch = freeSlot; + } + + String query = + "SELECT ID, config_RequestTypeID, r_token, timeStamp, " + + "client_dn, proxy FROM request_queue WHERE status=? LIMIT ?"; + + // get id, request type, request token and client_DN of newly added + // requests, which must be in SRM_REQUEST_QUEUED state + stmt = con.prepareStatement(query); + logWarnings(con.getWarnings()); + + stmt.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); + stmt.setInt(2, howMuch); + + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + + List rowids = new ArrayList(); // arraylist with selected ids + RequestSummaryDataTO aux = null; // RequestSummaryDataTO made from + // retrieved row + long auxid; // primary key of retrieved row + while (rs.next()) { + auxid = rs.getLong("ID"); + rowids.add(Long.valueOf(auxid)); + aux = new RequestSummaryDataTO(); + aux.setPrimaryKey(auxid); + aux.setRequestType(rs.getString("config_RequestTypeID")); + aux.setRequestToken(rs.getString("r_token")); + aux.setClientDN(rs.getString("client_dn")); + aux.setTimestamp(rs.getTimestamp("timeStamp")); + + /** + * This code is only for the 1.3.18. This is a workaround to get FQANs using the proxy field + * on request_queue. The FE use the proxy field of request_queue to insert a single FQAN + * string containing all FQAN separated by the "#" char. The proxy is a BLOB, hence it has + * to be properly converted in string. + */ + java.sql.Blob blob = rs.getBlob("proxy"); + if (blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + aux.setVomsAttributes(new String(bdata)); + } + + list.add(aux); + } + close(rs); + close(stmt); + + // transit state from SRM_REQUEST_QUEUED to SRM_REQUEST_INPROGRESS + if (!list.isEmpty()) { + logWarnings(con.getWarnings()); + String where = makeWhereString(rowids); + String update = + "UPDATE request_queue SET status=" + + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS) + + ", errstring=?" + + " WHERE ID IN " + + where; + stmt = con.prepareStatement(update); + logWarnings(stmt.getWarnings()); + stmt.setString(1, "Request handled!"); + logWarnings(stmt.getWarnings()); + log.trace("REQUEST SUMMARY DAO - findNew: executing {}", stmt); + stmt.executeUpdate(); + close(stmt); + } + + // commit and finish transaction + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO - findNew: Unable to complete picking. " + + "Error: {}. Rolling back!", + e.getMessage(), + e); + } finally { + close(rs); + close(stmt); + } + // return collection of requests + if (!list.isEmpty()) { + log.debug("REQUEST SUMMARY DAO - findNew: returning {}", list); + } + return list; + } + + /** + * Method used to signal in the DB that a request failed: the status of the request identified by + * the primary key index is transited to SRM_FAILURE, with the supplied explanation String. The + * supplied index is the primary key of the global request. 
In case of any error, nothing gets + * done and no exception is thrown, but proper error messages get logged. + */ + public void failRequest(long index, String explanation) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - failRequest: unable to get a valid connection!"); + return; + } + String signalSQL = + "UPDATE request_queue r " + + "SET r.status=" + + StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE) + + ", r.errstring=? " + + "WHERE r.ID=?"; + PreparedStatement signal = null; + try { + signal = con.prepareStatement(signalSQL); + logWarnings(con.getWarnings()); + signal.setString(1, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(signal.getWarnings()); + signal.setLong(2, index); + logWarnings(signal.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failRequest executing: {}", signal); + signal.executeUpdate(); + logWarnings(signal.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO! Unable to transit request identified by " + + "ID {} to SRM_FAILURE! Error: {}", + index, + e.getMessage(), + e); + } finally { + close(signal); + } + } + + /** + * Method used to signal in the DB that a PtGRequest failed. The global status transits to + * SRM_FAILURE, as well as that of each chunk associated to the request. The supplied explanation + * string is used both for the global status as well as for each individual chunk. The supplied + * index is the primary key of the global request. In case of any error, nothing gets done and no + * exception is thrown, but proper error messages get logged. + */ + public void failPtGRequest(long index, String explanation) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - failPtGRequest: unable to get a valid connection!"); + return; + } + String requestSQL = + "UPDATE request_queue r " + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; + String chunkSQL = + "UPDATE " + + "status_Get s JOIN (request_queue r, request_Get g) ON s.request_GetID=g.ID AND g.request_queueID=r.ID " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=?"; + PreparedStatement request = null; + PreparedStatement chunk = null; + int failCode = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE); + try { + // start transaction + con.setAutoCommit(false); + + // update global status + request = con.prepareStatement(requestSQL); + logWarnings(con.getWarnings()); + request.setInt(1, failCode); + logWarnings(request.getWarnings()); + request.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(request.getWarnings()); + request.setLong(3, index); + logWarnings(request.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failPtGRequest executing: {}", request); + request.executeUpdate(); + logWarnings(request.getWarnings()); + + // update each chunk status + chunk = con.prepareStatement(chunkSQL); + logWarnings(con.getWarnings()); + chunk.setInt(1, failCode); + logWarnings(chunk.getWarnings()); + chunk.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(chunk.getWarnings()); + chunk.setLong(3, index); + logWarnings(chunk.getWarnings()); + log.trace("REQUEST SUMMARY DAO! 
failPtGRequest executing: {}", chunk); + chunk.executeUpdate(); + logWarnings(chunk.getWarnings()); + + // commit and finish transaction + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO! Unable to transit PtG request identified " + + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", + index, + e.getMessage(), + e); + rollback(con); + } finally { + close(request); + close(chunk); + } + } + + /** + * Method used to signal in the DB that a PtPRequest failed. The global status transits to + * SRM_FAILURE, as well as that of each chunk associated to the request. The supplied explanation + * string is used both for the global status as well as for each individual chunk. The supplied + * index is the primary key of the global request. In case of any error, nothing gets done and no + * exception is thrown, but proper error messagges get logged. + */ + public void failPtPRequest(long index, String explanation) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - failPtPRequest: unable to get a valid connection!"); + return; + } + String requestSQL = + "UPDATE request_queue r " + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; + String chunkSQL = + "UPDATE " + + "status_Put s JOIN (request_queue r, request_Put p) ON s.request_PutID=p.ID AND p.request_queueID=r.ID " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=?"; + PreparedStatement request = null; + PreparedStatement chunk = null; + int failCode = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE); + try { + // start transaction + con.setAutoCommit(false); + + // update global status + request = con.prepareStatement(requestSQL); + logWarnings(con.getWarnings()); + request.setInt(1, failCode); + logWarnings(request.getWarnings()); + request.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(request.getWarnings()); + request.setLong(3, index); + logWarnings(request.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failPtPRequest executing: {}", request); + request.executeUpdate(); + logWarnings(request.getWarnings()); + + // update each chunk status + chunk = con.prepareStatement(chunkSQL); + logWarnings(con.getWarnings()); + chunk.setInt(1, failCode); + logWarnings(chunk.getWarnings()); + chunk.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(chunk.getWarnings()); + chunk.setLong(3, index); + logWarnings(chunk.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failPtPRequest executing: {}", chunk); + chunk.executeUpdate(); + logWarnings(chunk.getWarnings()); + + // commit and finish transaction + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO! Unable to transit PtP request identified " + + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", + index, + e.getMessage(), + e); + rollback(con); + } finally { + close(request); + close(chunk); + } + } + + /** + * Method used to signal in the DB that a CopyRequest failed. The global status transits to + * SRM_FAILURE, as well as that of each chunk associated to the request. The supplied explanation + * string is used both for the global status as well as for each individual chunk. The supplied + * index is the primary key of the global request. 
In case of any error, nothing gets done and no + * exception is thrown, but proper error messagges get logged. + */ + public void failCopyRequest(long index, String explanation) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - failCopyRequest: unable to get a valid connection!"); + return; + } + String requestSQL = + "UPDATE request_queue r " + "SET r.status=?, r.errstring=? " + "WHERE r.ID=?"; + String chunkSQL = + "UPDATE " + + "status_Copy s JOIN (request_queue r, request_Copy c) ON s.request_CopyID=c.ID AND c.request_queueID=r.ID " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=?"; + PreparedStatement request = null; + PreparedStatement chunk = null; + int failCode = StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_FAILURE); + try { + // start transaction + con.setAutoCommit(false); + + // update global status + request = con.prepareStatement(requestSQL); + logWarnings(con.getWarnings()); + request.setInt(1, failCode); + logWarnings(request.getWarnings()); + request.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(request.getWarnings()); + request.setLong(3, index); + logWarnings(request.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failCopyRequest executing: {}", request); + request.executeUpdate(); + logWarnings(request.getWarnings()); + + // update each chunk status + chunk = con.prepareStatement(chunkSQL); + logWarnings(con.getWarnings()); + chunk.setInt(1, failCode); + logWarnings(chunk.getWarnings()); + chunk.setString(2, explanation); // Prepared statement spares + // DB-specific String notation! + logWarnings(chunk.getWarnings()); + chunk.setLong(3, index); + logWarnings(chunk.getWarnings()); + log.trace("REQUEST SUMMARY DAO! failCopyRequest executing: {}", chunk); + chunk.executeUpdate(); + logWarnings(chunk.getWarnings()); + + // commit and finish transaction + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO! Unable to transit Copy request identified " + + "by ID {} to SRM_FAILURE! Error: {}\nRolling back...", + index, + e.getMessage(), + e); + rollback(con); + } finally { + close(request); + close(chunk); + } + } + + /** + * Method used to update the global status of the request identified by the RequestToken rt. It + * gets updated the supplied status, with the supplied explanation String. If the supplied request + * token does not exist, nothing happens. + */ + public void updateGlobalStatus(String rt, int status, String explanation) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - updateGlobalStatus: unable to get a valid connection!"); + return; + } + PreparedStatement update = null; + try { + update = + con.prepareStatement("UPDATE request_queue SET status=?, errstring=? 
WHERE r_token=?"); + logWarnings(con.getWarnings()); + update.setInt(1, status); + logWarnings(update.getWarnings()); + update.setString(2, explanation); + logWarnings(update.getWarnings()); + update.setString(3, rt); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - updateGlobalStatus: executing {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); + } finally { + close(update); + } + } + + public void updateGlobalStatusOnMatchingGlobalStatus( + TRequestToken requestToken, + TStatusCode expectedStatusCode, + TStatusCode newStatusCode, + String explanation) { + + if (!checkConnection()) { + log.error( + "REQUEST SUMMARY DAO - updateGlobalStatusOnMatchingGlobalStatus: " + + "unable to get a valid connection!"); + return; + } + PreparedStatement update = null; + try { + update = + con.prepareStatement( + "UPDATE request_queue SET status=?, errstring=? WHERE r_token=? AND status=?"); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(newStatusCode)); + logWarnings(update.getWarnings()); + update.setString(2, explanation); + logWarnings(update.getWarnings()); + update.setString(3, requestToken.toString()); + logWarnings(update.getWarnings()); + update.setInt(4, StatusCodeConverter.getInstance().toDB(expectedStatusCode)); + logWarnings(update.getWarnings()); + log.trace( + "REQUEST SUMMARY DAO - updateGlobalStatusOnMatchingGlobalStatus: " + "executing {}", + update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); + } finally { + close(update); + } + } + + /** + * Method used to update the global status of the request identified by the RequestToken rt. It + * gets updated the supplied status, with the supplied explanation String and pin and file + * lifetimes are updated in order to start the countdown from now. If the supplied request token + * does not exist, nothing happens. + */ + public void updateGlobalStatusPinFileLifetime(String rt, int status, String explanation) { + + if (!checkConnection()) { + log.error( + "REQUEST SUMMARY DAO - updateGlobalStatusPinFileLifetime: " + + "unable to get a valid connection!"); + return; + } + PreparedStatement update = null; + + String query = + "UPDATE request_queue SET status=?, errstring=?, " + + "pinLifetime=pinLifetime+(UNIX_TIMESTAMP()-UNIX_TIMESTAMP(timeStamp)) " + + "WHERE r_token=?"; + + try { + update = con.prepareStatement(query); + logWarnings(con.getWarnings()); + + update.setInt(1, status); + logWarnings(update.getWarnings()); + + update.setString(2, explanation); + logWarnings(update.getWarnings()); + + update.setString(3, rt); + logWarnings(update.getWarnings()); + + log.trace("REQUEST SUMMARY DAO - updateGlobalStatus: executing {}", update); + + update.executeUpdate(); + logWarnings(update.getWarnings()); + + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO: {}", e.getMessage(), e); + } finally { + close(update); + } + } + + /** + * Method used to transit the status of a request that is in SRM_REQUEST_QUEUED state, to + * SRM_ABORTED. All files associated with the request will also get their status changed to + * SRM_ABORTED. If the supplied token is null, or not found, or not in the SRM_REQUEST_QUEUED + * state, then nothing happens. 
+ */ + public void abortRequest(String rt) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - abortRequest: unable to get a valid connection!"); + return; + } + PreparedStatement update = null; + PreparedStatement query = null; + ResultSet rs = null; + try { + query = + con.prepareStatement( + "SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + logWarnings(query.getWarnings()); + query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); + logWarnings(query.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortRequest - {}", query); + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (rs.next()) { + long id = rs.getLong("ID"); + String type = rs.getString("config_RequestTypeID"); + update = con.prepareStatement("UPDATE request_queue SET status=?, errstring=? WHERE ID=?"); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortRequest - {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + close(update); + // update single chunk file statuses + TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); + String statusTable = null; + String requestTable = null; + String joinColumn = null; + if (rtyp != TRequestType.EMPTY) { + if (rtyp == TRequestType.PREPARE_TO_GET) { + statusTable = "status_Get"; + requestTable = "request_Get"; + joinColumn = "request_GetID"; + } else if (rtyp == TRequestType.PREPARE_TO_PUT) { + requestTable = "request_Put"; + statusTable = "status_Put"; + joinColumn = "request_PutID"; + } else if (rtyp == TRequestType.COPY) { + requestTable = "request_Copy"; + statusTable = "status_Copy"; + joinColumn = "request_CopyID"; + } else { + requestTable = "request_BoL"; + statusTable = "status_BoL"; + joinColumn = "request_BoLID"; + } + String auxstr = + "UPDATE " + + statusTable + + " s JOIN (request_queue r, " + + requestTable + + " t) ON (s." + + joinColumn + + "=t.ID AND t.request_queueID=r.ID) " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=?"; + update = con.prepareStatement(auxstr); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortRequest - {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } else { + log.error( + "REQUEST SUMMARY DAO - Unable to complete abortRequest: " + + "could not update file statuses because the request type could " + + "not be translated from the DB!"); + } + } + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - abortRequest: {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + close(update); + } + } + + /** + * Method used to transit the status of a request that is in SRM_REQUEST_INPROGRESS state, to + * SRM_ABORTED. All files associated with the request will also get their status changed to + * SRM_ABORTED. 
If the supplied token is null, or not found, or not in the SRM_REQUEST_INPROGRESS + * state, then nothing happens. + */ + public void abortInProgressRequest(String rt) { + + if (!checkConnection()) { + log.error( + "REQUEST SUMMARY DAO - abortInProgressRequest: unable to get " + "a valid connection!"); + return; + } + PreparedStatement update = null; + PreparedStatement query = null; + ResultSet rs = null; + try { + query = + con.prepareStatement( + "SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + logWarnings(query.getWarnings()); + query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + logWarnings(query.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", query); + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (rs.next()) { + // token found... + // get ID + long id = rs.getLong("ID"); + String type = rs.getString("config_RequestTypeID"); + // update global request status + update = con.prepareStatement("UPDATE request_queue SET status=?, errstring=? WHERE ID=?"); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + close(update); + // update single chunk file statuses + TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); + String statusTable = null; + String requestTable = null; + String joinColumn = null; + if (rtyp != TRequestType.EMPTY) { + if (rtyp == TRequestType.PREPARE_TO_GET) { + requestTable = "request_Get"; + statusTable = "status_Get"; + joinColumn = "request_GetID"; + } else if (rtyp == TRequestType.PREPARE_TO_PUT) { + requestTable = "request_Put"; + statusTable = "status_Put"; + joinColumn = "request_PutID"; + } else if (rtyp == TRequestType.COPY) { + requestTable = "request_Copy"; + statusTable = "status_Copy"; + joinColumn = "request_CopyID"; + } else { + requestTable = "request_BoL"; + statusTable = "status_BoL"; + joinColumn = "request_BoLID"; + } + String auxstr = + "UPDATE " + + statusTable + + " s JOIN (request_queue r, " + + requestTable + + " t ON s." + + joinColumn + + "=t.ID AND t.request_queueID=r.ID )" + + "SET s.statusCode=?, s.explanation=? 
" + + "WHERE r.ID=?"; + update = con.prepareStatement(auxstr); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortInProgressRequest - {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } else { + log.error( + "REQUEST SUMMARY DAO - Unable to complete " + + "abortInProgressRequest: could not update file statuses because " + + "the request type could not be translated from the DB!"); + } + } + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - abortInProgressRequest: {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + close(update); + } + } + + /** + * Method used to transit the status of chunks of a request that is in SRM_REQUEST_QUEUED state, + * to SRM_ABORTED. If the supplied token is null, or not found, or not in the SRM_REQUEST_QUEUED + * state, then nothing happens. + */ + public void abortChunksOfRequest(String rt, Collection surls) { + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - abortChunksOfRequest: unable to get a valid connection!"); + return; + } + PreparedStatement update = null; + PreparedStatement query = null; + ResultSet rs = null; + try { + query = + con.prepareStatement( + "SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + logWarnings(query.getWarnings()); + query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); + logWarnings(query.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortChunksOfRequest - {}", query); + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (rs.next()) { + long id = rs.getLong("ID"); + String type = rs.getString("config_RequestTypeID"); + // update single chunk file statuses + TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); + String statusTable = null; + String requestTable = null; + String joinColumn = null; + String surlColumn = null; + if (rtyp != TRequestType.EMPTY) { + if (rtyp == TRequestType.PREPARE_TO_GET) { + requestTable = "request_Get"; + statusTable = "status_Get"; + joinColumn = "request_GetID"; + surlColumn = "sourceSURL"; + } else if (rtyp == TRequestType.PREPARE_TO_PUT) { + requestTable = "request_Put"; + statusTable = "status_Put"; + joinColumn = "request_PutID"; + surlColumn = "targetSURL"; + } else if (rtyp == TRequestType.COPY) { + requestTable = "request_Copy"; + statusTable = "status_Copy"; + joinColumn = "request_CopyID"; + surlColumn = "targetSURL"; + } else { + requestTable = "request_BoL"; + statusTable = "status_BoL"; + joinColumn = "request_BoLID"; + surlColumn = "sourceSURL"; + } + String auxstr = + "UPDATE " + + statusTable + + " s JOIN (request_queue r, " + + requestTable + + " t ON s." + + joinColumn + + "=t.ID AND t.request_queueID=r.ID " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=? 
AND " + + surlColumn + + " IN " + + makeInString(surls); + update = con.prepareStatement(auxstr); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortChunksOfRequest - {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } else { + log.error( + "REQUEST SUMMARY DAO - Unable to complete " + + "abortChunksOfRequest: could not update file statuses because " + + "the request type could not be translated from the DB!"); + } + } + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - abortChunksOfRequest: {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + close(update); + } + } + + /** + * Method used to transit the status of chunks of a request that is in SRM_REQUEST_INPROGRESS + * state, to SRM_ABORTED. If the supplied token is null, or not found, or not in the + * SRM_REQUEST_INPROGRESS state, then nothing happens. + */ + public void abortChunksOfInProgressRequest(String rt, Collection surls) { + + if (!checkConnection()) { + log.error( + "REQUEST SUMMARY DAO - abortChunksOfInProgressRequest: unable " + + "to get a valid connection!"); + return; + } + PreparedStatement update = null; + PreparedStatement query = null; + ResultSet rs = null; + try { + query = + con.prepareStatement( + "SELECT ID,config_RequestTypeID FROM request_queue WHERE r_token=? AND status=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + logWarnings(query.getWarnings()); + query.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + logWarnings(query.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest - {}", query); + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (rs.next()) { + long id = rs.getLong("ID"); + String type = rs.getString("config_RequestTypeID"); + // update single chunk file statuses + TRequestType rtyp = RequestTypeConverter.getInstance().toSTORM(type); + String statusTable = null; + String requestTable = null; + String joinColumn = null; + String surlColumn = null; + if (rtyp != TRequestType.EMPTY) { + if (rtyp == TRequestType.PREPARE_TO_GET) { + requestTable = "request_Get"; + statusTable = "status_Get"; + joinColumn = "request_GetID"; + surlColumn = "sourceSURL"; + } else if (rtyp == TRequestType.PREPARE_TO_PUT) { + requestTable = "request_Put"; + statusTable = "status_Put"; + joinColumn = "request_PutID"; + surlColumn = "targetSURL"; + } else if (rtyp == TRequestType.COPY) { + requestTable = "request_Copy"; + statusTable = "status_Copy"; + joinColumn = "request_CopyID"; + surlColumn = "targetSURL"; + } else { + requestTable = "request_BoL"; + statusTable = "status_BoL"; + joinColumn = "request_BoLID"; + surlColumn = "sourceSURL"; + } + String auxstr = + "UPDATE " + + statusTable + + " s JOIN (request_queue r, " + + requestTable + + " t ON s." + + joinColumn + + "=t.ID AND t.request_queueID=r.ID " + + "SET s.statusCode=?, s.explanation=? " + + "WHERE r.ID=? 
AND " + + surlColumn + + " IN " + + makeInString(surls); + update = con.prepareStatement(auxstr); + logWarnings(con.getWarnings()); + update.setInt(1, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_ABORTED)); + logWarnings(update.getWarnings()); + update.setString(2, "User aborted request!"); + logWarnings(update.getWarnings()); + update.setLong(3, id); + logWarnings(update.getWarnings()); + log.trace("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest " + "- {}", update); + update.executeUpdate(); + logWarnings(update.getWarnings()); + } else { + log.error( + "REQUEST SUMMARY DAO - Unable to complete " + + "abortChunksOfInProgressRequest: could not update file statuses " + + "because the request type could not be translated from the DB!"); + } + } + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - abortChunksOfInProgressRequest: {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + close(update); + } + } + + /** Private method that returns a String of all SURLS in the collection of String. */ + private String makeInString(Collection c) { + + StringBuilder sb = new StringBuilder("("); + for (Iterator i = c.iterator(); i.hasNext(); ) { + sb.append(i.next()); + if (i.hasNext()) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** + * Method that returns the config_RequestTypeID field present in request_queue table, for the + * request with the specified request token rt. In case of any error, the empty String "" is + * returned. + */ + public String typeOf(String rt) { + + PreparedStatement query = null; + ResultSet rs = null; + String result = ""; + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - typeOf: unable to get a valid connection!"); + return result; + } + try { + query = + con.prepareStatement("SELECT config_RequestTypeID from request_queue WHERE r_token=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + logWarnings(query.getWarnings()); + log.trace("REQUEST SUMMARY DAO - typeOf - {}", query); + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (rs.next()) { + result = rs.getString("config_RequestTypeID"); + } + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - typeOf - {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + } + return result; + } + + /** + * Method that returns the config_RequestTypeID field present in request_queue table, for the + * request with the specified request token rt. In case of any error, the empty String "" is + * returned. 
+ */ + public RequestSummaryDataTO find(String rt) { + + PreparedStatement query = null; + ResultSet rs = null; + RequestSummaryDataTO to = null; + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - find: unable to get a valid connection!"); + return null; + } + try { + query = con.prepareStatement("SELECT * from request_queue WHERE r_token=?"); + logWarnings(con.getWarnings()); + query.setString(1, rt); + con.setAutoCommit(false); + + rs = query.executeQuery(); + logWarnings(query.getWarnings()); + if (!rs.first()) { + log.debug("No requests found with token {}", rt); + return null; + } + to = new RequestSummaryDataTO(); + to.setPrimaryKey(rs.getLong("ID")); + to.setRequestType(rs.getString("config_RequestTypeID")); + to.setClientDN(rs.getString("client_dn")); + to.setUserToken(rs.getString("u_token")); + to.setRetrytime(rs.getInt("retrytime")); + to.setPinLifetime(rs.getInt("pinLifetime")); + to.setSpaceToken(rs.getString("s_token")); + to.setStatus(rs.getInt("status")); + to.setErrstring(rs.getString("errstring")); + to.setRequestToken(rs.getString("r_token")); + to.setRemainingTotalTime(rs.getInt("remainingTotalTime")); + to.setFileLifetime(rs.getInt("fileLifetime")); + to.setNbreqfiles(rs.getInt("nbreqfiles")); + to.setNumOfCompleted(rs.getInt("numOfCompleted")); + to.setNumOfWaiting(rs.getInt("numOfWaiting")); + to.setNumOfFailed(rs.getInt("numOfFailed")); + to.setTimestamp(rs.getTimestamp("timeStamp")); + + java.sql.Blob blob = rs.getBlob("proxy"); + if (blob != null) { + byte[] bdata = blob.getBytes(1, (int) blob.length()); + to.setVomsAttributes(new String(bdata)); + } + to.setDeferredStartTime(rs.getInt("deferredStartTime")); + to.setRemainingDeferredStartTime(rs.getInt("remainingDeferredStartTime")); + + if (rs.next()) { + log.warn("More than a row matches token {}", rt); + } + close(rs); + close(query); + } catch (SQLException e) { + log.error("REQUEST SUMMARY DAO - find - {}", e.getMessage(), e); + } finally { + close(rs); + close(query); + } + return to; + } + + /** + * Method that purges expired requests: it only removes up to a fixed value of expired requests at + * a time. The value is configured and obtained from the configuration property getPurgeBatchSize. + * A List of Strings with the request tokens removed is returned. In order to completely remove + * all expired requests, simply keep invoking this method until an empty List is returned. This + * batch processing is needed because there could be millions of expired requests which are likely + * to result in out-of-memory problems. Notice that in case of errors only error messages get + * logged. An empty List is also returned. + */ + public List purgeExpiredRequests(long expiredRequestTime, int purgeSize) { + + PreparedStatement ps = null; + ResultSet rs = null; + List requestTokens = Lists.newArrayList(); + List ids = Lists.newArrayList(); + + if (!checkConnection()) { + log.error( + "REQUEST SUMMARY DAO - purgeExpiredRequests: unable to get a " + "valid connection!"); + return requestTokens; + } + + try { + // start transaction + con.setAutoCommit(false); + String stmt = + "SELECT ID, r_token FROM request_queue WHERE UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(timeStamp) > ? AND status <> ? AND status <> ? 
LIMIT ?"; + ps = con.prepareStatement(stmt); + ps.setLong(1, expiredRequestTime); + ps.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); + ps.setInt(3, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + ps.setInt(4, purgeSize); + logWarnings(con.getWarnings()); + log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", ps); + + rs = ps.executeQuery(); + logWarnings(ps.getWarnings()); + + while (rs.next()) { + requestTokens.add(rs.getString("r_token")); + ids.add(new Long(rs.getLong("ID"))); + } + + close(rs); + close(ps); + + if (!ids.isEmpty()) { + // REMOVE BATCH OF EXPIRED REQUESTS! + stmt = "DELETE FROM request_queue WHERE ID in " + makeWhereString(ids); + + ps = con.prepareStatement(stmt); + logWarnings(con.getWarnings()); + log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", stmt); + + int deleted = ps.executeUpdate(); + logWarnings(ps.getWarnings()); + if (deleted > 0) { + log.info( + "REQUEST SUMMARY DAO - purgeExpiredRequests - Deleted {} " + "expired requests.", + deleted); + } else { + log.trace( + "REQUEST SUMMARY DAO - purgeExpiredRequests - No deleted " + "expired requests."); + } + + close(ps); + + stmt = + "DELETE request_DirOption FROM request_DirOption " + + " LEFT JOIN request_Get ON request_DirOption.ID = request_Get.request_DirOptionID" + + " LEFT JOIN request_BoL ON request_DirOption.ID = request_BoL.request_DirOptionID " + + " LEFT JOIN request_Copy ON request_DirOption.ID = request_Copy.request_DirOptionID" + + " WHERE request_Copy.request_DirOptionID IS NULL AND" + + " request_Get.request_DirOptionID IS NULL AND" + + " request_BoL.request_DirOptionID IS NULL;"; + + ps = con.prepareStatement(stmt); + logWarnings(con.getWarnings()); + log.trace("REQUEST SUMMARY DAO - purgeExpiredRequests - {}", stmt); + deleted = ps.executeUpdate(); + logWarnings(ps.getWarnings()); + + if (deleted > 0) { + log.info( + "REQUEST SUMMARY DAO - purgeExpiredRequests - Deleted {} " + + "DirOption related to expired requests.", + deleted); + } else { + log.trace( + "REQUEST SUMMARY DAO - purgeExpiredRequests - No Deleted " + + "DirOption related to expired requests."); + } + close(ps); + } + // commit and finish transaction + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); + logWarnings(con.getWarnings()); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO - purgeExpiredRequests - Rolling back " + "because of error: {}", + e.getMessage(), + e); + rollback(con); + } finally { + close(rs); + close(ps); + } + return requestTokens; + } + + /** + * Retrieve the total number of expired requests. + * + * @return + */ + public int getNumberExpired() { + + int rowCount = 0; + + if (!checkConnection()) { + log.error("REQUEST SUMMARY DAO - getNumberExpired: unable to get a " + "valid connection!"); + return 0; + } + + PreparedStatement ps = null; + ResultSet rs = null; + + try { + // start transaction + con.setAutoCommit(false); + + String stmt = + "SELECT count(*) FROM request_queue WHERE UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(timeStamp) > ? AND status <> ? AND status <> ? 
"; + ps = con.prepareStatement(stmt); + ps.setLong(1, Configuration.getInstance().getExpiredRequestTime()); + ps.setInt(2, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_QUEUED)); + ps.setInt(3, StatusCodeConverter.getInstance().toDB(TStatusCode.SRM_REQUEST_INPROGRESS)); + + logWarnings(con.getWarnings()); + log.trace("REQUEST SUMMARY DAO - Number of expired requests: {}", ps); + rs = ps.executeQuery(); + logWarnings(ps.getWarnings()); + + // Get the number of rows from the result set + rs.next(); + rowCount = rs.getInt(1); + log.debug("Nr of expired requests is: {}", rowCount); + + close(rs); + close(ps); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO - purgeExpiredRequests - Rolling back " + "because of error: {}", + e.getMessage(), + e); + rollback(con); + } finally { + close(rs); + close(ps); + } + + return rowCount; + } + + /** Private method that returns a String of all IDs retrieved by the last SELECT. */ + private String makeWhereString(List rowids) { + + StringBuilder sb = new StringBuilder("("); + for (Iterator i = rowids.iterator(); i.hasNext(); ) { + sb.append(i.next()); + if (i.hasNext()) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** Auxiliary method that sets up the connection to the DB, as well as the prepared statement. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + logWarnings(con.getWarnings()); + response = con.isValid(0); + } catch (ClassNotFoundException | SQLException e) { + log.error("REQUEST SUMMARY DAO! Exception in setUpConnection! {}", e.getMessage(), e); + } + return response; + } + + /** Auxiliary method that tales down a connection to the DB. */ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + log.error( + "REQUEST SUMMARY DAO! Exception in takeDownConnection " + "method: {}", + e.getMessage(), + e); + } + } + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private boolean checkConnection() { + + boolean response = true; + if (reconnect) { + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method used to close a Statement */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "REQUEST SUMMARY DAO! Unable to close Statement {} - " + "Error: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + /** Auxiliary method used to close a ResultSet */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("REQUEST SUMMARY DAO! Unable to close ResultSet! Error: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method used to roll back a transaction */ + private void rollback(Connection con) { + + if (con != null) { + try { + con.rollback(); + logWarnings(con.getWarnings()); + log.error("PICKER2: roll back successful!"); + } catch (SQLException e2) { + log.error("PICKER2: roll back failed! {}", e2.getMessage(), e2); + } + } + } + + /** Private auxiliary method used to log SQLWarnings. 
*/ + private void logWarnings(SQLWarning warning) { + + if (warning != null) { + log.debug("REQUEST SUMMARY DAO: {}", warning); + while ((warning = warning.getNextWarning()) != null) { + log.debug("REQUEST SUMMARY DAO: {}", warning); + } + } + } } diff --git a/src/main/java/it/grid/storm/catalogs/RequestSummaryData.java b/src/main/java/it/grid/storm/catalogs/RequestSummaryData.java index 2fac1c20..7c770444 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestSummaryData.java +++ b/src/main/java/it/grid/storm/catalogs/RequestSummaryData.java @@ -1,527 +1,456 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TRequestType; import it.grid.storm.srm.types.TReturnStatus; -// import it.grid.storm.griduser.VomsGridUser; -import it.grid.storm.griduser.GridUserInterface; /** - * This class represents the SummaryData associated with the SRM request. It - * contains info about: Primary Key of request, TRequestType, TRequestToken, - * VomsGridUser. - * + * This class represents the SummaryData associated with the SRM request. It contains info about: + * Primary Key of request, TRequestType, TRequestToken, VomsGridUser. + * * @author EGRID - ICTP Trieste * @date March 18th, 2005 * @version 4.0 */ public class RequestSummaryData { - private TRequestType requestType = null; // request type of SRM request - private TRequestToken requestToken = null; // TRequestToken of SRM request - private GridUserInterface gu = null; // VomsGridUser that issued This request - private long id = -1; // long representing This object in persistence - - private String userToken = null; - private Integer retrytime = null; - private TLifeTimeInSeconds pinLifetime = null; - private String spaceToken = null; - private TReturnStatus status = null; - private String errstring = null; - private Integer remainingTotalTime = null; - private Integer nbreqfiles = null; - private Integer numOfCompleted = null; - private TLifeTimeInSeconds fileLifetime = null; - private Integer deferredStartTime = null; - private Integer numOfWaiting = null; - private Integer numOfFailed = null; - private Integer remainingDeferredStartTime = null; - - public RequestSummaryData(TRequestType rtype, TRequestToken rtoken, - GridUserInterface gu) throws InvalidRequestSummaryDataAttributesException { - - boolean ok = rtype != null && rtoken != null && gu != null; - if (!ok) - throw new InvalidRequestSummaryDataAttributesException(rtype, rtoken, gu); - this.requestType = rtype; - this.requestToken = rtoken; - this.gu = gu; - } - - /** - * Method that returns the type of SRM request - */ - public TRequestType requestType() { - - return requestType; - } - - /** - * Method that returns the SRM request TRequestToken - */ - public TRequestToken requestToken() { - - return requestToken; - } - - /** - * Method that returns the VomsGridUser that issued this request - */ - public GridUserInterface gridUser() { - - return gu; - } - - /** - * Method that returns a long corresponding to the identifier of This object - * in persistence. - */ - public long primaryKey() { - - return id; - } - - /** - * Method used to set the log corresponding to the identifier of This object - * in persistence. 
- */ - public void setPrimaryKey(long l) { - - this.id = l; - } - - /** - * @return the userToken - */ - public String getUserToken() { - - return userToken; - } - - /** - * @return the retrytime - */ - public Integer getRetrytime() { - - return retrytime; - } - - /** - * @return the pinLifetime - */ - public TLifeTimeInSeconds getPinLifetime() { - - return pinLifetime; - } - - /** - * @return the spaceToken - */ - public String getSpaceToken() { - - return spaceToken; - } - - /** - * @return the status - */ - public TReturnStatus getStatus() { - - return status; - } - - /** - * @return the errstring - */ - public String getErrstring() { + private TRequestType requestType = null; // request type of SRM request + private TRequestToken requestToken = null; // TRequestToken of SRM request + private GridUserInterface gu = null; // VomsGridUser that issued This request + private long id = -1; // long representing This object in persistence - return errstring; - } + private String userToken = null; + private Integer retrytime = null; + private TLifeTimeInSeconds pinLifetime = null; + private String spaceToken = null; + private TReturnStatus status = null; + private String errstring = null; + private Integer remainingTotalTime = null; + private Integer nbreqfiles = null; + private Integer numOfCompleted = null; + private TLifeTimeInSeconds fileLifetime = null; + private Integer deferredStartTime = null; + private Integer numOfWaiting = null; + private Integer numOfFailed = null; + private Integer remainingDeferredStartTime = null; - /** - * @return the remainingTotalTime - */ - public Integer getRemainingTotalTime() { + public RequestSummaryData(TRequestType rtype, TRequestToken rtoken, GridUserInterface gu) + throws InvalidRequestSummaryDataAttributesException { - return remainingTotalTime; - } + boolean ok = rtype != null && rtoken != null && gu != null; + if (!ok) throw new InvalidRequestSummaryDataAttributesException(rtype, rtoken, gu); + this.requestType = rtype; + this.requestToken = rtoken; + this.gu = gu; + } - /** - * @return the nbreqfiles - */ - public Integer getNbreqfiles() { + /** Method that returns the type of SRM request */ + public TRequestType requestType() { - return nbreqfiles; - } + return requestType; + } - /** - * @return the numOfCompleted - */ - public Integer getNumOfCompleted() { + /** Method that returns the SRM request TRequestToken */ + public TRequestToken requestToken() { - return numOfCompleted; - } + return requestToken; + } - /** - * @return the fileLifetime - */ - public TLifeTimeInSeconds getFileLifetime() { + /** Method that returns the VomsGridUser that issued this request */ + public GridUserInterface gridUser() { - return fileLifetime; - } + return gu; + } - /** - * @return the deferredStartTime - */ - public Integer getDeferredStartTime() { + /** Method that returns a long corresponding to the identifier of This object in persistence. */ + public long primaryKey() { - return deferredStartTime; - } + return id; + } - /** - * @return the numOfWaiting - */ - public Integer getNumOfWaiting() { + /** Method used to set the log corresponding to the identifier of This object in persistence. 
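* Until this setter is invoked the identifier keeps its default value of -1, i.e. no persistence identifier has been assigned yet.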
*/ + public void setPrimaryKey(long l) { - return numOfWaiting; - } + this.id = l; + } - /** - * @return the numOfFailed - */ - public Integer getNumOfFailed() { + /** @return the userToken */ + public String getUserToken() { - return numOfFailed; - } + return userToken; + } - /** - * @return the remainingDeferredStartTime - */ - public Integer getRemainingDeferredStartTime() { + /** @return the retrytime */ + public Integer getRetrytime() { - return remainingDeferredStartTime; - } + return retrytime; + } - public void setUserToken(String userToken) { + /** @return the pinLifetime */ + public TLifeTimeInSeconds getPinLifetime() { - this.userToken = userToken; - } + return pinLifetime; + } - public void setRetrytime(Integer retrytime) { + /** @return the spaceToken */ + public String getSpaceToken() { - this.retrytime = retrytime; + return spaceToken; + } - } + /** @return the status */ + public TReturnStatus getStatus() { - public void setPinLifetime(TLifeTimeInSeconds pinLifetime) { + return status; + } - this.pinLifetime = pinLifetime; + /** @return the errstring */ + public String getErrstring() { - } + return errstring; + } - public void setSpaceToken(String spaceToken) { + /** @return the remainingTotalTime */ + public Integer getRemainingTotalTime() { - this.spaceToken = spaceToken; + return remainingTotalTime; + } - } + /** @return the nbreqfiles */ + public Integer getNbreqfiles() { - public void setStatus(TReturnStatus status) { + return nbreqfiles; + } - this.status = status; + /** @return the numOfCompleted */ + public Integer getNumOfCompleted() { - } + return numOfCompleted; + } - public void setErrstring(String errstring) { + /** @return the fileLifetime */ + public TLifeTimeInSeconds getFileLifetime() { - this.errstring = errstring; + return fileLifetime; + } - } + /** @return the deferredStartTime */ + public Integer getDeferredStartTime() { - public void setRemainingTotalTime(Integer remainingTotalTime) { + return deferredStartTime; + } - this.remainingTotalTime = remainingTotalTime; + /** @return the numOfWaiting */ + public Integer getNumOfWaiting() { - } + return numOfWaiting; + } - public void setNbreqfiles(Integer nbreqfiles) { + /** @return the numOfFailed */ + public Integer getNumOfFailed() { - this.nbreqfiles = nbreqfiles; + return numOfFailed; + } - } + /** @return the remainingDeferredStartTime */ + public Integer getRemainingDeferredStartTime() { - public void setNumOfCompleted(Integer numOfCompleted) { + return remainingDeferredStartTime; + } - this.numOfCompleted = numOfCompleted; + public void setUserToken(String userToken) { - } + this.userToken = userToken; + } - public void setFileLifetime(TLifeTimeInSeconds fileLifetime) { + public void setRetrytime(Integer retrytime) { - this.fileLifetime = fileLifetime; + this.retrytime = retrytime; + } - } + public void setPinLifetime(TLifeTimeInSeconds pinLifetime) { - public void setDeferredStartTime(Integer deferredStartTime) { + this.pinLifetime = pinLifetime; + } - this.deferredStartTime = deferredStartTime; + public void setSpaceToken(String spaceToken) { - } + this.spaceToken = spaceToken; + } - public void setNumOfWaiting(Integer numOfWaiting) { + public void setStatus(TReturnStatus status) { - this.numOfWaiting = numOfWaiting; + this.status = status; + } - } + public void setErrstring(String errstring) { - public void setNumOfFailed(Integer numOfFailed) { + this.errstring = errstring; + } - this.numOfFailed = numOfFailed; + public void setRemainingTotalTime(Integer remainingTotalTime) { - } + 
this.remainingTotalTime = remainingTotalTime; + } - public void setRemainingDeferredStartTime(Integer remainingDeferredStartTime) { + public void setNbreqfiles(Integer nbreqfiles) { - this.remainingDeferredStartTime = remainingDeferredStartTime; + this.nbreqfiles = nbreqfiles; + } - } + public void setNumOfCompleted(Integer numOfCompleted) { - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("RequestSummaryData [requestType="); - builder.append(requestType); - builder.append(", requestToken="); - builder.append(requestToken); - builder.append(", gu="); - builder.append(gu); - builder.append(", id="); - builder.append(id); - builder.append(", userToken="); - builder.append(userToken); - builder.append(", retrytime="); - builder.append(retrytime); - builder.append(", pinLifetime="); - builder.append(pinLifetime); - builder.append(", spaceToken="); - builder.append(spaceToken); - builder.append(", status="); - builder.append(status); - builder.append(", errstring="); - builder.append(errstring); - builder.append(", remainingTotalTime="); - builder.append(remainingTotalTime); - builder.append(", nbreqfiles="); - builder.append(nbreqfiles); - builder.append(", numOfCompleted="); - builder.append(numOfCompleted); - builder.append(", fileLifetime="); - builder.append(fileLifetime); - builder.append(", deferredStartTime="); - builder.append(deferredStartTime); - builder.append(", numOfWaiting="); - builder.append(numOfWaiting); - builder.append(", numOfFailed="); - builder.append(numOfFailed); - builder.append(", remainingDeferredStartTime="); - builder.append(remainingDeferredStartTime); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result - + ((deferredStartTime == null) ? 0 : deferredStartTime.hashCode()); - result = prime * result + ((errstring == null) ? 0 : errstring.hashCode()); - result = prime * result - + ((fileLifetime == null) ? 0 : fileLifetime.hashCode()); - result = prime * result + ((gu == null) ? 0 : gu.hashCode()); - result = prime * result + (int) (id ^ (id >>> 32)); - result = prime * result - + ((nbreqfiles == null) ? 0 : nbreqfiles.hashCode()); - result = prime * result - + ((numOfCompleted == null) ? 0 : numOfCompleted.hashCode()); - result = prime * result - + ((numOfFailed == null) ? 0 : numOfFailed.hashCode()); - result = prime * result - + ((numOfWaiting == null) ? 0 : numOfWaiting.hashCode()); - result = prime * result - + ((pinLifetime == null) ? 0 : pinLifetime.hashCode()); - result = prime - * result - + ((remainingDeferredStartTime == null) ? 0 : remainingDeferredStartTime - .hashCode()); - result = prime * result - + ((remainingTotalTime == null) ? 0 : remainingTotalTime.hashCode()); - result = prime * result - + ((requestToken == null) ? 0 : requestToken.hashCode()); - result = prime * result - + ((requestType == null) ? 0 : requestType.hashCode()); - result = prime * result + ((retrytime == null) ? 0 : retrytime.hashCode()); - result = prime * result - + ((spaceToken == null) ? 0 : spaceToken.hashCode()); - result = prime * result + ((status == null) ? 0 : status.hashCode()); - result = prime * result + ((userToken == null) ? 
0 : userToken.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RequestSummaryData other = (RequestSummaryData) obj; - if (deferredStartTime == null) { - if (other.deferredStartTime != null) { - return false; - } - } else if (!deferredStartTime.equals(other.deferredStartTime)) { - return false; - } - if (errstring == null) { - if (other.errstring != null) { - return false; - } - } else if (!errstring.equals(other.errstring)) { - return false; - } - if (fileLifetime == null) { - if (other.fileLifetime != null) { - return false; - } - } else if (!fileLifetime.equals(other.fileLifetime)) { - return false; - } - if (gu == null) { - if (other.gu != null) { - return false; - } - } else if (!gu.equals(other.gu)) { - return false; - } - if (id != other.id) { - return false; - } - if (nbreqfiles == null) { - if (other.nbreqfiles != null) { - return false; - } - } else if (!nbreqfiles.equals(other.nbreqfiles)) { - return false; - } - if (numOfCompleted == null) { - if (other.numOfCompleted != null) { - return false; - } - } else if (!numOfCompleted.equals(other.numOfCompleted)) { - return false; - } - if (numOfFailed == null) { - if (other.numOfFailed != null) { - return false; - } - } else if (!numOfFailed.equals(other.numOfFailed)) { - return false; - } - if (numOfWaiting == null) { - if (other.numOfWaiting != null) { - return false; - } - } else if (!numOfWaiting.equals(other.numOfWaiting)) { - return false; - } - if (pinLifetime == null) { - if (other.pinLifetime != null) { - return false; - } - } else if (!pinLifetime.equals(other.pinLifetime)) { - return false; - } - if (remainingDeferredStartTime == null) { - if (other.remainingDeferredStartTime != null) { - return false; - } - } else if (!remainingDeferredStartTime - .equals(other.remainingDeferredStartTime)) { - return false; - } - if (remainingTotalTime == null) { - if (other.remainingTotalTime != null) { - return false; - } - } else if (!remainingTotalTime.equals(other.remainingTotalTime)) { - return false; - } - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - if (requestType != other.requestType) { - return false; - } - if (retrytime == null) { - if (other.retrytime != null) { - return false; - } - } else if (!retrytime.equals(other.retrytime)) { - return false; - } - if (spaceToken == null) { - if (other.spaceToken != null) { - return false; - } - } else if (!spaceToken.equals(other.spaceToken)) { - return false; - } - if (status == null) { - if (other.status != null) { - return false; - } - } else if (!status.equals(other.status)) { - return false; - } - if (userToken == null) { - if (other.userToken != null) { - return false; - } - } else if (!userToken.equals(other.userToken)) { - return false; - } - return true; - } + this.numOfCompleted = numOfCompleted; + } + public void setFileLifetime(TLifeTimeInSeconds fileLifetime) { + + this.fileLifetime = fileLifetime; + } + + public void setDeferredStartTime(Integer deferredStartTime) { + + this.deferredStartTime = deferredStartTime; + } + + public void setNumOfWaiting(Integer numOfWaiting) { + + this.numOfWaiting = numOfWaiting; + } + + public void setNumOfFailed(Integer numOfFailed) { + + this.numOfFailed = 
numOfFailed; + } + + public void setRemainingDeferredStartTime(Integer remainingDeferredStartTime) { + + this.remainingDeferredStartTime = remainingDeferredStartTime; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("RequestSummaryData [requestType="); + builder.append(requestType); + builder.append(", requestToken="); + builder.append(requestToken); + builder.append(", gu="); + builder.append(gu); + builder.append(", id="); + builder.append(id); + builder.append(", userToken="); + builder.append(userToken); + builder.append(", retrytime="); + builder.append(retrytime); + builder.append(", pinLifetime="); + builder.append(pinLifetime); + builder.append(", spaceToken="); + builder.append(spaceToken); + builder.append(", status="); + builder.append(status); + builder.append(", errstring="); + builder.append(errstring); + builder.append(", remainingTotalTime="); + builder.append(remainingTotalTime); + builder.append(", nbreqfiles="); + builder.append(nbreqfiles); + builder.append(", numOfCompleted="); + builder.append(numOfCompleted); + builder.append(", fileLifetime="); + builder.append(fileLifetime); + builder.append(", deferredStartTime="); + builder.append(deferredStartTime); + builder.append(", numOfWaiting="); + builder.append(numOfWaiting); + builder.append(", numOfFailed="); + builder.append(numOfFailed); + builder.append(", remainingDeferredStartTime="); + builder.append(remainingDeferredStartTime); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((deferredStartTime == null) ? 0 : deferredStartTime.hashCode()); + result = prime * result + ((errstring == null) ? 0 : errstring.hashCode()); + result = prime * result + ((fileLifetime == null) ? 0 : fileLifetime.hashCode()); + result = prime * result + ((gu == null) ? 0 : gu.hashCode()); + result = prime * result + (int) (id ^ (id >>> 32)); + result = prime * result + ((nbreqfiles == null) ? 0 : nbreqfiles.hashCode()); + result = prime * result + ((numOfCompleted == null) ? 0 : numOfCompleted.hashCode()); + result = prime * result + ((numOfFailed == null) ? 0 : numOfFailed.hashCode()); + result = prime * result + ((numOfWaiting == null) ? 0 : numOfWaiting.hashCode()); + result = prime * result + ((pinLifetime == null) ? 0 : pinLifetime.hashCode()); + result = + prime * result + + ((remainingDeferredStartTime == null) ? 0 : remainingDeferredStartTime.hashCode()); + result = prime * result + ((remainingTotalTime == null) ? 0 : remainingTotalTime.hashCode()); + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + result = prime * result + ((requestType == null) ? 0 : requestType.hashCode()); + result = prime * result + ((retrytime == null) ? 0 : retrytime.hashCode()); + result = prime * result + ((spaceToken == null) ? 0 : spaceToken.hashCode()); + result = prime * result + ((status == null) ? 0 : status.hashCode()); + result = prime * result + ((userToken == null) ? 
0 : userToken.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + RequestSummaryData other = (RequestSummaryData) obj; + if (deferredStartTime == null) { + if (other.deferredStartTime != null) { + return false; + } + } else if (!deferredStartTime.equals(other.deferredStartTime)) { + return false; + } + if (errstring == null) { + if (other.errstring != null) { + return false; + } + } else if (!errstring.equals(other.errstring)) { + return false; + } + if (fileLifetime == null) { + if (other.fileLifetime != null) { + return false; + } + } else if (!fileLifetime.equals(other.fileLifetime)) { + return false; + } + if (gu == null) { + if (other.gu != null) { + return false; + } + } else if (!gu.equals(other.gu)) { + return false; + } + if (id != other.id) { + return false; + } + if (nbreqfiles == null) { + if (other.nbreqfiles != null) { + return false; + } + } else if (!nbreqfiles.equals(other.nbreqfiles)) { + return false; + } + if (numOfCompleted == null) { + if (other.numOfCompleted != null) { + return false; + } + } else if (!numOfCompleted.equals(other.numOfCompleted)) { + return false; + } + if (numOfFailed == null) { + if (other.numOfFailed != null) { + return false; + } + } else if (!numOfFailed.equals(other.numOfFailed)) { + return false; + } + if (numOfWaiting == null) { + if (other.numOfWaiting != null) { + return false; + } + } else if (!numOfWaiting.equals(other.numOfWaiting)) { + return false; + } + if (pinLifetime == null) { + if (other.pinLifetime != null) { + return false; + } + } else if (!pinLifetime.equals(other.pinLifetime)) { + return false; + } + if (remainingDeferredStartTime == null) { + if (other.remainingDeferredStartTime != null) { + return false; + } + } else if (!remainingDeferredStartTime.equals(other.remainingDeferredStartTime)) { + return false; + } + if (remainingTotalTime == null) { + if (other.remainingTotalTime != null) { + return false; + } + } else if (!remainingTotalTime.equals(other.remainingTotalTime)) { + return false; + } + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + if (requestType != other.requestType) { + return false; + } + if (retrytime == null) { + if (other.retrytime != null) { + return false; + } + } else if (!retrytime.equals(other.retrytime)) { + return false; + } + if (spaceToken == null) { + if (other.spaceToken != null) { + return false; + } + } else if (!spaceToken.equals(other.spaceToken)) { + return false; + } + if (status == null) { + if (other.status != null) { + return false; + } + } else if (!status.equals(other.status)) { + return false; + } + if (userToken == null) { + if (other.userToken != null) { + return false; + } + } else if (!userToken.equals(other.userToken)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/catalogs/RequestSummaryDataTO.java b/src/main/java/it/grid/storm/catalogs/RequestSummaryDataTO.java index 84f7adeb..77aea1dc 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestSummaryDataTO.java +++ b/src/main/java/it/grid/storm/catalogs/RequestSummaryDataTO.java @@ -1,527 +1,477 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import java.sql.Timestamp; /** - * Class that represents data of an asynchrnous Request, regardless of whether - * it is a Put, Get or Copy, in the Persistence Layer: this is all raw data - * referring to the request proper, that is, String and primitive types. - * + * Class that represents data of an asynchrnous Request, regardless of whether it is a Put, Get or + * Copy, in the Persistence Layer: this is all raw data referring to the request proper, that is, + * String and primitive types. + * * @author EGRID ICTP * @version 2.0 * @date June 2005 */ public class RequestSummaryDataTO { - public static final String PTG_REQUEST_TYPE = "PTG"; - public static final String PTP_REQUEST_TYPE = "PTP"; - public static final String BOL_REQUEST_TYPE = "BOL"; - public static final String COPY_REQUEST_TYPE = "COP"; + public static final String PTG_REQUEST_TYPE = "PTG"; + public static final String PTP_REQUEST_TYPE = "PTP"; + public static final String BOL_REQUEST_TYPE = "BOL"; + public static final String COPY_REQUEST_TYPE = "COP"; - private long id = -1; // id of request in persistence - private String requestType = ""; // request type - private String requestToken = ""; // request token - private String clientDN = ""; // DN that issued request - private String vomsAttributes = ""; // String containing all VOMS attributes - private Timestamp timestamp = null; + private long id = -1; // id of request in persistence + private String requestType = ""; // request type + private String requestToken = ""; // request token + private String clientDN = ""; // DN that issued request + private String vomsAttributes = ""; // String containing all VOMS attributes + private Timestamp timestamp = null; - private boolean empty = true; - private String userToken = null; - private Integer retrytime = null; - private Integer pinLifetime = null; - private String spaceToken = null; - private Integer status = null; - private String errstring = null; - private Integer remainingTotalTime = null; - private Integer nbreqfiles = null; - private Integer numOfCompleted = null; - private Integer fileLifetime = null; - private Integer deferredStartTime = null; - private Integer numOfWaiting = null; - private Integer numOfFailed = null; - private Integer remainingDeferredStartTime = null; + private boolean empty = true; + private String userToken = null; + private Integer retrytime = null; + private Integer pinLifetime = null; + private String spaceToken = null; + private Integer status = null; + private String errstring = null; + private Integer remainingTotalTime = null; + private Integer nbreqfiles = null; + private Integer numOfCompleted = null; + private Integer fileLifetime = null; + private Integer deferredStartTime = null; + private Integer numOfWaiting = null; + private Integer numOfFailed = null; + private Integer remainingDeferredStartTime = null; - public boolean isEmpty() { + public boolean isEmpty() { - return empty; - } + return empty; + } - public long primaryKey() { + public long primaryKey() { - return id; - } + return id; + } - public void setPrimaryKey(long l) { + public void setPrimaryKey(long l) { - empty = false; - id = l; - } + empty = false; + id = l; + } - public String requestType() { + public String requestType() { - return requestType; - } + return requestType; + } - public void setRequestType(String s) { + public void 
setRequestType(String s) { - empty = false; - requestType = s; - } + empty = false; + requestType = s; + } - public String requestToken() { + public String requestToken() { - return requestToken; - } + return requestToken; + } - public void setRequestToken(String s) { + public void setRequestToken(String s) { - empty = false; - requestToken = s; - } + empty = false; + requestToken = s; + } - public String clientDN() { + public String clientDN() { - return clientDN; - } + return clientDN; + } - public void setClientDN(String s) { + public void setClientDN(String s) { - empty = false; - clientDN = s; - } + empty = false; + clientDN = s; + } - public String vomsAttributes() { + public String vomsAttributes() { - return vomsAttributes; - } + return vomsAttributes; + } - public void setVomsAttributes(String s) { + public void setVomsAttributes(String s) { - empty = false; - vomsAttributes = s; - } + empty = false; + vomsAttributes = s; + } - public Timestamp timestamp() { + public Timestamp timestamp() { - return timestamp; - } + return timestamp; + } - public void setTimestamp(Timestamp timestamp) { + public void setTimestamp(Timestamp timestamp) { - empty = false; - this.timestamp = timestamp; - } + empty = false; + this.timestamp = timestamp; + } - /** - * @return the userToken - */ - public String getUserToken() { + /** @return the userToken */ + public String getUserToken() { - return userToken; - } + return userToken; + } - /** - * @return the retrytime - */ - public Integer getRetrytime() { + /** @return the retrytime */ + public Integer getRetrytime() { - return retrytime; - } + return retrytime; + } - /** - * @return the pinLifetime - */ - public Integer getPinLifetime() { + /** @return the pinLifetime */ + public Integer getPinLifetime() { - return pinLifetime; - } + return pinLifetime; + } - /** - * @return the spaceToken - */ - public String getSpaceToken() { + /** @return the spaceToken */ + public String getSpaceToken() { - return spaceToken; - } + return spaceToken; + } - /** - * @return the status - */ - public Integer getStatus() { + /** @return the status */ + public Integer getStatus() { - return status; - } + return status; + } - /** - * @return the errstring - */ - public String getErrstring() { + /** @return the errstring */ + public String getErrstring() { - return errstring; - } + return errstring; + } - /** - * @return the remainingTotalTime - */ - public Integer getRemainingTotalTime() { + /** @return the remainingTotalTime */ + public Integer getRemainingTotalTime() { - return remainingTotalTime; - } + return remainingTotalTime; + } - /** - * @return the nbreqfiles - */ - public Integer getNbreqfiles() { + /** @return the nbreqfiles */ + public Integer getNbreqfiles() { - return nbreqfiles; - } + return nbreqfiles; + } - /** - * @return the numOfCompleted - */ - public Integer getNumOfCompleted() { + /** @return the numOfCompleted */ + public Integer getNumOfCompleted() { - return numOfCompleted; - } + return numOfCompleted; + } - /** - * @return the fileLifetime - */ - public Integer getFileLifetime() { + /** @return the fileLifetime */ + public Integer getFileLifetime() { - return fileLifetime; - } + return fileLifetime; + } - /** - * @return the deferredStartTime - */ - public Integer getDeferredStartTime() { + /** @return the deferredStartTime */ + public Integer getDeferredStartTime() { - return deferredStartTime; - } + return deferredStartTime; + } - /** - * @return the numOfWaiting - */ - public Integer getNumOfWaiting() { + /** @return the numOfWaiting */ + 
public Integer getNumOfWaiting() { - return numOfWaiting; - } + return numOfWaiting; + } - /** - * @return the numOfFailed - */ - public Integer getNumOfFailed() { + /** @return the numOfFailed */ + public Integer getNumOfFailed() { - return numOfFailed; - } + return numOfFailed; + } - /** - * @return the remainingDeferredStartTime - */ - public Integer getRemainingDeferredStartTime() { + /** @return the remainingDeferredStartTime */ + public Integer getRemainingDeferredStartTime() { - return remainingDeferredStartTime; - } + return remainingDeferredStartTime; + } - public void setUserToken(String userToken) { + public void setUserToken(String userToken) { - this.userToken = userToken; - } + this.userToken = userToken; + } - public void setRetrytime(Integer retrytime) { + public void setRetrytime(Integer retrytime) { - this.retrytime = retrytime; + this.retrytime = retrytime; + } - } + public void setPinLifetime(Integer pinLifetime) { - public void setPinLifetime(Integer pinLifetime) { + this.pinLifetime = pinLifetime; + } - this.pinLifetime = pinLifetime; + public void setSpaceToken(String spaceToken) { - } + this.spaceToken = spaceToken; + } - public void setSpaceToken(String spaceToken) { + public void setStatus(Integer status) { - this.spaceToken = spaceToken; + this.status = status; + } - } + public void setErrstring(String errstring) { - public void setStatus(Integer status) { + this.errstring = errstring; + } - this.status = status; + public void setRemainingTotalTime(Integer remainingTotalTime) { - } + this.remainingTotalTime = remainingTotalTime; + } - public void setErrstring(String errstring) { + public void setNbreqfiles(Integer nbreqfiles) { - this.errstring = errstring; + this.nbreqfiles = nbreqfiles; + } - } + public void setNumOfCompleted(Integer numOfCompleted) { - public void setRemainingTotalTime(Integer remainingTotalTime) { - - this.remainingTotalTime = remainingTotalTime; - - } - - public void setNbreqfiles(Integer nbreqfiles) { - - this.nbreqfiles = nbreqfiles; - - } - - public void setNumOfCompleted(Integer numOfCompleted) { - - this.numOfCompleted = numOfCompleted; - - } - - public void setFileLifetime(Integer fileLifetime) { - - this.fileLifetime = fileLifetime; - - } - - public void setDeferredStartTime(Integer deferredStartTime) { - - this.deferredStartTime = deferredStartTime; - - } - - public void setNumOfWaiting(Integer numOfWaiting) { - - this.numOfWaiting = numOfWaiting; - - } - - public void setNumOfFailed(Integer numOfFailed) { - - this.numOfFailed = numOfFailed; - - } - - public void setRemainingDeferredStartTime(Integer remainingDeferredStartTime) { - - this.remainingDeferredStartTime = remainingDeferredStartTime; - - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("RequestSummaryDataTO [id="); - builder.append(id); - builder.append(", requestType="); - builder.append(requestType); - builder.append(", requestToken="); - builder.append(requestToken); - builder.append(", clientDN="); - builder.append(clientDN); - builder.append(", vomsAttributes="); - builder.append(vomsAttributes); - builder.append(", timestamp="); - builder.append(timestamp); - builder.append(", empty="); - builder.append(empty); - builder.append(", userToken="); - builder.append(userToken); - builder.append(", retrytime="); - builder.append(retrytime); - builder.append(", pinLifetime="); - builder.append(pinLifetime); - builder.append(", spaceToken="); - 
builder.append(spaceToken); - builder.append(", status="); - builder.append(status); - builder.append(", errstring="); - builder.append(errstring); - builder.append(", remainingTotalTime="); - builder.append(remainingTotalTime); - builder.append(", nbreqfiles="); - builder.append(nbreqfiles); - builder.append(", numOfCompleted="); - builder.append(numOfCompleted); - builder.append(", fileLifetime="); - builder.append(fileLifetime); - builder.append(", deferredStartTime="); - builder.append(deferredStartTime); - builder.append(", numOfWaiting="); - builder.append(numOfWaiting); - builder.append(", numOfFailed="); - builder.append(numOfFailed); - builder.append(", remainingDeferredStartTime="); - builder.append(remainingDeferredStartTime); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + ((clientDN == null) ? 0 : clientDN.hashCode()); - result = prime * result - + (int) (deferredStartTime ^ (deferredStartTime >>> 32)); - result = prime * result + (empty ? 1231 : 1237); - result = prime * result + ((errstring == null) ? 0 : errstring.hashCode()); - result = prime * result + (int) (fileLifetime ^ (fileLifetime >>> 32)); - result = prime * result + (int) (id ^ (id >>> 32)); - result = prime * result + (int) (nbreqfiles ^ (nbreqfiles >>> 32)); - result = prime * result + (int) (numOfCompleted ^ (numOfCompleted >>> 32)); - result = prime * result + (int) (numOfFailed ^ (numOfFailed >>> 32)); - result = prime * result + (int) (numOfWaiting ^ (numOfWaiting >>> 32)); - result = prime * result + (int) (pinLifetime ^ (pinLifetime >>> 32)); - result = prime - * result - + (int) (remainingDeferredStartTime ^ (remainingDeferredStartTime >>> 32)); - result = prime * result - + (int) (remainingTotalTime ^ (remainingTotalTime >>> 32)); - result = prime * result - + ((requestToken == null) ? 0 : requestToken.hashCode()); - result = prime * result - + ((requestType == null) ? 0 : requestType.hashCode()); - result = prime * result + (int) (retrytime ^ (retrytime >>> 32)); - result = prime * result - + ((spaceToken == null) ? 0 : spaceToken.hashCode()); - result = prime * result + (int) (status ^ (status >>> 32)); - result = prime * result + ((timestamp == null) ? 0 : timestamp.hashCode()); - result = prime * result + ((userToken == null) ? 0 : userToken.hashCode()); - result = prime * result - + ((vomsAttributes == null) ? 
0 : vomsAttributes.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RequestSummaryDataTO other = (RequestSummaryDataTO) obj; - if (clientDN == null) { - if (other.clientDN != null) { - return false; - } - } else if (!clientDN.equals(other.clientDN)) { - return false; - } - if (deferredStartTime != other.deferredStartTime) { - return false; - } - if (empty != other.empty) { - return false; - } - if (errstring == null) { - if (other.errstring != null) { - return false; - } - } else if (!errstring.equals(other.errstring)) { - return false; - } - if (fileLifetime != other.fileLifetime) { - return false; - } - if (id != other.id) { - return false; - } - if (nbreqfiles != other.nbreqfiles) { - return false; - } - if (numOfCompleted != other.numOfCompleted) { - return false; - } - if (numOfFailed != other.numOfFailed) { - return false; - } - if (numOfWaiting != other.numOfWaiting) { - return false; - } - if (pinLifetime != other.pinLifetime) { - return false; - } - if (remainingDeferredStartTime != other.remainingDeferredStartTime) { - return false; - } - if (remainingTotalTime != other.remainingTotalTime) { - return false; - } - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - if (requestType == null) { - if (other.requestType != null) { - return false; - } - } else if (!requestType.equals(other.requestType)) { - return false; - } - if (retrytime != other.retrytime) { - return false; - } - if (spaceToken == null) { - if (other.spaceToken != null) { - return false; - } - } else if (!spaceToken.equals(other.spaceToken)) { - return false; - } - if (status != other.status) { - return false; - } - if (timestamp == null) { - if (other.timestamp != null) { - return false; - } - } else if (!timestamp.equals(other.timestamp)) { - return false; - } - if (userToken == null) { - if (other.userToken != null) { - return false; - } - } else if (!userToken.equals(other.userToken)) { - return false; - } - if (vomsAttributes == null) { - if (other.vomsAttributes != null) { - return false; - } - } else if (!vomsAttributes.equals(other.vomsAttributes)) { - return false; - } - return true; - } + this.numOfCompleted = numOfCompleted; + } + public void setFileLifetime(Integer fileLifetime) { + + this.fileLifetime = fileLifetime; + } + + public void setDeferredStartTime(Integer deferredStartTime) { + + this.deferredStartTime = deferredStartTime; + } + + public void setNumOfWaiting(Integer numOfWaiting) { + + this.numOfWaiting = numOfWaiting; + } + + public void setNumOfFailed(Integer numOfFailed) { + + this.numOfFailed = numOfFailed; + } + + public void setRemainingDeferredStartTime(Integer remainingDeferredStartTime) { + + this.remainingDeferredStartTime = remainingDeferredStartTime; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("RequestSummaryDataTO [id="); + builder.append(id); + builder.append(", requestType="); + builder.append(requestType); + builder.append(", requestToken="); + builder.append(requestToken); + builder.append(", clientDN="); + builder.append(clientDN); + builder.append(", vomsAttributes="); + 
builder.append(vomsAttributes); + builder.append(", timestamp="); + builder.append(timestamp); + builder.append(", empty="); + builder.append(empty); + builder.append(", userToken="); + builder.append(userToken); + builder.append(", retrytime="); + builder.append(retrytime); + builder.append(", pinLifetime="); + builder.append(pinLifetime); + builder.append(", spaceToken="); + builder.append(spaceToken); + builder.append(", status="); + builder.append(status); + builder.append(", errstring="); + builder.append(errstring); + builder.append(", remainingTotalTime="); + builder.append(remainingTotalTime); + builder.append(", nbreqfiles="); + builder.append(nbreqfiles); + builder.append(", numOfCompleted="); + builder.append(numOfCompleted); + builder.append(", fileLifetime="); + builder.append(fileLifetime); + builder.append(", deferredStartTime="); + builder.append(deferredStartTime); + builder.append(", numOfWaiting="); + builder.append(numOfWaiting); + builder.append(", numOfFailed="); + builder.append(numOfFailed); + builder.append(", remainingDeferredStartTime="); + builder.append(remainingDeferredStartTime); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((clientDN == null) ? 0 : clientDN.hashCode()); + result = prime * result + (int) (deferredStartTime ^ (deferredStartTime >>> 32)); + result = prime * result + (empty ? 1231 : 1237); + result = prime * result + ((errstring == null) ? 0 : errstring.hashCode()); + result = prime * result + (int) (fileLifetime ^ (fileLifetime >>> 32)); + result = prime * result + (int) (id ^ (id >>> 32)); + result = prime * result + (int) (nbreqfiles ^ (nbreqfiles >>> 32)); + result = prime * result + (int) (numOfCompleted ^ (numOfCompleted >>> 32)); + result = prime * result + (int) (numOfFailed ^ (numOfFailed >>> 32)); + result = prime * result + (int) (numOfWaiting ^ (numOfWaiting >>> 32)); + result = prime * result + (int) (pinLifetime ^ (pinLifetime >>> 32)); + result = + prime * result + (int) (remainingDeferredStartTime ^ (remainingDeferredStartTime >>> 32)); + result = prime * result + (int) (remainingTotalTime ^ (remainingTotalTime >>> 32)); + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + result = prime * result + ((requestType == null) ? 0 : requestType.hashCode()); + result = prime * result + (int) (retrytime ^ (retrytime >>> 32)); + result = prime * result + ((spaceToken == null) ? 0 : spaceToken.hashCode()); + result = prime * result + (int) (status ^ (status >>> 32)); + result = prime * result + ((timestamp == null) ? 0 : timestamp.hashCode()); + result = prime * result + ((userToken == null) ? 0 : userToken.hashCode()); + result = prime * result + ((vomsAttributes == null) ? 
0 : vomsAttributes.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + RequestSummaryDataTO other = (RequestSummaryDataTO) obj; + if (clientDN == null) { + if (other.clientDN != null) { + return false; + } + } else if (!clientDN.equals(other.clientDN)) { + return false; + } + if (deferredStartTime != other.deferredStartTime) { + return false; + } + if (empty != other.empty) { + return false; + } + if (errstring == null) { + if (other.errstring != null) { + return false; + } + } else if (!errstring.equals(other.errstring)) { + return false; + } + if (fileLifetime != other.fileLifetime) { + return false; + } + if (id != other.id) { + return false; + } + if (nbreqfiles != other.nbreqfiles) { + return false; + } + if (numOfCompleted != other.numOfCompleted) { + return false; + } + if (numOfFailed != other.numOfFailed) { + return false; + } + if (numOfWaiting != other.numOfWaiting) { + return false; + } + if (pinLifetime != other.pinLifetime) { + return false; + } + if (remainingDeferredStartTime != other.remainingDeferredStartTime) { + return false; + } + if (remainingTotalTime != other.remainingTotalTime) { + return false; + } + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + if (requestType == null) { + if (other.requestType != null) { + return false; + } + } else if (!requestType.equals(other.requestType)) { + return false; + } + if (retrytime != other.retrytime) { + return false; + } + if (spaceToken == null) { + if (other.spaceToken != null) { + return false; + } + } else if (!spaceToken.equals(other.spaceToken)) { + return false; + } + if (status != other.status) { + return false; + } + if (timestamp == null) { + if (other.timestamp != null) { + return false; + } + } else if (!timestamp.equals(other.timestamp)) { + return false; + } + if (userToken == null) { + if (other.userToken != null) { + return false; + } + } else if (!userToken.equals(other.userToken)) { + return false; + } + if (vomsAttributes == null) { + if (other.vomsAttributes != null) { + return false; + } + } else if (!vomsAttributes.equals(other.vomsAttributes)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/catalogs/RequestTypeConverter.java b/src/main/java/it/grid/storm/catalogs/RequestTypeConverter.java index aaaea597..0b3a10dd 100644 --- a/src/main/java/it/grid/storm/catalogs/RequestTypeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/RequestTypeConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -14,11 +13,9 @@ import static it.grid.storm.srm.types.TRequestType.PREPARE_TO_GET; import static it.grid.storm.srm.types.TRequestType.PREPARE_TO_PUT; -import java.util.Map; - import com.google.common.collect.Maps; - import it.grid.storm.srm.types.TRequestType; +import java.util.Map; /** * Package private auxiliary class used to convert between DB and StoRM object model representation @@ -40,9 +37,7 @@ private RequestTypeConverter() { dbToStorm.keySet().forEach(key -> stormToDb.put(dbToStorm.get(key), key)); } - /** - * Method that returns the only instance of RequestTypeConverter. - */ + /** Method that returns the only instance of RequestTypeConverter. */ public static RequestTypeConverter getInstance() { return c; @@ -55,8 +50,7 @@ public static RequestTypeConverter getInstance() { public String toDB(TRequestType rt) { String aux = stormToDb.get(rt); - if (aux == null) - return ""; + if (aux == null) return ""; return aux; } @@ -67,8 +61,7 @@ public String toDB(TRequestType rt) { public TRequestType toSTORM(String s) { TRequestType aux = dbToStorm.get(s); - if (aux == null) - return EMPTY; + if (aux == null) return EMPTY; return aux; } } diff --git a/src/main/java/it/grid/storm/catalogs/ReservedSpaceCatalog.java b/src/main/java/it/grid/storm/catalogs/ReservedSpaceCatalog.java index d1664cda..b5ec2d07 100644 --- a/src/main/java/it/grid/storm/catalogs/ReservedSpaceCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/ReservedSpaceCatalog.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * ReservedSpaceCatalog @@ -8,6 +7,7 @@ package it.grid.storm.catalogs; +import com.google.common.collect.Lists; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.persistence.DAOFactory; import it.grid.storm.persistence.PersistenceDirector; @@ -19,7 +19,6 @@ import it.grid.storm.srm.types.ArrayOfTSpaceToken; import it.grid.storm.srm.types.InvalidTSpaceTokenAttributesException; import it.grid.storm.srm.types.TSpaceToken; - import java.io.File; import java.util.Calendar; import java.util.Collection; @@ -29,657 +28,618 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - -/** - * - */ - +/** */ public class ReservedSpaceCatalog { - private static final Logger log = LoggerFactory - .getLogger(ReservedSpaceCatalog.class); - private static HashSet voSA_spaceTokenSet = new HashSet(); - private static HashMap voSA_UpdateTime = new HashMap(); - - private static final long NOT_INITIALIZED_SIZE_VALUE = -1L; - - private final DAOFactory daoFactory; - private StorageSpaceDAO ssDAO; - - /********************************************* - * STATIC METHODS - *********************************************/ - public static void addSpaceToken(TSpaceToken token) { - - voSA_spaceTokenSet.add(token); - voSA_UpdateTime.put(token, null); - } - - public static HashSet getTokenSet() { - - return voSA_spaceTokenSet; - } - - public static void clearTokenSet() { - - voSA_spaceTokenSet.clear(); - voSA_UpdateTime.clear(); - } - - public static void setUpdateTime(TSpaceToken token, Date updateTime) { - - if (voSA_UpdateTime.containsKey(token)) { - voSA_UpdateTime.put(token, updateTime); - } else { - log.warn("Failing while Trying to set 
update time in Catalog cache."); - } - } - - public static Date getUpdateTime(TSpaceToken token) { - - Date result = null; - if (voSA_UpdateTime.containsKey(token)) { - result = voSA_UpdateTime.get(token); - } else { - log.warn("Failing while Trying to set update time in Catalog cache."); - } - return result; - } - - /********************************************* - * CLASS METHODS - *********************************************/ - /** - * Default constructor - */ - public ReservedSpaceCatalog() { - - log.debug("Building Reserve Space Catalog..."); - // Binding to the persistence component - daoFactory = PersistenceDirector.getDAOFactory(); - } - - /** - * Basic method used to retrieve all the information about a StorageSpace - - * StorageSpace is selected by SpaceToken - * - * @param spaceToken - * TSpaceToken - * @return StorageSpaceData, null if no-one SS exists with the specified - * spaceToken - * @throws DataAccessException - */ - public StorageSpaceData getStorageSpace(TSpaceToken spaceToken) - throws TransferObjectDecodingException, DataAccessException { - - StorageSpaceData result = null; - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - StorageSpaceTO ssTO = ssDAO.getStorageSpaceByToken(spaceToken.getValue()); - log.debug("Storage Space retrieved by Token. "); - if (ssTO != null) { - try { - result = new StorageSpaceData(ssTO); - } catch (IllegalArgumentException e) { - log.error("Error building StorageSpaceData from StorageSpaceTO " - + "IllegalArgumentException: {}", e.getLocalizedMessage(), e); - throw new TransferObjectDecodingException( - "Unable to build StorageSpaceData from StorageSpaceTO"); - } - } else { - log.info("Unable to build StorageSpaceData. No StorageSpaceTO built " - + "from the DB"); - } - return result; - } - - /** - * Create a new StorageSpace entry into the DB. 
It is used for - STATIC Space - * Creation - DYNAMIC Space Reservation - * - * @param ssd - * @throws NoDataFoundException - * @throws InvalidRetrievedDataException - * @throws MultipleDataEntriesException - */ - public void addStorageSpace(StorageSpaceData ssd) throws DataAccessException { - - log.debug("ADD StorageSpace Start..."); - StorageSpaceTO ssTO = new StorageSpaceTO(ssd); - log.debug("Storage Space TO Created"); - ssTO.setUpdateTime(new Date()); - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - ssDAO.addStorageSpace(ssTO); - log.debug("StorageSpaceTO inserted in Persistence"); - } - - /** - * @param ssd - * @param updateTime - * - * @throws DataAccessException - */ - public void updateStorageSpace(StorageSpaceData ssd) throws DataAccessException { - - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - - StorageSpaceTO ssTO = new StorageSpaceTO(ssd); - ssTO.setCreated(null); // we don't want to update the creation timestamp - ssTO.setUpdateTime(new Date()); - - ssDAO.updateStorageSpace(ssTO); - log.debug("StorageSpaceTO updated in Persistence"); - } - - /** - * @param ssd - */ - public void updateStorageSpaceFreeSpace(StorageSpaceData ssd) - throws DataAccessException { - - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - StorageSpaceTO ssTO = new StorageSpaceTO(ssd); - log.debug("Storage Space TO Created"); - ssTO.setUpdateTime(new Date()); - ssDAO.updateStorageSpaceFreeSpace(ssTO); - log.debug("StorageSpaceTO updated in Persistence"); - - } - - /** - * @param ssd - * @throws NoDataFoundException - * @throws InvalidRetrievedDataException - * @throws MultipleDataEntriesException - */ - public void updateAllStorageSpace(StorageSpaceData ssd) - throws NoDataFoundException, InvalidRetrievedDataException, - MultipleDataEntriesException { - - updateAllStorageSpace(ssd, null); - } - - /** - * Update StorageSpace. This method is used to update the StorageSpace into - * the ReserveSpace Catalog. The update operation take place after a - * AbortRequest for a PrepareToPut operation done with the spaceToken.(With or - * without the size specified). - */ - - public void updateAllStorageSpace(StorageSpaceData ssd, Date updateTime) - throws NoDataFoundException, InvalidRetrievedDataException, - MultipleDataEntriesException { - - log.debug("UPDATE StorageSpace Start..."); - // Build StorageSpaceTO from SpaceData - StorageSpaceTO ssTO = new StorageSpaceTO(ssd); - log.debug("Storage Space TO Created"); - if (updateTime == null) { - // The update time of the information is now - ssTO.setUpdateTime(new Date()); - } else { - ssTO.setUpdateTime(updateTime); - } - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - - // Add the row to the persistence.. - try { - ssDAO.updateAllStorageSpace(ssTO); - log.debug("StorageSpaceTO updated in Persistence"); - } catch (DataAccessException daEx) { - log.error("Error while inserting new row in StorageSpace: {}", - daEx.getMessage(), daEx); - } - } - - /** - * @param desc - * @return - */ - public StorageSpaceData getStorageSpaceByAlias(String desc) { - - StorageSpaceData result = null; // new StorageSpaceData(); - log.debug("Retrieve Storage Space start... 
"); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - - // Get StorageSpaceTO form persistence - try { - Collection cl = ssDAO.getStorageSpaceByAliasOnly(desc); - if (cl != null && !cl.isEmpty()) { - log.debug("Storage Space retrieved by Token. "); - // Build the result - try { - result = new StorageSpaceData(cl.toArray(new StorageSpaceTO[0])[0]); - } catch (IllegalArgumentException e) { - log.error("unable to build StorageSpaceData from StorageSpaceTO " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - } - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpace: {}", daEx.getMessage(), - daEx); - } - - return result; - } - - /** - * Provides a list of storage spaces not initialized by comparing the used - * space stored against the well know not initialized value - * NOT_INITIALIZED_SIZE_VALUE - * - * @return SpaceData - */ - public List getStorageSpaceNotInitialized() { - - log.debug("Retrieve Storage Space not initialized start "); - List result = Lists.newLinkedList(); - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - // Get StorageSpaceTO form persistence - try { - Collection storagesSpaceTOCollection = ssDAO - .getStorageSpaceByUnavailableUsedSpace(NOT_INITIALIZED_SIZE_VALUE); - log.debug("Storage Space retrieved by not initialized used space. "); - for (StorageSpaceTO storagesSpaceTO : storagesSpaceTOCollection) { - if (storagesSpaceTO != null) { - try { - result.add(new StorageSpaceData(storagesSpaceTO)); - } catch (IllegalArgumentException e) { - log.error("unable to build StorageSpaceData from StorageSpaceTO. " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - } else { - log.warn("Received a collection of StorageSpaceTO containing null " - + "elements, skipping them"); - } - } - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpace", daEx); - } - return result; - } - - /** - * Provides a list of storage spaces not updated since the provided timestamp - * - * @param lastUpdateTimestamp - * @return - */ - - public List getStorageSpaceByLastUpdate( - Date lastUpdateTimestamp) { - - log.debug("Retrieve Storage Space not initialized start "); - LinkedList result = new LinkedList(); - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - // GetStorageSpaceTO form persistence - try { - Collection storagesSpaceTOCollection = ssDAO - .getStorageSpaceByPreviousLastUpdate(lastUpdateTimestamp); - log.debug("Storage Space retrieved by Token previous last update. 
"); - for (StorageSpaceTO storagesSpaceTO : storagesSpaceTOCollection) { - if (storagesSpaceTO != null) { - try { - result.add(new StorageSpaceData(storagesSpaceTO)); - } catch (IllegalArgumentException e) { - log.error("unable to build StorageSpaceData from StorageSpaceTO " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - } else { - log.warn("Received a collection of StorageSpaceTO containing null " - + "elements, skipping them"); - } - } - } catch (DataAccessException daEx) { - log.debug("Error while retrieving StorageSpace: {}", daEx.getMessage(), - daEx); - } - return result; - } - - /** - * - * @param user - * VomsGridUser - * @param spaceAlias - * String - * @return ArrayOfTSpaceToken - */ - public ArrayOfTSpaceToken getSpaceTokens(GridUserInterface user, - String spaceAlias) { - - ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); - - log.debug("Retrieving space tokens..."); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - - // Get StorageSpaceTO form persistence - try { - - Collection listOfStorageSpace = ssDAO.getStorageSpaceByOwner(user, - spaceAlias); - - int nItems = listOfStorageSpace.size(); - log.debug("getSpaceTokens : Number of Storage spaces retrieved with " - + "Alias '{}': {}", spaceAlias, nItems); - Iterator j_ssTO = listOfStorageSpace.iterator(); - - while (j_ssTO.hasNext()) { - StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); - try { - TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); - result.addTSpaceToken(spaceToken); - } catch (InvalidTSpaceTokenAttributesException ex2) { - log.error("Retrieved invalid Space token from DB"); - } - } - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), - daEx); - } catch (Exception e) { - log.error("Exception while retrieving Storage Space: {}", e.getMessage(), - e); - } - return result; - } - - /** - * This method is used for the VOspaceArea Check. - * - * @param spaceAlias - * @return - */ - - public ArrayOfTSpaceToken getSpaceTokensByAlias(String spaceAlias) { - - ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); - - log.debug("Retrieving space tokens..."); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - - // Get StorageSpaceTO form persistence - try { - Collection listOfStorageSpace = ssDAO - .getStorageSpaceByAliasOnly(spaceAlias); - - int nItems = listOfStorageSpace.size(); - log.debug("Number of Storage spaces retrieved: {}", nItems); - Iterator j_ssTO = listOfStorageSpace.iterator(); - - while (j_ssTO.hasNext()) { - StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); - try { - TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); - result.addTSpaceToken(spaceToken); - } catch (InvalidTSpaceTokenAttributesException ex2) { - log.error("Retrieved invalid Space token from DB"); - } - } - - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), - daEx); - } catch (Exception e) { - log.error("Error getting data! 
Error: {}", e.getMessage(), e); - } - return result; - } - - /** - * This method is used for the VOspaceArea Check. - * - * @param VOname - * @return - */ - - public ArrayOfTSpaceToken getSpaceTokensBySpaceType(String stype) { - - ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); - - log.debug("Retrieving space tokens..."); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - - // Get StorageSpaceTO form persistence - try { - Collection listOfStorageSpace = ssDAO.getStorageSpaceBySpaceType(stype); - - int nItems = listOfStorageSpace.size(); - log.debug("Number of Storage spaces retrieved: {}", nItems); - Iterator j_ssTO = listOfStorageSpace.iterator(); - - while (j_ssTO.hasNext()) { - StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); - try { - TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); - result.addTSpaceToken(spaceToken); - } catch (InvalidTSpaceTokenAttributesException ex2) { - log.error("Retrieved invalid Space token from DB"); - } - } - - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); - } catch (Exception e) { - log.error("Generic Error while retrieving StorageSpace: {}", e.getMessage(), e); - } - return result; - } - - // ************************ CHECH BELOW METHODS *************************** - - /** - * - * @param user - * GridUserInterface - * @param spaceToken - * TSpaceToken - * @return boolean - */ - public boolean release(GridUserInterface user, final TSpaceToken spaceToken) { - - log.debug("Delete storage spaceToken info from persistence: {}", spaceToken); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", - daEx.getMessage(), daEx); - } - boolean rowRemoved = true; - // Delete the row from persistence. - try { - ssDAO.removeStorageSpace(user, spaceToken.getValue()); - log.debug("spaceToken removed from DB."); - } catch (DataAccessException daEx) { - log.error("spaceToken not found in the DB: {}", spaceToken.getValue()); - rowRemoved = false; - } - return rowRemoved; - } - - /** - * Method that purges the catalog, removing expired space reservation. The - * spacefile with lifetime expired are removed from the file systems. 
- * - */ - public void purge() { - - log.debug("Space Garbage Collector start!"); - Calendar rightNow = Calendar.getInstance(); - - // Retrieve the Data Access Object from the factory - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), - daEx); - } - // Get the Collection of Space Resrvation Expired - Collection expiredSpaceTO; - try { - expiredSpaceTO = ssDAO.getExpired(rightNow.getTimeInMillis() / 1000); - } catch (DataAccessException e) { - // No space expired FOUND - log.debug("Space Garbage Collector: no space expired found."); - return; - } - - // For each entry expired - // 1) Delete the related space file - // 2) Remove the entry from the DB - - StorageSpaceTO spaceTO = null; - log.debug("Space Garbage Collector: Number of SpaceFile to remove {}.", - expiredSpaceTO.size()); - - for (Iterator i = expiredSpaceTO.iterator(); i.hasNext();) { - spaceTO = (StorageSpaceTO) i.next(); - // Deleteing space File - String spaceFileName = spaceTO.getSpaceFile(); - File sfile = new File(spaceFileName); - log.debug("Space Garbage Collector: SpaceFile to remove {}.", spaceFileName); - - if (sfile.delete()) { - log.debug("Space Garbage Collector: SpaceFile {} removed.", spaceFileName); - } else { - log.warn("Space Garbage Collector: problem removing {}", spaceFileName); - } - - // Removing space entry from the DB - try { - ssDAO.removeStorageSpace(spaceTO.getSpaceToken()); - } catch (DataAccessException e) { - log.warn("Space Garbage Collector: error removing space entry from catalog."); - } - - } - - } - - public boolean increaseUsedSpace(String spaceToken, Long usedSpaceToAdd) { - - log.debug("Increase {} the used space of storage spaceToken: {}", - usedSpaceToAdd, spaceToken); - - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", - daEx.getMessage(), daEx); - return false; - } - int n = 0; - try { - n = ssDAO.increaseUsedSpace(spaceToken, usedSpaceToAdd); - } catch (DataAccessException daEx) { - log.error( - "Error during the increase of used space for spaceToken {}: {}", - spaceToken, daEx.getMessage()); - return false; - } - if (n == 0) { - log.warn( - "No errors caught but it seems no used space updates done on space token {}", - spaceToken); - } - log.debug("{} increaseUsedSpace += {}", spaceToken, usedSpaceToAdd); - return n > 0; - } - - public boolean decreaseUsedSpace(String spaceToken, Long usedSpaceToRemove) { - - log.debug("Decrease {} the used space of storage spaceToken: {}", - usedSpaceToRemove, spaceToken); - - try { - ssDAO = daoFactory.getStorageSpaceDAO(); - log.debug("Storage Space DAO retrieved."); - } catch (DataAccessException daEx) { - log.error("Error while retrieving StorageSpaceDAO: {}", - daEx.getMessage(), daEx); - return false; - } - int n = 0; - try { - n = ssDAO.decreaseUsedSpace(spaceToken, usedSpaceToRemove); - } catch (DataAccessException daEx) { - log.error( - "Error during the decrease of used space for spaceToken {}: {}", - spaceToken, daEx.getMessage()); - return false; - } - if (n == 0) { - log.warn( - "No errors caught but it seems no used space updates done on space token {}", - spaceToken); - } - log.debug("{} decreaseUsedSpace -= {}", spaceToken, usedSpaceToRemove); - return n > 0; - } + private static final Logger log = 
LoggerFactory.getLogger(ReservedSpaceCatalog.class); + private static HashSet voSA_spaceTokenSet = new HashSet(); + private static HashMap voSA_UpdateTime = new HashMap(); + + private static final long NOT_INITIALIZED_SIZE_VALUE = -1L; + + private final DAOFactory daoFactory; + private StorageSpaceDAO ssDAO; + + /** + * ******************************************* STATIC METHODS + * ******************************************* + */ + public static void addSpaceToken(TSpaceToken token) { + + voSA_spaceTokenSet.add(token); + voSA_UpdateTime.put(token, null); + } + + public static HashSet getTokenSet() { + + return voSA_spaceTokenSet; + } + + public static void clearTokenSet() { + + voSA_spaceTokenSet.clear(); + voSA_UpdateTime.clear(); + } + + public static void setUpdateTime(TSpaceToken token, Date updateTime) { + + if (voSA_UpdateTime.containsKey(token)) { + voSA_UpdateTime.put(token, updateTime); + } else { + log.warn("Failing while Trying to set update time in Catalog cache."); + } + } + + public static Date getUpdateTime(TSpaceToken token) { + + Date result = null; + if (voSA_UpdateTime.containsKey(token)) { + result = voSA_UpdateTime.get(token); + } else { + log.warn("Failing while Trying to set update time in Catalog cache."); + } + return result; + } + + /** + * ******************************************* CLASS METHODS + * ******************************************* + */ + /** Default constructor */ + public ReservedSpaceCatalog() { + + log.debug("Building Reserve Space Catalog..."); + // Binding to the persistence component + daoFactory = PersistenceDirector.getDAOFactory(); + } + + /** + * Basic method used to retrieve all the information about a StorageSpace - StorageSpace is + * selected by SpaceToken + * + * @param spaceToken TSpaceToken + * @return StorageSpaceData, null if no-one SS exists with the specified spaceToken + * @throws DataAccessException + */ + public StorageSpaceData getStorageSpace(TSpaceToken spaceToken) + throws TransferObjectDecodingException, DataAccessException { + + StorageSpaceData result = null; + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + StorageSpaceTO ssTO = ssDAO.getStorageSpaceByToken(spaceToken.getValue()); + log.debug("Storage Space retrieved by Token. "); + if (ssTO != null) { + try { + result = new StorageSpaceData(ssTO); + } catch (IllegalArgumentException e) { + log.error( + "Error building StorageSpaceData from StorageSpaceTO " + "IllegalArgumentException: {}", + e.getLocalizedMessage(), + e); + throw new TransferObjectDecodingException( + "Unable to build StorageSpaceData from StorageSpaceTO"); + } + } else { + log.info("Unable to build StorageSpaceData. No StorageSpaceTO built " + "from the DB"); + } + return result; + } + + /** + * Create a new StorageSpace entry into the DB. 
It is used for - STATIC Space Creation - DYNAMIC + * Space Reservation + * + * @param ssd + * @throws NoDataFoundException + * @throws InvalidRetrievedDataException + * @throws MultipleDataEntriesException + */ + public void addStorageSpace(StorageSpaceData ssd) throws DataAccessException { + + log.debug("ADD StorageSpace Start..."); + StorageSpaceTO ssTO = new StorageSpaceTO(ssd); + log.debug("Storage Space TO Created"); + ssTO.setUpdateTime(new Date()); + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + ssDAO.addStorageSpace(ssTO); + log.debug("StorageSpaceTO inserted in Persistence"); + } + + /** + * @param ssd + * @param updateTime + * @throws DataAccessException + */ + public void updateStorageSpace(StorageSpaceData ssd) throws DataAccessException { + + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + + StorageSpaceTO ssTO = new StorageSpaceTO(ssd); + ssTO.setCreated(null); // we don't want to update the creation timestamp + ssTO.setUpdateTime(new Date()); + + ssDAO.updateStorageSpace(ssTO); + log.debug("StorageSpaceTO updated in Persistence"); + } + + /** @param ssd */ + public void updateStorageSpaceFreeSpace(StorageSpaceData ssd) throws DataAccessException { + + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + StorageSpaceTO ssTO = new StorageSpaceTO(ssd); + log.debug("Storage Space TO Created"); + ssTO.setUpdateTime(new Date()); + ssDAO.updateStorageSpaceFreeSpace(ssTO); + log.debug("StorageSpaceTO updated in Persistence"); + } + + /** + * @param ssd + * @throws NoDataFoundException + * @throws InvalidRetrievedDataException + * @throws MultipleDataEntriesException + */ + public void updateAllStorageSpace(StorageSpaceData ssd) + throws NoDataFoundException, InvalidRetrievedDataException, MultipleDataEntriesException { + + updateAllStorageSpace(ssd, null); + } + + /** + * Update StorageSpace. This method is used to update the StorageSpace into the ReserveSpace + * Catalog. The update operation take place after a AbortRequest for a PrepareToPut operation done + * with the spaceToken.(With or without the size specified). + */ + public void updateAllStorageSpace(StorageSpaceData ssd, Date updateTime) + throws NoDataFoundException, InvalidRetrievedDataException, MultipleDataEntriesException { + + log.debug("UPDATE StorageSpace Start..."); + // Build StorageSpaceTO from SpaceData + StorageSpaceTO ssTO = new StorageSpaceTO(ssd); + log.debug("Storage Space TO Created"); + if (updateTime == null) { + // The update time of the information is now + ssTO.setUpdateTime(new Date()); + } else { + ssTO.setUpdateTime(updateTime); + } + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + + // Add the row to the persistence.. + try { + ssDAO.updateAllStorageSpace(ssTO); + log.debug("StorageSpaceTO updated in Persistence"); + } catch (DataAccessException daEx) { + log.error("Error while inserting new row in StorageSpace: {}", daEx.getMessage(), daEx); + } + } + + /** + * @param desc + * @return + */ + public StorageSpaceData getStorageSpaceByAlias(String desc) { + + StorageSpaceData result = null; // new StorageSpaceData(); + log.debug("Retrieve Storage Space start... 
"); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + + // Get StorageSpaceTO form persistence + try { + Collection cl = ssDAO.getStorageSpaceByAliasOnly(desc); + if (cl != null && !cl.isEmpty()) { + log.debug("Storage Space retrieved by Token. "); + // Build the result + try { + result = new StorageSpaceData(cl.toArray(new StorageSpaceTO[0])[0]); + } catch (IllegalArgumentException e) { + log.error( + "unable to build StorageSpaceData from StorageSpaceTO " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + } + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); + } + + return result; + } + + /** + * Provides a list of storage spaces not initialized by comparing the used + * space stored against the well know not initialized value + * NOT_INITIALIZED_SIZE_VALUE + * + * @return SpaceData + */ + public List getStorageSpaceNotInitialized() { + + log.debug("Retrieve Storage Space not initialized start "); + List result = Lists.newLinkedList(); + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + // Get StorageSpaceTO form persistence + try { + Collection storagesSpaceTOCollection = + ssDAO.getStorageSpaceByUnavailableUsedSpace(NOT_INITIALIZED_SIZE_VALUE); + log.debug("Storage Space retrieved by not initialized used space. "); + for (StorageSpaceTO storagesSpaceTO : storagesSpaceTOCollection) { + if (storagesSpaceTO != null) { + try { + result.add(new StorageSpaceData(storagesSpaceTO)); + } catch (IllegalArgumentException e) { + log.error( + "unable to build StorageSpaceData from StorageSpaceTO. " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + } else { + log.warn( + "Received a collection of StorageSpaceTO containing null " + + "elements, skipping them"); + } + } + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpace", daEx); + } + return result; + } + + /** + * Provides a list of storage spaces not updated since the provided timestamp + * + * @param lastUpdateTimestamp + * @return + */ + public List getStorageSpaceByLastUpdate(Date lastUpdateTimestamp) { + + log.debug("Retrieve Storage Space not initialized start "); + LinkedList result = new LinkedList(); + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + // GetStorageSpaceTO form persistence + try { + Collection storagesSpaceTOCollection = + ssDAO.getStorageSpaceByPreviousLastUpdate(lastUpdateTimestamp); + log.debug("Storage Space retrieved by Token previous last update. 
"); + for (StorageSpaceTO storagesSpaceTO : storagesSpaceTOCollection) { + if (storagesSpaceTO != null) { + try { + result.add(new StorageSpaceData(storagesSpaceTO)); + } catch (IllegalArgumentException e) { + log.error( + "unable to build StorageSpaceData from StorageSpaceTO " + + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + } else { + log.warn( + "Received a collection of StorageSpaceTO containing null " + + "elements, skipping them"); + } + } + } catch (DataAccessException daEx) { + log.debug("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); + } + return result; + } + + /** + * @param user VomsGridUser + * @param spaceAlias String + * @return ArrayOfTSpaceToken + */ + public ArrayOfTSpaceToken getSpaceTokens(GridUserInterface user, String spaceAlias) { + + ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); + + log.debug("Retrieving space tokens..."); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + + // Get StorageSpaceTO form persistence + try { + + Collection listOfStorageSpace = ssDAO.getStorageSpaceByOwner(user, spaceAlias); + + int nItems = listOfStorageSpace.size(); + log.debug( + "getSpaceTokens : Number of Storage spaces retrieved with " + "Alias '{}': {}", + spaceAlias, + nItems); + Iterator j_ssTO = listOfStorageSpace.iterator(); + + while (j_ssTO.hasNext()) { + StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); + try { + TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); + result.addTSpaceToken(spaceToken); + } catch (InvalidTSpaceTokenAttributesException ex2) { + log.error("Retrieved invalid Space token from DB"); + } + } + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); + } catch (Exception e) { + log.error("Exception while retrieving Storage Space: {}", e.getMessage(), e); + } + return result; + } + + /** + * This method is used for the VOspaceArea Check. + * + * @param spaceAlias + * @return + */ + public ArrayOfTSpaceToken getSpaceTokensByAlias(String spaceAlias) { + + ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); + + log.debug("Retrieving space tokens..."); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + + // Get StorageSpaceTO form persistence + try { + Collection listOfStorageSpace = ssDAO.getStorageSpaceByAliasOnly(spaceAlias); + + int nItems = listOfStorageSpace.size(); + log.debug("Number of Storage spaces retrieved: {}", nItems); + Iterator j_ssTO = listOfStorageSpace.iterator(); + + while (j_ssTO.hasNext()) { + StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); + try { + TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); + result.addTSpaceToken(spaceToken); + } catch (InvalidTSpaceTokenAttributesException ex2) { + log.error("Retrieved invalid Space token from DB"); + } + } + + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); + } catch (Exception e) { + log.error("Error getting data! 
Error: {}", e.getMessage(), e); + } + return result; + } + + /** + * This method is used for the VOspaceArea Check. + * + * @param VOname + * @return + */ + public ArrayOfTSpaceToken getSpaceTokensBySpaceType(String stype) { + + ArrayOfTSpaceToken result = new ArrayOfTSpaceToken(); + + log.debug("Retrieving space tokens..."); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + + // Get StorageSpaceTO form persistence + try { + Collection listOfStorageSpace = ssDAO.getStorageSpaceBySpaceType(stype); + + int nItems = listOfStorageSpace.size(); + log.debug("Number of Storage spaces retrieved: {}", nItems); + Iterator j_ssTO = listOfStorageSpace.iterator(); + + while (j_ssTO.hasNext()) { + StorageSpaceTO ssTO = (StorageSpaceTO) j_ssTO.next(); + try { + TSpaceToken spaceToken = TSpaceToken.make(ssTO.getSpaceToken()); + result.addTSpaceToken(spaceToken); + } catch (InvalidTSpaceTokenAttributesException ex2) { + log.error("Retrieved invalid Space token from DB"); + } + } + + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpace: {}", daEx.getMessage(), daEx); + } catch (Exception e) { + log.error("Generic Error while retrieving StorageSpace: {}", e.getMessage(), e); + } + return result; + } + + // ************************ CHECH BELOW METHODS *************************** + + /** + * @param user GridUserInterface + * @param spaceToken TSpaceToken + * @return boolean + */ + public boolean release(GridUserInterface user, final TSpaceToken spaceToken) { + + log.debug("Delete storage spaceToken info from persistence: {}", spaceToken); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + boolean rowRemoved = true; + // Delete the row from persistence. + try { + ssDAO.removeStorageSpace(user, spaceToken.getValue()); + log.debug("spaceToken removed from DB."); + } catch (DataAccessException daEx) { + log.error("spaceToken not found in the DB: {}", spaceToken.getValue()); + rowRemoved = false; + } + return rowRemoved; + } + + /** + * Method that purges the catalog, removing expired space reservation. The spacefile with lifetime + * expired are removed from the file systems. 
+ */ + public void purge() { + + log.debug("Space Garbage Collector start!"); + Calendar rightNow = Calendar.getInstance(); + + // Retrieve the Data Access Object from the factory + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + } + // Get the Collection of Space Resrvation Expired + Collection expiredSpaceTO; + try { + expiredSpaceTO = ssDAO.getExpired(rightNow.getTimeInMillis() / 1000); + } catch (DataAccessException e) { + // No space expired FOUND + log.debug("Space Garbage Collector: no space expired found."); + return; + } + + // For each entry expired + // 1) Delete the related space file + // 2) Remove the entry from the DB + + StorageSpaceTO spaceTO = null; + log.debug("Space Garbage Collector: Number of SpaceFile to remove {}.", expiredSpaceTO.size()); + + for (Iterator i = expiredSpaceTO.iterator(); i.hasNext(); ) { + spaceTO = (StorageSpaceTO) i.next(); + // Deleteing space File + String spaceFileName = spaceTO.getSpaceFile(); + File sfile = new File(spaceFileName); + log.debug("Space Garbage Collector: SpaceFile to remove {}.", spaceFileName); + + if (sfile.delete()) { + log.debug("Space Garbage Collector: SpaceFile {} removed.", spaceFileName); + } else { + log.warn("Space Garbage Collector: problem removing {}", spaceFileName); + } + + // Removing space entry from the DB + try { + ssDAO.removeStorageSpace(spaceTO.getSpaceToken()); + } catch (DataAccessException e) { + log.warn("Space Garbage Collector: error removing space entry from catalog."); + } + } + } + + public boolean increaseUsedSpace(String spaceToken, Long usedSpaceToAdd) { + + log.debug("Increase {} the used space of storage spaceToken: {}", usedSpaceToAdd, spaceToken); + + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + return false; + } + int n = 0; + try { + n = ssDAO.increaseUsedSpace(spaceToken, usedSpaceToAdd); + } catch (DataAccessException daEx) { + log.error( + "Error during the increase of used space for spaceToken {}: {}", + spaceToken, + daEx.getMessage()); + return false; + } + if (n == 0) { + log.warn( + "No errors caught but it seems no used space updates done on space token {}", spaceToken); + } + log.debug("{} increaseUsedSpace += {}", spaceToken, usedSpaceToAdd); + return n > 0; + } + + public boolean decreaseUsedSpace(String spaceToken, Long usedSpaceToRemove) { + + log.debug( + "Decrease {} the used space of storage spaceToken: {}", usedSpaceToRemove, spaceToken); + + try { + ssDAO = daoFactory.getStorageSpaceDAO(); + log.debug("Storage Space DAO retrieved."); + } catch (DataAccessException daEx) { + log.error("Error while retrieving StorageSpaceDAO: {}", daEx.getMessage(), daEx); + return false; + } + int n = 0; + try { + n = ssDAO.decreaseUsedSpace(spaceToken, usedSpaceToRemove); + } catch (DataAccessException daEx) { + log.error( + "Error during the decrease of used space for spaceToken {}: {}", + spaceToken, + daEx.getMessage()); + return false; + } + if (n == 0) { + log.warn( + "No errors caught but it seems no used space updates done on space token {}", spaceToken); + } + log.debug("{} decreaseUsedSpace -= {}", spaceToken, usedSpaceToRemove); + return n > 0; + } } diff --git a/src/main/java/it/grid/storm/catalogs/SizeInBytesIntConverter.java 
b/src/main/java/it/grid/storm/catalogs/SizeInBytesIntConverter.java index 75b94005..ad4b5bb1 100644 --- a/src/main/java/it/grid/storm/catalogs/SizeInBytesIntConverter.java +++ b/src/main/java/it/grid/storm/catalogs/SizeInBytesIntConverter.java @@ -1,58 +1,49 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TSizeInBytes; /** - * Class that handles DB representation of a TSizeInBytes, in particular it - * takes care of the NULL logic of the DB: 0/null are used to mean an empty - * field, whereas StoRM Object model uses the type TSizeInBytes.makeEmpty(); - * moreover StoRM does accept 0 as a valid TSizeInBytes, so it _is_ important to - * use this converter! - * + * Class that handles DB representation of a TSizeInBytes, in particular it takes care of the NULL + * logic of the DB: 0/null are used to mean an empty field, whereas StoRM Object model uses the type + * TSizeInBytes.makeEmpty(); moreover StoRM does accept 0 as a valid TSizeInBytes, so it _is_ + * important to use this converter! + * * @author EGRID ICTP * @version 2.0 * @date July 2005 */ public class SizeInBytesIntConverter { - private static SizeInBytesIntConverter stc = new SizeInBytesIntConverter(); + private static SizeInBytesIntConverter stc = new SizeInBytesIntConverter(); - private SizeInBytesIntConverter() { + private SizeInBytesIntConverter() {} - } + /** Method that returns the only instance of SizeInBytesIntConverter */ + public static SizeInBytesIntConverter getInstance() { - /** - * Method that returns the only instance of SizeInBytesIntConverter - */ - public static SizeInBytesIntConverter getInstance() { + return stc; + } - return stc; - } + /** + * Method that transaltes the Empty TSizeInBytes into the empty representation of DB which is 0. + * Any other int is left as is. + */ + public long toDB(long s) { - /** - * Method that transaltes the Empty TSizeInBytes into the empty representation - * of DB which is 0. Any other int is left as is. - */ - public long toDB(long s) { + if (s == TSizeInBytes.makeEmpty().value()) return 0; + return s; + } - if (s == TSizeInBytes.makeEmpty().value()) - return 0; - return s; - } + /** + * Method that returns the int as is, except if it is 0 which DB interprests as empty field: in + * that case it then returns the Empty TSizeInBytes int representation. + */ + public long toStoRM(long s) { - /** - * Method that returns the int as is, except if it is 0 which DB interprests - * as empty field: in that case it then returns the Empty TSizeInBytes int - * representation. - */ - public long toStoRM(long s) { - - if (s == 0) - return TSizeInBytes.makeEmpty().value(); - return s; - } + if (s == 0) return TSizeInBytes.makeEmpty().value(); + return s; + } } diff --git a/src/main/java/it/grid/storm/catalogs/SpaceTokenStringConverter.java b/src/main/java/it/grid/storm/catalogs/SpaceTokenStringConverter.java index 4aacf380..95f6aabe 100644 --- a/src/main/java/it/grid/storm/catalogs/SpaceTokenStringConverter.java +++ b/src/main/java/it/grid/storm/catalogs/SpaceTokenStringConverter.java @@ -1,57 +1,49 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TSpaceToken; /** - * Class that handles DPM DB representation of a SpaceToken, in particular it - * takes care of the NULL/EMPTY logic of DPM. In particular DPM uses the empty - * string "" as meaning the absence of a value for the field, wheras StoRM - * accepts it as a valis String with which to create a TSpaceToken; moreover - * StoRM uses an Empty TSpaceToken type. - * + * Class that handles DPM DB representation of a SpaceToken, in particular it takes care of the + * NULL/EMPTY logic of DPM. In particular DPM uses the empty string "" as meaning the absence of a + * value for the field, wheras StoRM accepts it as a valis String with which to create a + * TSpaceToken; moreover StoRM uses an Empty TSpaceToken type. + * * @author EGRID ICTP * @version 1.0 * @date June 2005 */ class SpaceTokenStringConverter { - private static SpaceTokenStringConverter stc = new SpaceTokenStringConverter(); + private static SpaceTokenStringConverter stc = new SpaceTokenStringConverter(); - private SpaceTokenStringConverter() { + private SpaceTokenStringConverter() {} - } + /** Method that returns the only instance od SpaceTokenConverter */ + public static SpaceTokenStringConverter getInstance() { - /** - * Method that returns the only instance od SpaceTokenConverter - */ - public static SpaceTokenStringConverter getInstance() { + return stc; + } - return stc; - } + /** + * Method that translates StoRM Empty TSpaceToken String representation into DPM empty + * representation; all other Strings are left as are. + */ + public String toDB(String s) { - /** - * Method that translates StoRM Empty TSpaceToken String representation into - * DPM empty representation; all other Strings are left as are. - */ - public String toDB(String s) { + if (s.equals(TSpaceToken.makeEmpty().toString())) return ""; + return s; + } - if (s.equals(TSpaceToken.makeEmpty().toString())) - return ""; - return s; - } + /** + * Method that translates DPM String representing an Empty TSpaceToken into StoRM representation; + * any other String is left as is. + */ + public String toStoRM(String s) { - /** - * Method that translates DPM String representing an Empty TSpaceToken into - * StoRM representation; any other String is left as is. - */ - public String toStoRM(String s) { - - if ((s == null) || (s.equals(""))) - return TSpaceToken.makeEmpty().toString(); - return s; - } + if ((s == null) || (s.equals(""))) return TSpaceToken.makeEmpty().toString(); + return s; + } } diff --git a/src/main/java/it/grid/storm/catalogs/StatusCodeConverter.java b/src/main/java/it/grid/storm/catalogs/StatusCodeConverter.java index 0d441508..cb1da475 100644 --- a/src/main/java/it/grid/storm/catalogs/StatusCodeConverter.java +++ b/src/main/java/it/grid/storm/catalogs/StatusCodeConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -39,17 +38,15 @@ import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; import static it.grid.storm.srm.types.TStatusCode.SRM_TOO_MANY_RESULTS; -import java.util.Iterator; -import java.util.Map; - import com.google.common.collect.Maps; - import it.grid.storm.srm.types.TStatusCode; +import java.util.Iterator; +import java.util.Map; /** * Package private auxiliary class used to convert between DB raw data and StoRM object model * representation of StatusCode. - * + * * @author: EGRID ICTP * @version: 2.0 * @date: June 2005 @@ -99,15 +96,13 @@ private StatusCodeConverter() { DBtoSTORM.put(Integer.valueOf(33), SRM_CUSTOM_STATUS); Object aux; - for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext();) { + for (Iterator i = DBtoSTORM.keySet().iterator(); i.hasNext(); ) { aux = i.next(); STORMtoDB.put(DBtoSTORM.get(aux), aux); } } - /** - * Method that returns the only instance of StatusCodeConverter. - */ + /** Method that returns the only instance of StatusCodeConverter. */ public static StatusCodeConverter getInstance() { return c; @@ -120,8 +115,7 @@ public static StatusCodeConverter getInstance() { public int toDB(TStatusCode sc) { Integer aux = (Integer) STORMtoDB.get(sc); - if (aux == null) - return -1; + if (aux == null) return -1; return aux.intValue(); } @@ -132,8 +126,7 @@ public int toDB(TStatusCode sc) { public TStatusCode toSTORM(int n) { TStatusCode aux = DBtoSTORM.get(Integer.valueOf(n)); - if (aux == null) - return TStatusCode.EMPTY; + if (aux == null) return TStatusCode.EMPTY; return aux; } } diff --git a/src/main/java/it/grid/storm/catalogs/StoRMDataSource.java b/src/main/java/it/grid/storm/catalogs/StoRMDataSource.java index 663d4f47..78c1f2a7 100644 --- a/src/main/java/it/grid/storm/catalogs/StoRMDataSource.java +++ b/src/main/java/it/grid/storm/catalogs/StoRMDataSource.java @@ -1,96 +1,88 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.config.Configuration; - import java.sql.Connection; import java.sql.SQLException; - import javax.sql.DataSource; - import org.apache.commons.dbcp2.BasicDataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class StoRMDataSource { - - public static final Logger log = LoggerFactory - .getLogger(StoRMDataSource.class); - public static class Builder{ - + public static final Logger log = LoggerFactory.getLogger(StoRMDataSource.class); + + public static class Builder { + private static final String VALIDATION_QUERY = "select 1 from dual"; - + private String driver; private String url; - + private String username; private String password; - + private int maxPooledConnections = 200; private int initialPoolSize = 10; - + private BasicDataSource ds; - - public Builder() { - } - - public Builder driver(String driver){ + + public Builder() {} + + public Builder driver(String driver) { this.driver = driver; return this; } - - public Builder url(String url){ + + public Builder url(String url) { this.url = url; return this; } - - public Builder username(String username){ + + public Builder username(String username) { this.username = username; return this; } - - public Builder password(String password){ + + public Builder password(String password) { this.password = password; return this; } - - public Builder maxPooledConnections(int maxPool){ - if (maxPool < 1){ + + public Builder maxPooledConnections(int maxPool) { + if (maxPool < 1) { throw new IllegalArgumentException("maxPooledConnections must be >= 1"); } this.maxPooledConnections = maxPool; return this; } - - public Builder initialPoolSize(int initialSize){ - if (initialSize <= 0){ + + public Builder initialPoolSize(int initialSize) { + if (initialSize <= 0) { throw new IllegalArgumentException("initialSize must be >= 0"); } this.initialPoolSize = initialSize; return this; } - - private void sanityChecks(){ + + private void sanityChecks() { if ((username == null) || (username.isEmpty())) throw new IllegalArgumentException("null or empty username"); - + if ((driver == null) || (driver.isEmpty())) throw new IllegalArgumentException("null or empty driver"); - - if ((url == null) || (url.isEmpty())) - throw new IllegalArgumentException("null or empty url"); + + if ((url == null) || (url.isEmpty())) throw new IllegalArgumentException("null or empty url"); if ((password == null) || (password.isEmpty())) throw new IllegalArgumentException("null or empty password"); } - - private void logConfiguration(){ - if (log.isDebugEnabled()){ + + private void logConfiguration() { + if (log.isDebugEnabled()) { log.debug("driver: {}", driver); log.debug("url: {}", url); log.debug("username: {}", username); @@ -99,7 +91,8 @@ private void logConfiguration(){ log.debug("maxPooledConnections: {}", maxPooledConnections); } } - public StoRMDataSource build(){ + + public StoRMDataSource build() { sanityChecks(); logConfiguration(); ds = new BasicDataSource(); @@ -115,24 +108,19 @@ public StoRMDataSource build(){ ds.setMaxOpenPreparedStatements(200); return new StoRMDataSource(this); } - } - + private StoRMDataSource(Builder b) { this.dataSource = b.ds; } - + private BasicDataSource dataSource; - - /** - * @return the dataSource - */ + /** @return the dataSource */ public DataSource getDataSource() { return dataSource; } - /** * @throws SQLException * @see org.apache.commons.dbcp.BasicDataSource#close() @@ -140,9 +128,7 @@ public DataSource getDataSource() { 
public void close() throws SQLException { dataSource.close(); } - - - + /** * @return * @throws SQLException @@ -153,29 +139,30 @@ public Connection getConnection() throws SQLException { } private static volatile StoRMDataSource instance = null; - - public static synchronized StoRMDataSource getInstance(){ - return instance; + + public static synchronized StoRMDataSource getInstance() { + return instance; } - - public static synchronized void init(){ - if (instance != null){ - log.warn("Called init on already initialized Storm data source."); - log.warn("The datasource will be closed and re-initialized."); - try { + + public static synchronized void init() { + if (instance != null) { + log.warn("Called init on already initialized Storm data source."); + log.warn("The datasource will be closed and re-initialized."); + try { instance.close(); } catch (SQLException e) { log.error("Error closing storm data source: {}", e.getMessage(), e); } - } - - log.info("Initializing StoRM datasource"); - Configuration conf = Configuration.getInstance(); - instance = new StoRMDataSource.Builder() - .driver(conf.getDBDriver()) - .url(conf.getStormDbURL()) - .username(conf.getDBUserName()) - .password(conf.getDBPassword()) - .build(); + } + + log.info("Initializing StoRM datasource"); + Configuration conf = Configuration.getInstance(); + instance = + new StoRMDataSource.Builder() + .driver(conf.getDBDriver()) + .url(conf.getStormDbURL()) + .username(conf.getDBUserName()) + .password(conf.getDBPassword()) + .build(); } } diff --git a/src/main/java/it/grid/storm/catalogs/SurlMultyOperationRequestData.java b/src/main/java/it/grid/storm/catalogs/SurlMultyOperationRequestData.java index dec48059..470f6d0c 100644 --- a/src/main/java/it/grid/storm/catalogs/SurlMultyOperationRequestData.java +++ b/src/main/java/it/grid/storm/catalogs/SurlMultyOperationRequestData.java @@ -1,9 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; +import com.google.common.collect.Maps; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; @@ -14,14 +14,10 @@ import it.grid.storm.synchcall.surl.SURLStatusStore; import it.grid.storm.synchcall.surl.UnknownSurlException; import it.grid.storm.synchcall.surl.UnknownTokenException; - import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Maps; - public abstract class SurlMultyOperationRequestData extends SurlRequestData implements SynchMultyOperationRequestData { @@ -50,8 +46,7 @@ public synchronized void store() { stored = true; } - private static Map buildSurlStatusMap(TSURL surl, - TReturnStatus status) { + private static Map buildSurlStatusMap(TSURL surl, TReturnStatus status) { if (surl == null || status == null) { throw new IllegalArgumentException( @@ -86,14 +81,20 @@ public final void setStatus(TReturnStatus status) { try { SURLStatusStore.INSTANCE.update(generatedRequestToken, this.SURL, status); } catch (UnknownTokenException e) { - log.warn("Received an UnknownTokenException, probably the token has " - + "expired, unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an UnknownTokenException, probably the token has " + + "expired, unable to update its status in the store: {}", + e.getMessage()); } catch (ExpiredTokenException e) { - log.warn("Received an ExpiredTokenException. The token is expired, " - + "unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an ExpiredTokenException. The token is expired, " + + "unable to update its status in the store: {}", + e.getMessage()); } catch (UnknownSurlException e) { - log.warn("Received an UnknownSurlException, probably the token has " - + "expired, unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an UnknownSurlException, probably the token has " + + "expired, unable to update its status in the store: {}", + e.getMessage()); } } } @@ -107,14 +108,20 @@ protected final void setStatus(TStatusCode statusCode, String explanation) { SURLStatusStore.INSTANCE.update(generatedRequestToken, this.SURL, super.getStatus()); } catch (UnknownTokenException e) { // Never thrown - log.warn("Received an UnknownTokenException, probably the token has " - + "expired, unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an UnknownTokenException, probably the token has " + + "expired, unable to update its status in the store: {}", + e.getMessage()); } catch (ExpiredTokenException e) { - log.warn("Received an ExpiredTokenException. The token is expired, " - + "unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an ExpiredTokenException. 
The token is expired, " + + "unable to update its status in the store: {}", + e.getMessage()); } catch (UnknownSurlException e) { - log.warn("Received an UnknownSurlException, probably the token has " - + "expired, unable to update its status in the store: {}", e.getMessage()); + log.warn( + "Received an UnknownSurlException, probably the token has " + + "expired, unable to update its status in the store: {}", + e.getMessage()); } } } diff --git a/src/main/java/it/grid/storm/catalogs/SurlRequestData.java b/src/main/java/it/grid/storm/catalogs/SurlRequestData.java index 0ba2ed04..122aea8a 100644 --- a/src/main/java/it/grid/storm/catalogs/SurlRequestData.java +++ b/src/main/java/it/grid/storm/catalogs/SurlRequestData.java @@ -1,68 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; -import java.util.Map; - import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public abstract class SurlRequestData implements RequestData { - private static final Logger log = LoggerFactory - .getLogger(SurlRequestData.class); + private static final Logger log = LoggerFactory.getLogger(SurlRequestData.class); - protected TSURL SURL; - protected TReturnStatus status; + protected TSURL SURL; + protected TReturnStatus status; - public SurlRequestData(TSURL toSURL, TReturnStatus status) - throws InvalidSurlRequestDataAttributesException { + public SurlRequestData(TSURL toSURL, TReturnStatus status) + throws InvalidSurlRequestDataAttributesException { - if (toSURL == null || status == null || status.getStatusCode() == null) { + if (toSURL == null || status == null || status.getStatusCode() == null) { throw new InvalidSurlRequestDataAttributesException(toSURL, status); } this.SURL = toSURL; this.status = status; } - /** - * Method that returns the TURL for this chunk of the srm request. - */ - @Override - public final TSURL getSURL() { - - return SURL; - } - - /** - * Method that returns the status for this chunk of the srm request. - */ - @Override - public final TReturnStatus getStatus() { - - return status; - } - - /** - * Method used to set the Status associated to this chunk. If status is null, - * then nothing gets set! - */ + /** Method that returns the TURL for this chunk of the srm request. */ + @Override + public final TSURL getSURL() { + + return SURL; + } + + /** Method that returns the status for this chunk of the srm request. */ + @Override + public final TReturnStatus getStatus() { + + return status; + } + + /** + * Method used to set the Status associated to this chunk. If status is null, then nothing gets + * set! 
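// Illustrative sketch (hypothetical "requestData" variable standing in for any concrete
// SurlRequestData subclass): because of the null guard below, a null argument is ignored
// and the previously assigned status is kept.
requestData.setStatus(new TReturnStatus(TStatusCode.SRM_REQUEST_QUEUED, "request queued"));
requestData.setStatus(null); // no-op: the status assigned above is preserved
TReturnStatus current = requestData.getStatus(); // still SRM_REQUEST_QUEUED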
+ */ public void setStatus(TReturnStatus status) { if (status != null) { this.status = status; } - } + } protected void setStatus(TStatusCode statusCode, String explanation) { @@ -71,186 +60,186 @@ protected void setStatus(TStatusCode statusCode, String explanation) { } else { status = new TReturnStatus(statusCode, explanation); } - } - - /** - * Method that sets the status of this request to SRM_REQUEST_QUEUED; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_REQUEST_QUEUED(String explanation) { - - setStatus(TStatusCode.SRM_REQUEST_QUEUED, explanation); - } - - /** - * Method that sets the status of this request to SRM_REQUEST_INPROGRESS; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_REQUEST_INPROGRESS(String explanation) { - - setStatus(TStatusCode.SRM_REQUEST_INPROGRESS, explanation); - } - - /** - * Method that sets the status of this request to SRM_SUCCESS; it needs the - * explanation String which describes the situation in greater detail; if a - * null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_SUCCESS(String explanation) { - - setStatus(TStatusCode.SRM_SUCCESS, explanation); - } - - /** - * Method that sets the status of this request to SRM_INTERNAL_ERROR; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_INTERNAL_ERROR(String explanation) { - - setStatus(TStatusCode.SRM_INTERNAL_ERROR, explanation); - } - - /** - * Method that sets the status of this request to SRM_INVALID_REQUEST; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_INVALID_REQUEST(String explanation) { - - setStatus(TStatusCode.SRM_INVALID_REQUEST, explanation); - } - - /** - * Method that sets the status of this request to SRM_AUTHORIZATION_FAILURE; - * it needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - @Override - public final void changeStatusSRM_AUTHORIZATION_FAILURE(String explanation) { - - setStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, explanation); - } - - /** - * Method that sets the status of this request to SRM_ABORTED; it needs the - * explanation String which describes the situation in greater detail; if a - * null is passed, then an empty String is used as explanation. 
- */ - @Override - public final void changeStatusSRM_ABORTED(String explanation) { - - setStatus(TStatusCode.SRM_ABORTED, explanation); - } - - @Override - public final void changeStatusSRM_FILE_BUSY(String explanation) { - - setStatus(TStatusCode.SRM_FILE_BUSY, explanation); - } - - @Override - public final void changeStatusSRM_INVALID_PATH(String explanation) { - - setStatus(TStatusCode.SRM_INVALID_PATH, explanation); - } - - @Override - public final void changeStatusSRM_NOT_SUPPORTED(String explanation) { - - setStatus(TStatusCode.SRM_NOT_SUPPORTED, explanation); - } - - @Override - public final void changeStatusSRM_FAILURE(String explanation) { - - setStatus(TStatusCode.SRM_FAILURE, explanation); - } - - @Override - public final void changeStatusSRM_SPACE_LIFETIME_EXPIRED(String explanation) { - - setStatus(TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, explanation); - } - - @Override - public String display(Map map) { - - // nonsense method - return ""; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + ((SURL == null) ? 0 : SURL.hashCode()); - result = prime * result + ((status == null) ? 0 : status.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - SurlRequestData other = (SurlRequestData) obj; - if (SURL == null) { - if (other.SURL != null) { - return false; - } - } else if (!SURL.equals(other.SURL)) { - return false; - } - if (status == null) { - if (other.status != null) { - return false; - } - } else if (!status.equals(other.status)) { - return false; - } - return true; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("SurlRequestData [SURL="); - builder.append(SURL); - builder.append(", status="); - builder.append(status); - builder.append("]"); - return builder.toString(); - } + } + + /** + * Method that sets the status of this request to SRM_REQUEST_QUEUED; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + @Override + public final void changeStatusSRM_REQUEST_QUEUED(String explanation) { + + setStatus(TStatusCode.SRM_REQUEST_QUEUED, explanation); + } + + /** + * Method that sets the status of this request to SRM_REQUEST_INPROGRESS; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + @Override + public final void changeStatusSRM_REQUEST_INPROGRESS(String explanation) { + + setStatus(TStatusCode.SRM_REQUEST_INPROGRESS, explanation); + } + + /** + * Method that sets the status of this request to SRM_SUCCESS; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. 
+ */ + @Override + public final void changeStatusSRM_SUCCESS(String explanation) { + + setStatus(TStatusCode.SRM_SUCCESS, explanation); + } + + /** + * Method that sets the status of this request to SRM_INTERNAL_ERROR; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + @Override + public final void changeStatusSRM_INTERNAL_ERROR(String explanation) { + + setStatus(TStatusCode.SRM_INTERNAL_ERROR, explanation); + } + + /** + * Method that sets the status of this request to SRM_INVALID_REQUEST; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + @Override + public final void changeStatusSRM_INVALID_REQUEST(String explanation) { + + setStatus(TStatusCode.SRM_INVALID_REQUEST, explanation); + } + + /** + * Method that sets the status of this request to SRM_AUTHORIZATION_FAILURE; it needs the + * explanation String which describes the situation in greater detail; if a null is passed, then + * an empty String is used as explanation. + */ + @Override + public final void changeStatusSRM_AUTHORIZATION_FAILURE(String explanation) { + + setStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, explanation); + } + + /** + * Method that sets the status of this request to SRM_ABORTED; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. + */ + @Override + public final void changeStatusSRM_ABORTED(String explanation) { + + setStatus(TStatusCode.SRM_ABORTED, explanation); + } + + @Override + public final void changeStatusSRM_FILE_BUSY(String explanation) { + + setStatus(TStatusCode.SRM_FILE_BUSY, explanation); + } + + @Override + public final void changeStatusSRM_INVALID_PATH(String explanation) { + + setStatus(TStatusCode.SRM_INVALID_PATH, explanation); + } + + @Override + public final void changeStatusSRM_NOT_SUPPORTED(String explanation) { + + setStatus(TStatusCode.SRM_NOT_SUPPORTED, explanation); + } + + @Override + public final void changeStatusSRM_FAILURE(String explanation) { + + setStatus(TStatusCode.SRM_FAILURE, explanation); + } + + @Override + public final void changeStatusSRM_SPACE_LIFETIME_EXPIRED(String explanation) { + + setStatus(TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, explanation); + } + + @Override + public String display(Map map) { + + // nonsense method + return ""; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((SURL == null) ? 0 : SURL.hashCode()); + result = prime * result + ((status == null) ? 
0 : status.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + SurlRequestData other = (SurlRequestData) obj; + if (SURL == null) { + if (other.SURL != null) { + return false; + } + } else if (!SURL.equals(other.SURL)) { + return false; + } + if (status == null) { + if (other.status != null) { + return false; + } + } else if (!status.equals(other.status)) { + return false; + } + return true; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("SurlRequestData [SURL="); + builder.append(SURL); + builder.append(", status="); + builder.append(status); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/SynchMultyOperationRequestData.java b/src/main/java/it/grid/storm/catalogs/SynchMultyOperationRequestData.java index 0a351c21..8aeb6773 100644 --- a/src/main/java/it/grid/storm/catalogs/SynchMultyOperationRequestData.java +++ b/src/main/java/it/grid/storm/catalogs/SynchMultyOperationRequestData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -8,7 +7,7 @@ public interface SynchMultyOperationRequestData extends RequestData { - public TRequestToken getGeneratedRequestToken(); + public TRequestToken getGeneratedRequestToken(); - public void store(); + public void store(); } diff --git a/src/main/java/it/grid/storm/catalogs/TURLConverter.java b/src/main/java/it/grid/storm/catalogs/TURLConverter.java index 54df7da7..33b74ec0 100644 --- a/src/main/java/it/grid/storm/catalogs/TURLConverter.java +++ b/src/main/java/it/grid/storm/catalogs/TURLConverter.java @@ -1,56 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.srm.types.TTURL; /** - * Class that handles DPM DB representation of a TTURL, in particular it takes - * care of the NULL/EMPTY logic of DPM. Indeed DPM uses 0/null to mean an empty - * field, whereas StoRM uses the type TTURL.makeEmpty(); in particular StoRM - * converts an empty String or a null to an Empty TTURL! - * + * Class that handles DPM DB representation of a TTURL, in particular it takes care of the + * NULL/EMPTY logic of DPM. Indeed DPM uses 0/null to mean an empty field, whereas StoRM uses the + * type TTURL.makeEmpty(); in particular StoRM converts an empty String or a null to an Empty TTURL! 
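To make the NULL/EMPTY convention described in the comment above concrete, here is a minimal usage sketch; it relies only on the TURLConverter members that appear in this patch (getInstance, toDB, toStoRM) and the variable names are illustrative, not part of the patch.

  // Sketch: the DPM NULL/EMPTY convention handled by TURLConverter.
  TURLConverter converter = TURLConverter.getInstance();

  // An empty TTURL is stored as a NULL on the DPM side...
  String dbValue = converter.toDB(TTURL.makeEmpty().toString()); // expected: null

  // ...while a null or empty String coming from the DB becomes the empty TTURL representation.
  String fromNull = converter.toStoRM(null); // expected: TTURL.makeEmpty().toString()
  String fromEmpty = converter.toStoRM("");  // expected: TTURL.makeEmpty().toString()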
+ * * @author EGRID ICTP * @version 1.0 * @date March 2006 */ public class TURLConverter { - private static TURLConverter stc = new TURLConverter(); // only instance + private static TURLConverter stc = new TURLConverter(); // only instance - private TURLConverter() { + private TURLConverter() {} - } + /** Method that returns the only instance of SizeInBytesIntConverter */ + public static TURLConverter getInstance() { - /** - * Method that returns the only instance of SizeInBytesIntConverter - */ - public static TURLConverter getInstance() { + return stc; + } - return stc; - } + /** + * Method that transaltes the Empty TTURL into the empty representation of DPM which is a null! + * Any other String is left as is. + */ + public String toDB(String s) { - /** - * Method that transaltes the Empty TTURL into the empty representation of DPM - * which is a null! Any other String is left as is. - */ - public String toDB(String s) { + if (s.equals(TTURL.makeEmpty().toString())) return null; + return s; + } - if (s.equals(TTURL.makeEmpty().toString())) - return null; - return s; - } + /** + * Method that translates DPMs "" or null String as the Empty TTURL String representation. Any + * other String is left as is. + */ + public String toStoRM(String s) { - /** - * Method that translates DPMs "" or null String as the Empty TTURL String - * representation. Any other String is left as is. - */ - public String toStoRM(String s) { - - if ((s == null) || (s.equals(""))) - return TTURL.makeEmpty().toString(); - return s; - } + if ((s == null) || (s.equals(""))) return TTURL.makeEmpty().toString(); + return s; + } } diff --git a/src/main/java/it/grid/storm/catalogs/TransferProtocolListConverter.java b/src/main/java/it/grid/storm/catalogs/TransferProtocolListConverter.java index 205e2527..2db97cc8 100644 --- a/src/main/java/it/grid/storm/catalogs/TransferProtocolListConverter.java +++ b/src/main/java/it/grid/storm/catalogs/TransferProtocolListConverter.java @@ -1,54 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import it.grid.storm.common.types.TURLPrefix; +import it.grid.storm.namespace.model.Protocol; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.ArrayList; -import it.grid.storm.namespace.model.Protocol; /** - * Package private auxiliary class used to convert between the DB raw data - * representation and StoRM s Object model list of transfer protocols. - * + * Package private auxiliary class used to convert between the DB raw data representation and StoRM + * s Object model list of transfer protocols. */ - class TransferProtocolListConverter { - /** - * Method that returns a List of Uppercase Strings used in the DB to represent - * the given TURLPrefix. An empty List is returned in case the conversion does - * not succeed, a null TURLPrefix is supplied, or its size is 0. - */ - public static List toDB(TURLPrefix turlPrefix) { + /** + * Method that returns a List of Uppercase Strings used in the DB to represent the given + * TURLPrefix. An empty List is returned in case the conversion does not succeed, a null + * TURLPrefix is supplied, or its size is 0. 
+ */ + public static List toDB(TURLPrefix turlPrefix) { - List result = new ArrayList(); - Protocol protocol; - for (Iterator it = turlPrefix.getDesiredProtocols().iterator(); it - .hasNext();) { - protocol = it.next(); - result.add(protocol.getSchema()); - } - return result; - } + List result = new ArrayList(); + Protocol protocol; + for (Iterator it = turlPrefix.getDesiredProtocols().iterator(); it.hasNext(); ) { + protocol = it.next(); + result.add(protocol.getSchema()); + } + return result; + } - /** - * Method that returns a TURLPrefix of transfer protocol. If the translation - * cannot take place, a TURLPrefix of size 0 is returned. Likewise if a null - * List is supplied. - */ - public static TURLPrefix toSTORM(List listOfProtocol) { + /** + * Method that returns a TURLPrefix of transfer protocol. If the translation cannot take place, a + * TURLPrefix of size 0 is returned. Likewise if a null List is supplied. + */ + public static TURLPrefix toSTORM(List listOfProtocol) { - TURLPrefix turlPrefix = new TURLPrefix(); - Protocol protocol = null; - for (Iterator i = listOfProtocol.iterator(); i.hasNext();) { - protocol = Protocol.getProtocol(i.next()); - if (!(protocol.equals(Protocol.UNKNOWN))) - turlPrefix.addProtocol(protocol); - } - return turlPrefix; - } + TURLPrefix turlPrefix = new TURLPrefix(); + Protocol protocol = null; + for (Iterator i = listOfProtocol.iterator(); i.hasNext(); ) { + protocol = Protocol.getProtocol(i.next()); + if (!(protocol.equals(Protocol.UNKNOWN))) turlPrefix.addProtocol(protocol); + } + return turlPrefix; + } } diff --git a/src/main/java/it/grid/storm/catalogs/VolatileAndJiTCatalog.java b/src/main/java/it/grid/storm/catalogs/VolatileAndJiTCatalog.java index 14928077..6329428c 100644 --- a/src/main/java/it/grid/storm/catalogs/VolatileAndJiTCatalog.java +++ b/src/main/java/it/grid/storm/catalogs/VolatileAndJiTCatalog.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; @@ -14,7 +13,6 @@ import it.grid.storm.griduser.LocalUser; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.srm.types.TLifeTimeInSeconds; - import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; @@ -22,582 +20,593 @@ import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This catalog holds all info needed to pin files for JiT ACL tracking, and - * for keeping track of Volatile files. pinLifetime is the time Jit ACLs will be - * in place: upon expiry ACLs are removed; fileLifetime is the time Volatile - * files will remain in the system: upon expiry those files are removed. In - * particular the srmPrepareToPut analyzes the request and if the specified file - * is set to Volatile, then it calls on the catalog to add the corresponding - * entry for the given fileLifetime. If StoRM is configured for JiT, another - * method is invoked to add an entry to keep track of the ACLs for the desired - * pinLifetime. For srmPrepareToGet, only if StoRM is configured for JiT ACLs - * then a method is invoked to add the corresponding entry for the given - * pinLifetime. Repeatedly putting the same Volatile file, will overwrite - * existing fileLifetime only if the overwrite option allows file overwriting. 
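A minimal round-trip sketch for the TransferProtocolListConverter shown above, assuming only the members visible in this patch (toDB, toSTORM, TURLPrefix.addProtocol/getDesiredProtocols, Protocol.getProtocol/getSchema); the "gsiftp" schema string is an illustrative value, and unknown schemas are simply skipped by toSTORM.

  // Sketch: DB string list <-> TURLPrefix round trip.
  List fromDb = new ArrayList();
  fromDb.add("gsiftp"); // illustrative schema string; Protocol.UNKNOWN entries are ignored

  TURLPrefix prefix = TransferProtocolListConverter.toSTORM(fromDb);
  List backToDb = TransferProtocolListConverter.toDB(prefix); // schema strings of the accepted protocols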
- * If JiT is enabled and it is a new user that is putting again the same file - * in, a new pinLifetime entry is added; but if it is the same user, the - * pinLifetime WILL be changed provided the new expiry exceeds the current one! - * Repeatedly invoking PtG on the same file behaves similarly: different users - * will have their own pinLifetime record, but the same user WILL change the - * pinLifetime provided the new expiry exceeds the current one! In case the - * pinLifetime exceeds the fileLifetime, the fileLifetime is used as ceiling. - * This may occur when a file is Put and defined Volatile, but with a - * pinLifetime that is longer than that of the pin. Or if _subsequent_ calls to - * PtG specify a pinLifetime that lasts longer. To be more precise, the - * pinLifetime gets recorded as requested, but upon expiry of the volatile entry - * any associated acl will get removed as well, regardless of the acl expiry. - * When lifetime expires: volatile files get erased from the system and their - * entries in the catalog are removed; tracked ACLs get removed from the files - * WITHOUT erasing the files, and their entries in the catalog are removed; - * finally for Volatile files with ACLs set up on them, the ACLs are removed AND - * the files are erased, also cleaning up the catalog. As a last note, the - * catalog checks periodically its entries for any expired ones, and then - * proceeds with purging; this frequency of cleaning is specified in a - * configuration parameter, and the net effect is that the pinning/volatile may - * actually last longer (but never less) because the self cleaning mechanism is - * active only at those predetermined times. - * + * This catalog holds all info needed to pin files for JiT ACL tracking, and for keeping track of + * Volatile files. pinLifetime is the time Jit ACLs will be in place: upon expiry ACLs are removed; + * fileLifetime is the time Volatile files will remain in the system: upon expiry those files are + * removed. In particular the srmPrepareToPut analyzes the request and if the specified file is set + * to Volatile, then it calls on the catalog to add the corresponding entry for the given + * fileLifetime. If StoRM is configured for JiT, another method is invoked to add an entry to keep + * track of the ACLs for the desired pinLifetime. For srmPrepareToGet, only if StoRM is configured + * for JiT ACLs then a method is invoked to add the corresponding entry for the given pinLifetime. + * Repeatedly putting the same Volatile file, will overwrite existing fileLifetime only if the + * overwrite option allows file overwriting. If JiT is enabled and it is a new user that is putting + * again the same file in, a new pinLifetime entry is added; but if it is the same user, the + * pinLifetime WILL be changed provided the new expiry exceeds the current one! Repeatedly invoking + * PtG on the same file behaves similarly: different users will have their own pinLifetime record, + * but the same user WILL change the pinLifetime provided the new expiry exceeds the current one! In + * case the pinLifetime exceeds the fileLifetime, the fileLifetime is used as ceiling. This may + * occur when a file is Put and defined Volatile, but with a pinLifetime that is longer than that of + * the pin. Or if _subsequent_ calls to PtG specify a pinLifetime that lasts longer. To be more + * precise, the pinLifetime gets recorded as requested, but upon expiry of the volatile entry any + * associated acl will get removed as well, regardless of the acl expiry. 
When lifetime expires: + * volatile files get erased from the system and their entries in the catalog are removed; tracked + * ACLs get removed from the files WITHOUT erasing the files, and their entries in the catalog are + * removed; finally for Volatile files with ACLs set up on them, the ACLs are removed AND the files + * are erased, also cleaning up the catalog. As a last note, the catalog checks periodically its + * entries for any expired ones, and then proceeds with purging; this frequency of cleaning is + * specified in a configuration parameter, and the net effect is that the pinning/volatile may + * actually last longer (but never less) because the self cleaning mechanism is active only at those + * predetermined times. + * * @author EGRID - ICTP Trieste * @version 2.0 * @date November 2006 */ public class VolatileAndJiTCatalog { - private static final Logger log = LoggerFactory - .getLogger(VolatileAndJiTCatalog.class); - - /** only instance of Catalog! */ - private static final VolatileAndJiTCatalog cat = new VolatileAndJiTCatalog(); - /** only instance of DAO object! */ - private static final VolatileAndJiTDAO dao = VolatileAndJiTDAO.getInstance(); - /** Timer object in charge of cleaning periodically the Catalog! */ - private final Timer cleaner = new Timer(); - /** Delay time before starting cleaning thread! Set to 1 minute */ - private final long delay = Configuration.getInstance() - .getCleaningInitialDelay() * 1000; - /** Period of execution of cleaning! Set to 1 hour */ - private final long period = Configuration.getInstance() - .getCleaningTimeInterval() * 1000; - /** fileLifetime to use if user specified a non-positive value */ - private final long defaultFileLifetime = Configuration.getInstance() - .getFileLifetimeDefault(); - /** Number of seconds to use as default if the supplied lifetime is zero! */ - private final long floor = Configuration.getInstance() - .getPinLifetimeDefault(); - /** - * Maximum number of seconds that an ACL can live: the life time requested by - * the user cannot be greater than this value! This ceiling is needed because - * of the cron job that removes pool account mappings: when the mapping is - * removed, there must NOT be ANY ACL for that pool-user left! - */ - private final long ceiling = Configuration.getInstance() - .getPinLifetimeMaximum(); - - /** - * Private constructor that starts the cleaning timer. - */ - private VolatileAndJiTCatalog() { - - TimerTask cleaningTask = new TimerTask() { - - @Override - public void run() { - - purge(); - } - }; - cleaner.scheduleAtFixedRate(cleaningTask, delay, period); - } - - /** - * Method that returns the only instance of PinnedFilesCatalog. - */ - public static VolatileAndJiTCatalog getInstance() { - - return cat; - } - - /** - * Checks whether the given file exists in the volatile table or not. - * - * @param filename - * @return true if there is antry for the given file in the - * volatilte table, false otherwise. - */ - synchronized public boolean exists(PFN pfn) { - - return dao.exists(pfn.getValue()); - } - - /** - * Method used to expire _all_ related entries in the JiT catalogue, that were - * setup during a PtG operation. The method is intended to be used by code - * handling srmAbort command. Notice that the Traverse on the parents is NOT - * removed! This is to accomodate for the use case of a user that has run many - * PtG on different SURLs but all contained in the same directory tree! In - * practice this method removes the R permission. 
If any entry does not exist, - * then nothing happens and a warning gets written in the logs; otherwise - * entries get their start time set to now, and the lifetime set to zero; in - * case more than one matching entry is found, a message gets written to the - * logs, and the updating continues anyway as explained. At this point, when - * the garbage collector wakes up the entries get cleanly handled (physical - * ACL is removed, catalog entry removed, etc.); or an earlier cleaning can be - * forced by invoking directly the purge mehod. The method returns FALSE in - * case an entry was not found or the supplied parameters were null, and TRUE - * otherwise. Yet keep in mind that it says nothing of whether the DB - * operation was successful or not. - */ - synchronized public boolean expireGetJiTs(PFN pfn, LocalUser localUser) { - - if (pfn != null && localUser != null) { - return expireJiT(pfn, localUser, FilesystemPermission.Read); - } - log.error("VolatileAndJiT CATALOG: programming bug! expireGetJiTs invoked " - + "on null attributes; pfn={} localUser={}", pfn, localUser); - return false; - } - - /** - * Method used to expire an entry in the JiT catalogue. The method is intended - * to be used by code handling srmAbort command. If the entry does not exist, - * then nothing happens and a warning gets written in the logs; otherwise the - * entry gets its start time set to now, and its lifetime set to zero; in case - * more than one matching entry is found, a message gets written to the logs, - * and the updating continues anyway as explained. At this point, when the - * garbage collector wakes up the entry is cleanly handled (physical ACL is - * removed, catalog entry removed, etc.); or an earlier cleaning can be forced - * by invoking directly the purge method. The method returns FALSE in case no - * entry was found or the supplied parameters were null, and TRUE otherwise. - * Yet keep in mind that is says nothing of whether the DB operation was - * successful or not. - */ - synchronized public boolean expireJiT(PFN pfn, LocalUser localUser, - FilesystemPermission acl) { - - if (pfn != null && localUser != null && acl != null) { - String fileName = pfn.getValue(); - int uid = localUser.getUid(); - int intacl = acl.getInt(); - // from the current time we remove 10 seconds because it was observed - // that when executing purge right after invoking this method, less - // than 1 second elapses, so no purging takes place at all since expiry - // is not yet reached! - // Seconds needed and not milliseconds! - long pinStart = (Calendar.getInstance().getTimeInMillis() / 1000) - 10; - long pinTime = 0; // set to zero the lifetime! - int n = dao.numberJiT(fileName, uid, intacl); - if (n == 0) { - log.warn("VolatileAndJiT CATALOG: expireJiT found no entry for ({}, {}, " - + "{})!", fileName, uid, intacl); - return false; - } - dao.forceUpdateJiT(fileName, uid, intacl, pinStart, pinTime); - if (n > 1) { - log.warn("VolatileAndJiT CATALOG: expireJiT found more than one entry " - + "for ({}, {}, {}); the catalogue could be corrupt!", fileName, uid, intacl); - } - return true; - } - log.error("VolatileAndJiT CATALOG: programming bug! expireJiT invoked on " - + "null attributes; pfn={} localUser={} acl={}", pfn, localUser, acl); - return false; - } - - /** - * Method used to expire _all_ related entries in the JiT catalogue, that were - * setup during a PtP operation. The method is intended to be used by code - * handling srmAbort command, and by srmPutDone. 
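As a hedged illustration of the srmAbort/srmPutDone use case these comments describe: expireGetJiTs expires only the Read JiT, while expirePutJiTs expires both the Read and the Write JiTs, both delegating to expireJiT. A caller might look roughly like the sketch below; pfn and localUser are placeholders.

  // Sketch: abort handling for a PtG versus a PtP request.
  VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance();

  // PtG abort: only the Read ACL tracked for this user is expired.
  boolean getExpired = catalog.expireGetJiTs(pfn, localUser);

  // PtP abort (or srmPutDone): both Read and Write ACLs are expired.
  boolean putExpired = catalog.expirePutJiTs(pfn, localUser);

  // Either call returns false only for null arguments or a missing entry;
  // it says nothing about whether the underlying DB update succeeded.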
Notice that the Traverse on - * the parents is NOT removed! This is to accomodate for the use case of a - * user that has run many PtP on different SURLs but that are all contained in - * the same directory tree! In practice, this method removes R and W - * permissions. If any entry does not exist, then nothing happens and a - * warning gets written in the logs; otherwise entries get their start time - * set to now, and the lifetime set to zero; in case more than one matching - * entry is found, a message gets written to the logs, and the updating - * continues anyway as explained. At this point, when the garbage collector - * wakes up the entries get cleanly handled (physical ACL is removed, catalog - * entry removed, etc.); or an earlier cleaning can be forced by invoking - * directly the purge mehod. The method returns FALSE in case an entry was not - * found or the supplied parameters were null, and TRUE otherwise. Yet keep in - * mind that is says nothing of whether the DB operation was successful or - * not. - */ - synchronized public boolean expirePutJiTs(PFN pfn, LocalUser localUser) { - - if (pfn != null && localUser != null) { - return expireJiT(pfn, localUser, FilesystemPermission.Read) - && expireJiT(pfn, localUser, FilesystemPermission.Write); - } - - log.error("VolatileAndJiT CATALOG: programming bug! expirePutJiTs invoked " - + "on null attributes; pfn={} localUser={}", pfn, localUser); - return false; - } - - /** - * Method that purges the catalog, removing expired ACLs and deleting expired - * Volatile files. When Volatile entries expire, any realted JiT will - * automatically expire too, regardless of the specified pinLifetime: that is, - * fileLifetime wins over pinLifetime. WARNING! Notice that the catalogue DOES - * get cleaned up even if the physical removal of the ACL or erasing of the - * file fails. - */ - public synchronized void purge() { - - log.debug("VolatileAndJiT CATALOG! Executing purge!"); - Calendar rightNow = Calendar.getInstance(); - /** - * removes all expired entries from storm_pin and storm_track, returning two - * Collections: one with the PFN of Volatile files, and the other with PFN + - * GridUser couple of the entries that were just being tracked for the ACLs - * set up on them. - */ - Collection[] expired = dao.removeExpired(rightNow.getTimeInMillis() / 1000); - Collection expiredVolatile = expired[0]; - Collection expiredJiT = expired[1]; - if (expiredVolatile.size() == 0) { - log.debug("VolatileAndJiT CATALOG! No expired Volatile entries found."); - } else { - log.info("VolatileAndJiT CATALOG! Found and purged the following expired " - + "Volatile entries:\n {}", volatileString(expired[0])); - } - if (expiredJiT.size() == 0) { - log.debug("VolatileAndJiT CATALOG! No JiT entries found."); - } else { - log.info("VolatileAndJiT CATALOG! Found and purged the following expired " - + "JiT ACLs entries:\n {}", jitString(expired[1])); - } - // Remove ACLs - JiTData aux = null; - for (Iterator i = expiredJiT.iterator(); i.hasNext();) { - aux = (JiTData) i.next(); - int jitacl = aux.acl(); - String jitfile = aux.pfn(); - int jituid = aux.uid(); - int jitgid = aux.gid(); - try { - log.info("VolatileAndJiT CATALOG. 
Removing ACL {} on file {} for " - + "user {},{}", jitacl, jitfile, jituid, jitgid); - LocalFile auxFile = NamespaceDirector.getNamespace() - .resolveStoRIbyPFN(PFN.make(jitfile)).getLocalFile(); - LocalUser auxUser = new LocalUser(jituid, jitgid); - FilesystemPermission auxACL = new FilesystemPermission(jitacl); - - AclManager manager = AclManagerFS.getInstance(); - if (auxFile == null) { - log.warn("VolatileAndJiT CATALOG! Unable to setting up the ACL. " - + "LocalFile is null!"); - } else { - try { - manager.revokeUserPermission(auxFile, auxUser, auxACL); - } catch (IllegalArgumentException e) { - log.error("Unable to revoke user permissions on the file. " - + "IllegalArgumentException: {}", e.getMessage(), e); - } - } - } catch (Exception e) { - log.error("VolatileAndJiT CATALOG! Entry removed from Catalog, but " - + "physical ACL {} for user {}, could NOT be removed from {}", - jitacl, jituid, jitgid, jitfile); - log.error("VolatileAndJiT CATALOG! {}", e.getMessage(), e); - } - } - // Delete files - String auxPFN = null; - for (Iterator i = expiredVolatile.iterator(); i.hasNext();) { - auxPFN = (String) i.next(); - try { - log.info("VolatileAndJiT CATALOG. Deleting file {}", auxPFN); - LocalFile auxFile = NamespaceDirector.getNamespace() - .resolveStoRIbyPFN(PFN.make(auxPFN)).getLocalFile(); - boolean ok = auxFile.delete(); - if (!ok) { - throw new Exception("Java File deletion failed!"); - } - } catch (Exception e) { - log.error("VolatileAndJiT CATALOG! Entry removed from Catalog, but " - + "physical file {} could NOT be deleted!", auxPFN); - log.error("VolatileAndJiT CATALOG! {}", e.getMessage(), e); - } - } - } - - /** - * Method used upon expiry of SRM_SPACE_AVAILABLE to remove all JiT entries in - * the DB table, related to the given PFN; Notice that _no_ distinction is - * made aboutthe specific user! This is because upon expiry of - * SRM_SPACE_AVAILABLE the file gets erased, so all JiTs on that file are - * automatically erased. This implies that all catalogue entries get removed. - * If no entries are present nothing happens. - */ - public synchronized void removeAllJiTsOn(PFN pfn) { - - if (pfn != null) { - dao.removeAllJiTsOn(pfn.getValue()); - return; - } - log.error("VolatileAndJiT CATALOG: programming bug! removeAllJiTsOn " - + "invoked on null pfn!"); - } - - /** - * Method used to remove a Volatile entry that matches the supplied pfn, from - * the DB. If null is supplied, an error message gets logged and nothing - * happens. If PFN is not found, nothing happens and _no_ message gets logged. - */ - public synchronized void removeVolatile(PFN pfn) { - - if (pfn != null) { - dao.removeVolatile(pfn.getValue()); - return; - } - log.warn("VolatileAndJiT CATALOG: programming bug! removeVolatile invoked " - + "on null pfn!"); - } - - /** - * Method used to keep track of an ACL set up on a PFN; it needs the PFN, the - * LocalUser, the ACL and the desired pinLifeTime. If the 3-ple (PFN, ACL, - * LocalUser) is not present, it gets added; if it is already present, - * provided the new desired expiry occurs after the present one, it gets - * changed. If the supplied lifetime is zero, then a default value is used - * instead. If it is larger than a ceiling, that ceiling is used instead. 
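The floor/ceiling adjustment mentioned in this comment is performed by validatePinLifetime further down in this class: a zero or too-small request is raised to the configured default, and anything above the configured maximum is cut to that maximum. A standalone sketch of the same clamping, with illustrative numbers:

  // Sketch: the clamping applied to a requested pin lifetime (all values in seconds).
  static long clampPinLifetime(long requested, long floor, long ceiling) {
    long duration = requested < floor ? floor : requested; // raise zero/too-small requests to the floor
    return duration <= ceiling ? duration : ceiling;       // never exceed the configured maximum
  }

  // e.g. with an illustrative floor of 259200 (3 days) and ceiling of 1814400 (3 weeks):
  // clampPinLifetime(0, 259200, 1814400)        -> 259200
  // clampPinLifetime(604800, 259200, 1814400)   -> 604800
  // clampPinLifetime(99999999, 259200, 1814400) -> 1814400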
The - * floor value in seconds can be set from the configuration file, with the - * property: pinLifetime.minimum While the ceiling value in seconds is set - * with: pinLifetime.maximum BEWARE: The intended use case is in both - * srmPrepareToGet and srmPrepareToPut, for the case of the _JiT_ security - * mechanism. The maximum is necessary because JiT ACLs cannot last longer - * than the amount of time the pool account is leased. Notice that for - * Volatile entries, a pinLifetime larger than the fileLifetime can be - * specified. However, when Volatile files expire any related JiTs - * automatically expire in anticipation! - */ - public synchronized void trackJiT(PFN pfn, LocalUser localUser, - FilesystemPermission acl, Calendar start, TLifeTimeInSeconds pinLifetime) { - - if (pfn != null && localUser != null && acl != null && start != null - && pinLifetime != null) { - - String fileName = pfn.getValue(); - int uid = localUser.getUid(); - int gid = localUser.getPrimaryGid(); - int intacl = acl.getInt(); - // seconds needed and not milliseconds! - long pinStart = start.getTimeInMillis() / 1000; - long pinTime = validatePinLifetime(pinLifetime.value()); - int n = dao.numberJiT(fileName, uid, intacl); - if (n == 0) { - dao.addJiT(fileName, uid, gid, intacl, pinStart, pinTime); - } else { - dao.updateJiT(fileName, uid, intacl, pinStart, pinTime); - if (n > 1) { - log.warn("VolatileAndJiT CATALOG: More than one entry found for " - + "({}, {}, {}); the catalogue could be corrupt!", fileName, uid, - intacl); - } - } - return; - } - log.error("VolatileAndJiT CATALOG: programming bug! TrackACL invoked on " - + "null attributes; pfn={} localUser={} acl={} start={} pinLifetime={}", - pfn, localUser, acl, start, pinLifetime); - } - - /** - * Method that adds an entry to the catalog that keeps track of Volatile - * files. The PFN and the fileLifetime are needed. If no entry corresponding - * to the given PFN is found, a new one gets recorded. If the PFN is already - * present, then provided the new expiry (obtained by adding together - * current-time and requested-lifetime) exceeds the expiry in the catalog, - * the entry is updated. Otherwise nothing takes place. If the supplied - * fileLifetime is zero, then a default value is used instead. This floor - * default value in seconds can be set from the configuration file, with the - * property: fileLifetime.default BEWARE: The intended use case for this - * method is during srmPrepareToPut. When files are uploaded into StoRM, they - * get specified as Volatile or Permanent. The PtP logic determines if the - * request is for a Volatile file and in that case it adds a new entry in the - * catalog. That is the purpose of this method. Any subsequent PtP call will - * just result in a modification of the expiry, provided the newer one lasts - * longer than the original one. Yet bear in mind that two or more PtP on the - * same file makes NO SENSE AT ALL! If any DB error occurs, then nothing gets - * added/updated and an error message gets logged. - */ - public synchronized void trackVolatile(PFN pfn, Calendar start, - TLifeTimeInSeconds fileLifetime) { - - if (pfn != null && fileLifetime != null && start != null) { - - String fileName = pfn.getValue(); - long fileTime = fileLifetime.value(); - if (fileTime <= 0) { - fileTime = defaultFileLifetime; - } - long fileStart = start.getTimeInMillis() / 1000; // seconds needed and not - // milliseconds! - int n = dao.numberVolatile(fileName); - if (n == -1) { - log.error("VolatileAndJiT CATALOG! 
DB problem does not allow to count " - + "number of Volatile entries for {}! Volatile entry NOT processed!", - pfn); - } else if (n == 0) { - dao.addVolatile(fileName, fileStart, fileTime); - } else { - dao.updateVolatile(fileName, fileStart, fileTime); - if (n > 1) { - log.warn("VolatileAndJiT CATALOG: More than one entry found for {}; " - + "the catalogue could be corrupt!", fileName); - } - } - return; - } - log.warn("VolatileAndJiT CATALOG: programming bug! volatileEntry invoked " - + "on null attributes; pfn={} start={} fileLifetime={}", pfn, start, - fileLifetime); - } - - public synchronized void setStartTime(PFN pfn, Calendar start) - throws Exception { - - if (pfn == null || start == null) { - log.warn("VolatileAndJiT CATALOG: programming bug! volatileEntry invoked " - + "on null attributes; pfn={} start={}", pfn, start); - return; - } - - String fileName = pfn.getValue(); - // seconds needed and not milliseconds! - long fileStart = start.getTimeInMillis() / 1000; - int n = dao.numberVolatile(fileName); - if (n == -1) { - log.error("VolatileAndJiT CATALOG! DB problem does not allow to count " - + "number of Volatile entries for {}! Volatile entry NOT processed!", - pfn); - return; - } - if (n == 0) { - throw new Exception("Unable to update row volatile for pfn \'" + pfn - + "\' , not on the database!"); - } - dao.updateVolatile(fileName, fileStart); - if (n > 1) { - log.warn("VolatileAndJiT CATALOG: More than one entry found for {}; " - + "the catalogue could be corrupt!", fileName); - } - } - - /** - * Method that returns a List whose first element is a Calendar with the - * starting date and time of the lifetime of the supplied PFN, and whose - * second element is the TLifeTime the system is keeping the PFN. If no entry - * is found for the given PFN, an empty List is returned. Likewise if any DB - * error occurs. In any case, proper error messages get logged. Moreover - * notice that if for any reason the value for the Lifetime read from the DB - * does not allow creation of a valid TLifeTimeInSeconds, an Empty one is - * returned. Error messages in logs warn of the situation. - */ - public synchronized List volatileInfoOn(PFN pfn) { - - ArrayList aux = new ArrayList(); - if (pfn == null) { - log.error("VolatileAndJiT CATALOG: programming bug! volatileInfoOn " - + "invoked on null PFN!"); - return aux; - } - Collection c = dao.volatileInfoOn(pfn.getValue()); - if (c.size() != 2) { - return aux; - } - Iterator i = c.iterator(); - // start time - long startInMillis = i.next().longValue() * 1000; - Calendar auxcal = Calendar.getInstance(); - auxcal.setTimeInMillis(startInMillis); - aux.add(auxcal); - // lifeTime - long lifetimeInSeconds = ((Long) i.next()).longValue(); - TLifeTimeInSeconds auxLifeTime = TLifeTimeInSeconds.makeEmpty(); - try { - auxLifeTime = TLifeTimeInSeconds - .make(lifetimeInSeconds, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - log.error("VolatileAndJiT CATALOG: programming bug! Retrieved long does " - + "not allow TLifeTimeCreation! long is: {}; error is: {}", - lifetimeInSeconds, e.getMessage(), e); - } - aux.add(auxLifeTime); - return aux; - } - - /** - * Private method used to return a String representation of the expired - * entries Collection of JiTData. 
- */ - private String jitString(Collection c) { - - if (c == null) { - return ""; - } - StringBuilder sb = new StringBuilder(); - sb.append("file,acl,uid,gid\n"); - JiTData aux = null; - for (Iterator i = c.iterator(); i.hasNext();) { - aux = i.next(); - sb.append(aux.pfn()); - sb.append(","); - sb.append(aux.acl()); - sb.append(","); - sb.append(aux.uid()); - sb.append(","); - sb.append(aux.gid()); - if (i.hasNext()) { - sb.append("\n"); - } - } - return sb.toString(); - } - - /** - * Private method that makes sure that the lifeTime of the request: (1) It is - * not less than a predetermined value: this check is needed because clients - * may omit to supply a value and some default one must be used; moreover, it - * is feared that if the requested lifetime is very low, such as 0 or a few - * seconds, there could be strange problems in having a file written and - * erased immediately. (2) It is not larger than a given ceiling; this is - * necessary because in the JiT model, the underlying system may decide to - * remove the pool account mappings; it is paramount that no ACLs remain set - * up for the now un-associated pool account. - */ - private long validatePinLifetime(long lifetime) { - - long duration = lifetime < floor ? floor : lifetime; // adjust for lifetime - // set to zero! - duration = duration <= ceiling ? duration : ceiling; // make sure lifetime - // is not longer than - // the maximum set! - return duration; - } - - /** - * Private method used to return a String representation of the expired - * entries Collection of pfn Strings. - */ - private String volatileString(Collection c) { - - if (c == null) { - return ""; - } - StringBuilder sb = new StringBuilder(); - for (Iterator i = c.iterator(); i.hasNext();) { - sb.append(i.next()); - if (i.hasNext()) { - sb.append(","); - } - } - return sb.toString(); - } + private static final Logger log = LoggerFactory.getLogger(VolatileAndJiTCatalog.class); + + /** only instance of Catalog! */ + private static final VolatileAndJiTCatalog cat = new VolatileAndJiTCatalog(); + /** only instance of DAO object! */ + private static final VolatileAndJiTDAO dao = VolatileAndJiTDAO.getInstance(); + /** Timer object in charge of cleaning periodically the Catalog! */ + private final Timer cleaner = new Timer(); + /** Delay time before starting cleaning thread! Set to 1 minute */ + private final long delay = Configuration.getInstance().getCleaningInitialDelay() * 1000; + /** Period of execution of cleaning! Set to 1 hour */ + private final long period = Configuration.getInstance().getCleaningTimeInterval() * 1000; + /** fileLifetime to use if user specified a non-positive value */ + private final long defaultFileLifetime = Configuration.getInstance().getFileLifetimeDefault(); + /** Number of seconds to use as default if the supplied lifetime is zero! */ + private final long floor = Configuration.getInstance().getPinLifetimeDefault(); + /** + * Maximum number of seconds that an ACL can live: the life time requested by the user cannot be + * greater than this value! This ceiling is needed because of the cron job that removes pool + * account mappings: when the mapping is removed, there must NOT be ANY ACL for that pool-user + * left! + */ + private final long ceiling = Configuration.getInstance().getPinLifetimeMaximum(); + + /** Private constructor that starts the cleaning timer. 
*/ + private VolatileAndJiTCatalog() { + + TimerTask cleaningTask = + new TimerTask() { + + @Override + public void run() { + + purge(); + } + }; + cleaner.scheduleAtFixedRate(cleaningTask, delay, period); + } + + /** Method that returns the only instance of PinnedFilesCatalog. */ + public static VolatileAndJiTCatalog getInstance() { + + return cat; + } + + /** + * Checks whether the given file exists in the volatile table or not. + * + * @param filename + * @return true if there is antry for the given file in the volatilte table, + * false otherwise. + */ + public synchronized boolean exists(PFN pfn) { + + return dao.exists(pfn.getValue()); + } + + /** + * Method used to expire _all_ related entries in the JiT catalogue, that were setup during a PtG + * operation. The method is intended to be used by code handling srmAbort command. Notice that the + * Traverse on the parents is NOT removed! This is to accomodate for the use case of a user that + * has run many PtG on different SURLs but all contained in the same directory tree! In practice + * this method removes the R permission. If any entry does not exist, then nothing happens and a + * warning gets written in the logs; otherwise entries get their start time set to now, and the + * lifetime set to zero; in case more than one matching entry is found, a message gets written to + * the logs, and the updating continues anyway as explained. At this point, when the garbage + * collector wakes up the entries get cleanly handled (physical ACL is removed, catalog entry + * removed, etc.); or an earlier cleaning can be forced by invoking directly the purge mehod. The + * method returns FALSE in case an entry was not found or the supplied parameters were null, and + * TRUE otherwise. Yet keep in mind that it says nothing of whether the DB operation was + * successful or not. + */ + public synchronized boolean expireGetJiTs(PFN pfn, LocalUser localUser) { + + if (pfn != null && localUser != null) { + return expireJiT(pfn, localUser, FilesystemPermission.Read); + } + log.error( + "VolatileAndJiT CATALOG: programming bug! expireGetJiTs invoked " + + "on null attributes; pfn={} localUser={}", + pfn, + localUser); + return false; + } + + /** + * Method used to expire an entry in the JiT catalogue. The method is intended to be used by code + * handling srmAbort command. If the entry does not exist, then nothing happens and a warning gets + * written in the logs; otherwise the entry gets its start time set to now, and its lifetime set + * to zero; in case more than one matching entry is found, a message gets written to the logs, and + * the updating continues anyway as explained. At this point, when the garbage collector wakes up + * the entry is cleanly handled (physical ACL is removed, catalog entry removed, etc.); or an + * earlier cleaning can be forced by invoking directly the purge method. The method returns FALSE + * in case no entry was found or the supplied parameters were null, and TRUE otherwise. Yet keep + * in mind that is says nothing of whether the DB operation was successful or not. 
+ */ + public synchronized boolean expireJiT(PFN pfn, LocalUser localUser, FilesystemPermission acl) { + + if (pfn != null && localUser != null && acl != null) { + String fileName = pfn.getValue(); + int uid = localUser.getUid(); + int intacl = acl.getInt(); + // from the current time we remove 10 seconds because it was observed + // that when executing purge right after invoking this method, less + // than 1 second elapses, so no purging takes place at all since expiry + // is not yet reached! + // Seconds needed and not milliseconds! + long pinStart = (Calendar.getInstance().getTimeInMillis() / 1000) - 10; + long pinTime = 0; // set to zero the lifetime! + int n = dao.numberJiT(fileName, uid, intacl); + if (n == 0) { + log.warn( + "VolatileAndJiT CATALOG: expireJiT found no entry for ({}, {}, " + "{})!", + fileName, + uid, + intacl); + return false; + } + dao.forceUpdateJiT(fileName, uid, intacl, pinStart, pinTime); + if (n > 1) { + log.warn( + "VolatileAndJiT CATALOG: expireJiT found more than one entry " + + "for ({}, {}, {}); the catalogue could be corrupt!", + fileName, + uid, + intacl); + } + return true; + } + log.error( + "VolatileAndJiT CATALOG: programming bug! expireJiT invoked on " + + "null attributes; pfn={} localUser={} acl={}", + pfn, + localUser, + acl); + return false; + } + + /** + * Method used to expire _all_ related entries in the JiT catalogue, that were setup during a PtP + * operation. The method is intended to be used by code handling srmAbort command, and by + * srmPutDone. Notice that the Traverse on the parents is NOT removed! This is to accomodate for + * the use case of a user that has run many PtP on different SURLs but that are all contained in + * the same directory tree! In practice, this method removes R and W permissions. If any entry + * does not exist, then nothing happens and a warning gets written in the logs; otherwise entries + * get their start time set to now, and the lifetime set to zero; in case more than one matching + * entry is found, a message gets written to the logs, and the updating continues anyway as + * explained. At this point, when the garbage collector wakes up the entries get cleanly handled + * (physical ACL is removed, catalog entry removed, etc.); or an earlier cleaning can be forced by + * invoking directly the purge mehod. The method returns FALSE in case an entry was not found or + * the supplied parameters were null, and TRUE otherwise. Yet keep in mind that is says nothing of + * whether the DB operation was successful or not. + */ + public synchronized boolean expirePutJiTs(PFN pfn, LocalUser localUser) { + + if (pfn != null && localUser != null) { + return expireJiT(pfn, localUser, FilesystemPermission.Read) + && expireJiT(pfn, localUser, FilesystemPermission.Write); + } + + log.error( + "VolatileAndJiT CATALOG: programming bug! expirePutJiTs invoked " + + "on null attributes; pfn={} localUser={}", + pfn, + localUser); + return false; + } + + /** + * Method that purges the catalog, removing expired ACLs and deleting expired Volatile files. When + * Volatile entries expire, any realted JiT will automatically expire too, regardless of the + * specified pinLifetime: that is, fileLifetime wins over pinLifetime. WARNING! Notice that the + * catalogue DOES get cleaned up even if the physical removal of the ACL or erasing of the file + * fails. + */ + public synchronized void purge() { + + log.debug("VolatileAndJiT CATALOG! 
Executing purge!"); + Calendar rightNow = Calendar.getInstance(); + /** + * removes all expired entries from storm_pin and storm_track, returning two Collections: one + * with the PFN of Volatile files, and the other with PFN + GridUser couple of the entries that + * were just being tracked for the ACLs set up on them. + */ + Collection[] expired = dao.removeExpired(rightNow.getTimeInMillis() / 1000); + Collection expiredVolatile = expired[0]; + Collection expiredJiT = expired[1]; + if (expiredVolatile.size() == 0) { + log.debug("VolatileAndJiT CATALOG! No expired Volatile entries found."); + } else { + log.info( + "VolatileAndJiT CATALOG! Found and purged the following expired " + + "Volatile entries:\n {}", + volatileString(expired[0])); + } + if (expiredJiT.size() == 0) { + log.debug("VolatileAndJiT CATALOG! No JiT entries found."); + } else { + log.info( + "VolatileAndJiT CATALOG! Found and purged the following expired " + + "JiT ACLs entries:\n {}", + jitString(expired[1])); + } + // Remove ACLs + JiTData aux = null; + for (Iterator i = expiredJiT.iterator(); i.hasNext(); ) { + aux = (JiTData) i.next(); + int jitacl = aux.acl(); + String jitfile = aux.pfn(); + int jituid = aux.uid(); + int jitgid = aux.gid(); + try { + log.info( + "VolatileAndJiT CATALOG. Removing ACL {} on file {} for " + "user {},{}", + jitacl, + jitfile, + jituid, + jitgid); + LocalFile auxFile = + NamespaceDirector.getNamespace().resolveStoRIbyPFN(PFN.make(jitfile)).getLocalFile(); + LocalUser auxUser = new LocalUser(jituid, jitgid); + FilesystemPermission auxACL = new FilesystemPermission(jitacl); + + AclManager manager = AclManagerFS.getInstance(); + if (auxFile == null) { + log.warn("VolatileAndJiT CATALOG! Unable to setting up the ACL. " + "LocalFile is null!"); + } else { + try { + manager.revokeUserPermission(auxFile, auxUser, auxACL); + } catch (IllegalArgumentException e) { + log.error( + "Unable to revoke user permissions on the file. " + "IllegalArgumentException: {}", + e.getMessage(), + e); + } + } + } catch (Exception e) { + log.error( + "VolatileAndJiT CATALOG! Entry removed from Catalog, but " + + "physical ACL {} for user {}, could NOT be removed from {}", + jitacl, + jituid, + jitgid, + jitfile); + log.error("VolatileAndJiT CATALOG! {}", e.getMessage(), e); + } + } + // Delete files + String auxPFN = null; + for (Iterator i = expiredVolatile.iterator(); i.hasNext(); ) { + auxPFN = (String) i.next(); + try { + log.info("VolatileAndJiT CATALOG. Deleting file {}", auxPFN); + LocalFile auxFile = + NamespaceDirector.getNamespace().resolveStoRIbyPFN(PFN.make(auxPFN)).getLocalFile(); + boolean ok = auxFile.delete(); + if (!ok) { + throw new Exception("Java File deletion failed!"); + } + } catch (Exception e) { + log.error( + "VolatileAndJiT CATALOG! Entry removed from Catalog, but " + + "physical file {} could NOT be deleted!", + auxPFN); + log.error("VolatileAndJiT CATALOG! {}", e.getMessage(), e); + } + } + } + + /** + * Method used upon expiry of SRM_SPACE_AVAILABLE to remove all JiT entries in the DB table, + * related to the given PFN; Notice that _no_ distinction is made aboutthe specific user! This is + * because upon expiry of SRM_SPACE_AVAILABLE the file gets erased, so all JiTs on that file are + * automatically erased. This implies that all catalogue entries get removed. If no entries are + * present nothing happens. 
+ */ + public synchronized void removeAllJiTsOn(PFN pfn) { + + if (pfn != null) { + dao.removeAllJiTsOn(pfn.getValue()); + return; + } + log.error("VolatileAndJiT CATALOG: programming bug! removeAllJiTsOn " + "invoked on null pfn!"); + } + + /** + * Method used to remove a Volatile entry that matches the supplied pfn, from the DB. If null is + * supplied, an error message gets logged and nothing happens. If PFN is not found, nothing + * happens and _no_ message gets logged. + */ + public synchronized void removeVolatile(PFN pfn) { + + if (pfn != null) { + dao.removeVolatile(pfn.getValue()); + return; + } + log.warn("VolatileAndJiT CATALOG: programming bug! removeVolatile invoked " + "on null pfn!"); + } + + /** + * Method used to keep track of an ACL set up on a PFN; it needs the PFN, the LocalUser, the ACL + * and the desired pinLifeTime. If the 3-ple (PFN, ACL, LocalUser) is not present, it gets added; + * if it is already present, provided the new desired expiry occurs after the present one, it gets + * changed. If the supplied lifetime is zero, then a default value is used instead. If it is + * larger than a ceiling, that ceiling is used instead. The floor value in seconds can be set from + * the configuration file, with the property: pinLifetime.minimum While the ceiling value in + * seconds is set with: pinLifetime.maximum BEWARE: The intended use case is in both + * srmPrepareToGet and srmPrepareToPut, for the case of the _JiT_ security mechanism. The maximum + * is necessary because JiT ACLs cannot last longer than the amount of time the pool account is + * leased. Notice that for Volatile entries, a pinLifetime larger than the fileLifetime can be + * specified. However, when Volatile files expire any related JiTs automatically expire in + * anticipation! + */ + public synchronized void trackJiT( + PFN pfn, + LocalUser localUser, + FilesystemPermission acl, + Calendar start, + TLifeTimeInSeconds pinLifetime) { + + if (pfn != null && localUser != null && acl != null && start != null && pinLifetime != null) { + + String fileName = pfn.getValue(); + int uid = localUser.getUid(); + int gid = localUser.getPrimaryGid(); + int intacl = acl.getInt(); + // seconds needed and not milliseconds! + long pinStart = start.getTimeInMillis() / 1000; + long pinTime = validatePinLifetime(pinLifetime.value()); + int n = dao.numberJiT(fileName, uid, intacl); + if (n == 0) { + dao.addJiT(fileName, uid, gid, intacl, pinStart, pinTime); + } else { + dao.updateJiT(fileName, uid, intacl, pinStart, pinTime); + if (n > 1) { + log.warn( + "VolatileAndJiT CATALOG: More than one entry found for " + + "({}, {}, {}); the catalogue could be corrupt!", + fileName, + uid, + intacl); + } + } + return; + } + log.error( + "VolatileAndJiT CATALOG: programming bug! TrackACL invoked on " + + "null attributes; pfn={} localUser={} acl={} start={} pinLifetime={}", + pfn, + localUser, + acl, + start, + pinLifetime); + } + + /** + * Method that adds an entry to the catalog that keeps track of Volatile files. The PFN and the + * fileLifetime are needed. If no entry corresponding to the given PFN is found, a new one gets + * recorded. If the PFN is already present, then provided the new expiry (obtained by adding + * together current-time and requested-lifetime) exceeds the expiry in the catalog, the entry is + * updated. Otherwise nothing takes place. If the supplied fileLifetime is zero, then a default + * value is used instead. 
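A minimal sketch of the trackJiT call described above, as it might appear in PtG/PtP handling under the JiT security model; pfn, localUser and pinLifetime are placeholders, and Read is just one example permission.

  // Sketch: recording a JiT ACL after granting Read access in a PtG.
  VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance();
  Calendar start = Calendar.getInstance();

  catalog.trackJiT(pfn, localUser, FilesystemPermission.Read, start, pinLifetime);
  // A new (pfn, ACL, user) triple is inserted; an existing one has its expiry changed
  // only if the new expiry occurs after the present one, and the requested lifetime
  // is clamped between the configured floor and ceiling.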
This floor default value in seconds can be set from the configuration + * file, with the property: fileLifetime.default BEWARE: The intended use case for this method is + * during srmPrepareToPut. When files are uploaded into StoRM, they get specified as Volatile or + * Permanent. The PtP logic determines if the request is for a Volatile file and in that case it + * adds a new entry in the catalog. That is the purpose of this method. Any subsequent PtP call + * will just result in a modification of the expiry, provided the newer one lasts longer than the + * original one. Yet bear in mind that two or more PtP on the same file makes NO SENSE AT ALL! If + * any DB error occurs, then nothing gets added/updated and an error message gets logged. + */ + public synchronized void trackVolatile(PFN pfn, Calendar start, TLifeTimeInSeconds fileLifetime) { + + if (pfn != null && fileLifetime != null && start != null) { + + String fileName = pfn.getValue(); + long fileTime = fileLifetime.value(); + if (fileTime <= 0) { + fileTime = defaultFileLifetime; + } + long fileStart = start.getTimeInMillis() / 1000; // seconds needed and not + // milliseconds! + int n = dao.numberVolatile(fileName); + if (n == -1) { + log.error( + "VolatileAndJiT CATALOG! DB problem does not allow to count " + + "number of Volatile entries for {}! Volatile entry NOT processed!", + pfn); + } else if (n == 0) { + dao.addVolatile(fileName, fileStart, fileTime); + } else { + dao.updateVolatile(fileName, fileStart, fileTime); + if (n > 1) { + log.warn( + "VolatileAndJiT CATALOG: More than one entry found for {}; " + + "the catalogue could be corrupt!", + fileName); + } + } + return; + } + log.warn( + "VolatileAndJiT CATALOG: programming bug! volatileEntry invoked " + + "on null attributes; pfn={} start={} fileLifetime={}", + pfn, + start, + fileLifetime); + } + + public synchronized void setStartTime(PFN pfn, Calendar start) throws Exception { + + if (pfn == null || start == null) { + log.warn( + "VolatileAndJiT CATALOG: programming bug! volatileEntry invoked " + + "on null attributes; pfn={} start={}", + pfn, + start); + return; + } + + String fileName = pfn.getValue(); + // seconds needed and not milliseconds! + long fileStart = start.getTimeInMillis() / 1000; + int n = dao.numberVolatile(fileName); + if (n == -1) { + log.error( + "VolatileAndJiT CATALOG! DB problem does not allow to count " + + "number of Volatile entries for {}! Volatile entry NOT processed!", + pfn); + return; + } + if (n == 0) { + throw new Exception( + "Unable to update row volatile for pfn \'" + pfn + "\' , not on the database!"); + } + dao.updateVolatile(fileName, fileStart); + if (n > 1) { + log.warn( + "VolatileAndJiT CATALOG: More than one entry found for {}; " + + "the catalogue could be corrupt!", + fileName); + } + } + + /** + * Method that returns a List whose first element is a Calendar with the starting date and time of + * the lifetime of the supplied PFN, and whose second element is the TLifeTime the system is + * keeping the PFN. If no entry is found for the given PFN, an empty List is returned. Likewise if + * any DB error occurs. In any case, proper error messages get logged. Moreover notice that if for + * any reason the value for the Lifetime read from the DB does not allow creation of a valid + * TLifeTimeInSeconds, an Empty one is returned. Error messages in logs warn of the situation. 
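Since volatileInfoOn returns an untyped two-element List (the start Calendar first, then the lifetime), a caller has to unpack it positionally; a hedged sketch, with pfn a placeholder:

  // Sketch: consuming the [Calendar, TLifeTimeInSeconds] pair returned by volatileInfoOn.
  List info = VolatileAndJiTCatalog.getInstance().volatileInfoOn(pfn);
  if (info.isEmpty()) {
    // no entry for this PFN, or a DB error occurred (already logged by the catalog)
  } else {
    Calendar start = (Calendar) info.get(0);
    TLifeTimeInSeconds lifetime = (TLifeTimeInSeconds) info.get(1);
    // lifetime may be the empty TLifeTimeInSeconds if the stored value was not valid
  }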
+ */ + public synchronized List volatileInfoOn(PFN pfn) { + + ArrayList aux = new ArrayList(); + if (pfn == null) { + log.error( + "VolatileAndJiT CATALOG: programming bug! volatileInfoOn " + "invoked on null PFN!"); + return aux; + } + Collection c = dao.volatileInfoOn(pfn.getValue()); + if (c.size() != 2) { + return aux; + } + Iterator i = c.iterator(); + // start time + long startInMillis = i.next().longValue() * 1000; + Calendar auxcal = Calendar.getInstance(); + auxcal.setTimeInMillis(startInMillis); + aux.add(auxcal); + // lifeTime + long lifetimeInSeconds = ((Long) i.next()).longValue(); + TLifeTimeInSeconds auxLifeTime = TLifeTimeInSeconds.makeEmpty(); + try { + auxLifeTime = TLifeTimeInSeconds.make(lifetimeInSeconds, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + log.error( + "VolatileAndJiT CATALOG: programming bug! Retrieved long does " + + "not allow TLifeTimeCreation! long is: {}; error is: {}", + lifetimeInSeconds, + e.getMessage(), + e); + } + aux.add(auxLifeTime); + return aux; + } + + /** + * Private method used to return a String representation of the expired entries Collection of + * JiTData. + */ + private String jitString(Collection c) { + + if (c == null) { + return ""; + } + StringBuilder sb = new StringBuilder(); + sb.append("file,acl,uid,gid\n"); + JiTData aux = null; + for (Iterator i = c.iterator(); i.hasNext(); ) { + aux = i.next(); + sb.append(aux.pfn()); + sb.append(","); + sb.append(aux.acl()); + sb.append(","); + sb.append(aux.uid()); + sb.append(","); + sb.append(aux.gid()); + if (i.hasNext()) { + sb.append("\n"); + } + } + return sb.toString(); + } + + /** + * Private method that makes sure that the lifeTime of the request: (1) It is not less than a + * predetermined value: this check is needed because clients may omit to supply a value and some + * default one must be used; moreover, it is feared that if the requested lifetime is very low, + * such as 0 or a few seconds, there could be strange problems in having a file written and erased + * immediately. (2) It is not larger than a given ceiling; this is necessary because in the JiT + * model, the underlying system may decide to remove the pool account mappings; it is paramount + * that no ACLs remain set up for the now un-associated pool account. + */ + private long validatePinLifetime(long lifetime) { + + long duration = lifetime < floor ? floor : lifetime; // adjust for lifetime + // set to zero! + duration = duration <= ceiling ? duration : ceiling; // make sure lifetime + // is not longer than + // the maximum set! + return duration; + } + + /** + * Private method used to return a String representation of the expired entries Collection of pfn + * Strings. + */ + private String volatileString(Collection c) { + + if (c == null) { + return ""; + } + StringBuilder sb = new StringBuilder(); + for (Iterator i = c.iterator(); i.hasNext(); ) { + sb.append(i.next()); + if (i.hasNext()) { + sb.append(","); + } + } + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/catalogs/VolatileAndJiTDAO.java b/src/main/java/it/grid/storm/catalogs/VolatileAndJiTDAO.java index a117e805..7bf456b7 100644 --- a/src/main/java/it/grid/storm/catalogs/VolatileAndJiTDAO.java +++ b/src/main/java/it/grid/storm/catalogs/VolatileAndJiTDAO.java @@ -1,13 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs; import com.google.common.collect.Lists; - import it.grid.storm.config.Configuration; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -21,856 +18,810 @@ import java.util.List; import java.util.Timer; import java.util.TimerTask; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * DAO class for VolatileAndJiTCatalog: it has been specifically designed for - * MySQL. - * + * DAO class for VolatileAndJiTCatalog: it has been specifically designed for MySQL. + * * @author EGRID ICTP * @version 1.0 (based on old PinnedFilesDAO) * @date November, 2006 */ public class VolatileAndJiTDAO { - private static final Logger log = LoggerFactory.getLogger(VolatileAndJiTDAO.class); - - // The name of the class for the DB driver - private final String driver = Configuration.getInstance().getDBDriver(); - - // The URL of the DB - private final String url = Configuration.getInstance().getStormDbURL(); - - // The password for the DB - private final String password = Configuration.getInstance().getDBPassword(); - - // The name for the DB - private final String name = Configuration.getInstance().getDBUserName(); - - // Connection to DB - private Connection con = null; - - // instance of DAO - private static final VolatileAndJiTDAO dao = new VolatileAndJiTDAO(); - - // timer thread that will run a task to alert when reconnecting is necessary! - private Timer clock = null; - - // timer task that will update the boolean signaling that a reconnection is needed! - private TimerTask clockTask = null; - - // milliseconds that must pass before reconnecting to DB - private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; - - // initial delay in milliseconds before starting timer - private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; - - // boolean that tells whether reconnection is needed because of MySQL bug! - private boolean reconnect = false; - - private VolatileAndJiTDAO() { - - setUpConnection(); - clock = new Timer(); - clockTask = new TimerTask() { - - @Override - public void run() { - - reconnect = true; - } - }; // clock task - clock.scheduleAtFixedRate(clockTask, delay, period); - } - - /** - * Method that returns the only instance of VolatileAndJiTDAO. - */ - public static VolatileAndJiTDAO getInstance() { - - return dao; - } - - /** - * Method that inserts a new entry in the JiT table of the DB, consisting of - * the specified filename, the local user uid, the local user gid, the acl, - * the start time as expressed by UNIX epoch (seconds since 00:00:00 1 1 1970) - * and the number of seconds the jit must last. - * - * In the DB, the start time gets translated into DATE:TIME in order to make - * it more readable. pinLifetime remains in seconds. - */ - public void addJiT(String filename, int uid, int gid, int acl, long start, - long pinLifetime) { - - if (!checkConnection()) { - log - .error("VolatileAndJiTDAO. 
addJiT: unable to get a valid connection!"); - return; - } - String sql = "INSERT INTO jit(file,uid,gid,acl,start,pinLifetime) VALUES(?,?,?,?,FROM_UNIXTIME(?),?)"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - stmt.setInt(2, uid); - logWarnings(stmt.getWarnings()); - stmt.setInt(3, gid); - logWarnings(stmt.getWarnings()); - stmt.setInt(4, acl); - logWarnings(stmt.getWarnings()); - stmt.setLong(5, start); - logWarnings(stmt.getWarnings()); - stmt.setLong(6, pinLifetime); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. addJiT: {}", stmt); - stmt.execute(); - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in addJiT: {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that inserts a new entry in the Volatile table of the DB, consisting - * of the specified filename, the start time as expressed by UNIX epoch - * (seconds since 00:00:00 1 1 1970), and the number of seconds the file must - * be kept for. - * - * In the DB, the start time gets translated into DATE:TIME in order to make - * it more readable. pinLifetime remains in seconds. - */ - public void addVolatile(String filename, long start, long fileLifetime) { - - if (!checkConnection()) { - log - .error("VolatileAndJiTDAO. addVolatile: unable to get a valid connection!"); - return; - } - String sql = "INSERT INTO volatile(file,start,fileLifetime) VALUES(?,FROM_UNIXTIME(?),?)"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - stmt.setLong(2, start); - logWarnings(stmt.getWarnings()); - stmt.setLong(3, fileLifetime); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. addVolatile: {}", stmt); - stmt.execute(); - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in addVolatile: {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Checks whether the given file exists in the volatile table or not. - * - * @param filename - * @return true if there is antry for the given file in the - * volatilte table, false otherwise. - */ - public boolean exists(String filename) { - - if (!checkConnection()) { - log - .error("VolatileAndJiTDAO. exists: unable to get a valid connection!"); - return false; - } - String sql = "SELECT ID FROM volatile WHERE file=? LIMIT 1"; - PreparedStatement stmt = null; - ResultSet rs = null; - boolean result; - - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - - log.debug("VolatileAndJiTDAO - existsOnVolatile - {}", stmt); - - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - - if (rs.next()) { - result = true; - } else { - result = false; - } - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in existsOnVolatile: {}", - e.getMessage(), e); - result = false; - } finally { - close(rs); - close(stmt); - } - return result; - } - - /** - * Method that updates an existing entry in the JiT table of the DB, - * consisting of the specified filename, the uid and gid of the local user, - * the acl, the start time as expressed by UNIX epoch (seconds since 00:00:00 - * 1 1 1970), and the number of seconds the jit must last. - * - * In the DB, the start time gets translated into DATE:TIME in order to make - * it more readable. 
pinLifetime remains in seconds. - * - * This method _forces_ the update regardless of the fact that the new expiry - * lasts less than the current one! This method is intended to be used by - * expireJiT. - * - * Only start and pinLifetime get updated, while filename, uid, gid and acl, - * are used as criteria to select records. - */ - public void forceUpdateJiT(String filename, int uid, int acl, long start, - long pinLifetime) { - - if (!checkConnection()) { - log - .error("VolatileAndJiTDAO. forceUpdateJiT: unable to get a valid connection!"); - return; - } - String sql = "UPDATE jit " + "SET start=FROM_UNIXTIME(?), pinLifetime=? " - + "WHERE file=? AND uid=? AND acl=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setLong(1, start); - logWarnings(stmt.getWarnings()); - stmt.setLong(2, pinLifetime); - logWarnings(stmt.getWarnings()); - stmt.setString(3, filename); - logWarnings(stmt.getWarnings()); - stmt.setInt(4, uid); - logWarnings(stmt.getWarnings()); - stmt.setInt(5, acl); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. forceUpdateJiT: {}", stmt); - int n = stmt.executeUpdate(); - log.debug("VolatileAndJiTDAO. {} jit entries forced updated.", n); - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in forceUpdateJiT: {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that returns the number of entries in the catalogue, matching the - * given filename, uid and acl. - * - * Notice that in general there should be either one or none, and more should - * be taken as indication of catalogue corruption. - * - * -1 is returned if there are problems with the DB. - */ - public int numberJiT(String filename, int uid, int acl) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. numberJiT: unable to get a valid connection!"); - return -1; - } - String sql = "SELECT COUNT(ID) FROM jit WHERE file=? AND uid=? AND acl=?"; - PreparedStatement stmt = null; - ResultSet rs = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - stmt.setInt(2, uid); - logWarnings(stmt.getWarnings()); - stmt.setInt(3, acl); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. numberJiT: {}", stmt); - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - int n = -1; - if (rs.next()) { - n = rs.getInt(1); - } else { - log.error("VolatileAndJiTDAO! Unexpected situation in numberJiT: " - + "result set empty!"); - } - close(rs); - close(stmt); - return n; - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in numberJiT: {}", e.getMessage(), e); - close(rs); - close(stmt); - return -1; - } - } - - /** - * Method that returns the number of Volatile entries in the catalogue, for - * the given filename. - * - * Notice that in general there should be either one or none, and more should - * be taken as indication of catalogue corruption. - * - * -1 is returned if there are problems with the DB. - */ - public int numberVolatile(String filename) { - - if (!checkConnection()) { - log - .error("VolatileAndJiTDAO. 
numberVolatile: unable to get a valid connection!"); - return -1; - } - String sql = "SELECT COUNT(ID) FROM volatile WHERE file=?"; - PreparedStatement stmt = null; - ResultSet rs = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. numberVolatile: {}", stmt); - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - int n = -1; - if (rs.next()) { - n = rs.getInt(1); - } else { - log.error("VolatileAndJiTDAO! Unexpected situation in numberVolatile: " - + "result set empty!"); - } - close(rs); - close(stmt); - return n; - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in numberVolatile: {}", - e.getMessage(), e); - close(rs); - close(stmt); - return -1; - } - } - - /** - * Method that removes all entries in the JiT table of the DB, that match the - * specified filename. So this action takes place _regardless_ of the user - * that set up the ACL! - */ - public void removeAllJiTsOn(String filename) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. removeAllJiTsOn: unable to get a " - + "valid connection!"); - return; - } - String sql = "DELETE FROM jit WHERE file=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. removeJiT: {}", stmt); - int n = stmt.executeUpdate(); - log.debug("VolatileAndJiTDAO. removeJiT: {} entries removed", n); - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in removeJiT: {}", e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method used to remove all expired entries, both of pinned files and of jit - * ACLs. Also, when removing volatile entries, any jit entry that refers to - * those expired volatiles will also be removed. - * - * The method requires a long representing the time measured as UNIX EPOCH - * upon which to base the purging: entries are evaluated expired when compared - * to this date. - * - * The method returns an array of two Collections; Collection[0] contains - * expired volatile entries String PFNs, while Collection[1] contains - * JiTDataTO objects. Collection[1] also contains those entries that may not - * have expired yet, but since the respective Volatile is being removed they - * too must be removed automatically. - * - * WARNING! If any error occurs it gets logged, and an array of two empty - * Collection is returned. This operation is treated as a Transcation by the - * DB, so a Roll Back should return everything to its original state! - */ - public Collection[] removeExpired(long time) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. removeExpired: unable to get a valid connection!"); - // in case of any failure return an array of two empty Collection - return new Collection[] { new ArrayList(), new ArrayList() }; - } - - String vol = "SELECT ID,file FROM volatile WHERE (UNIX_TIMESTAMP(start)+fileLifetime 0) { - // there are expired volatile entries: adjust jit selection to include - // those SURLs too! - jit = jit + " OR file IN " + makeFileString(volat); - } - stmt = con.prepareStatement(jit); - logWarnings(con.getWarnings()); - stmt.setLong(1, time); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. 
removeExpired: {}", stmt); - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - Collection track = new ArrayList(); - Collection trackid = new ArrayList(); - JiTData aux = null; - while (rs.next()) { - trackid.add(new Long(rs.getLong("ID"))); - aux = new JiTData(rs.getString("file"), rs.getInt("acl"), - rs.getInt("uid"), rs.getInt("gid")); - track.add(aux); - } - int njit = trackid.size(); - close(rs); - close(stmt); - - // remove entries - Collection volcol = new ArrayList(); - Collection jitcol = new ArrayList(); - try { - con.setAutoCommit(false); // begin transaction! - logWarnings(con.getWarnings()); - // delete volatile - int deletedvol = 0; - if (nvolat > 0) { - delvol = delvol + makeIDString(volatid); - stmt = con.prepareStatement(delvol); - logWarnings(con.getWarnings()); - log.debug("VolatileAndJiTDAO. removeExpired: {}", stmt); - deletedvol = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - close(stmt); - } - // delete jits - int deletedjit = 0; - if (njit > 0) { - deljit = deljit + makeIDString(trackid); - stmt = con.prepareStatement(deljit); - logWarnings(con.getWarnings()); - log.debug("VolatileAndJiTDAO. removeExpired: {}", stmt); - deletedjit = stmt.executeUpdate(); - logWarnings(stmt.getWarnings()); - close(stmt); - } - con.commit(); - logWarnings(con.getWarnings()); - con.setAutoCommit(true); // end transaction! - logWarnings(con.getWarnings()); - log.debug("VolatileAndJiTDAO. Removed {} volatile catalogue entries " - + "and {} jit catalogue entries.", deletedvol, deletedjit); - volcol = volat; - jitcol = track; - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Unable to complete removeExpired... " - + "rolling back! {}", e.getMessage(), e); - rollback(con); - close(stmt); - } - - // return collections - return new Collection[] { volcol, jitcol }; - } catch (SQLException e) { - close(rs); - close(stmt); - log.error("VolatileAndJiTDAO! Unable to complete removeExpired! {}", - e.getMessage(), e); - // in case of any failure return an array of two empty Collection - return new Collection[] { new ArrayList(), new ArrayList() }; - } - } - - /** - * Method that removes all entries in the Volatile table of the DB, that match - * the specified filename. - */ - public void removeVolatile(String filename) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. removeVolatile: unable to get a valid " - + "connection!"); - return; - } - String sql = "DELETE FROM volatile WHERE file=?"; - PreparedStatement stmt = null; - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO. removeVolatile: {}", stmt); - int n = stmt.executeUpdate(); - log.debug("VolatileAndJiTDAO. removeVolatile: {} entries removed.", n); - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in removeVolatile: {}", - e.getMessage(), e); - } finally { - close(stmt); - } - } - - /** - * Method that updates an existing entry in the JiT table of the DB, - * consisting of the specified filename, the uid and gid of the local user, - * the acl, the start time as expressed by UNIX epoch (seconds since 00:00:00 - * 1 1 1970), and the number of seconds the jit must last. - * - * In the DB, the start time gets translated into DATE:TIME in order to make - * it more readable. pinLifetime remains in seconds. - * - * Entries get updated only if the new expiry calculated by adding start and - * pinLifetime, is larger than the existing one. 
- * - * Only start and pinLifetime get updated, while filename, uid, gid and acl, - * are used as criteria to select records. - */ - public void updateJiT(String filename, int uid, int acl, long start, - long pinLifetime) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. updateJiT: unable to get a valid " - + "connection!"); - return; - } - String sql = "UPDATE jit " - + "SET start=FROM_UNIXTIME(?), pinLifetime=? " - + "WHERE file=? AND uid=? AND acl=? AND (UNIX_TIMESTAMP(start)+pinLifetime volatileInfoOn(String filename) { - - if (!checkConnection()) { - log.error("VolatileAndJiTDAO. volatileInfoOn: unable to get a valid connection!"); - return Lists.newArrayList(); - } - String sql = "SELECT UNIX_TIMESTAMP(start), fileLifetime FROM volatile WHERE file=?"; - PreparedStatement stmt = null; - ResultSet rs = null; - List aux = Lists.newArrayList(); - try { - stmt = con.prepareStatement(sql); - logWarnings(con.getWarnings()); - stmt.setString(1, filename); - logWarnings(stmt.getWarnings()); - log.debug("VolatileAndJiTDAO - infoOnVolatile - {}", stmt); - rs = stmt.executeQuery(); - logWarnings(stmt.getWarnings()); - if (rs.next()) { - aux.add(rs.getLong("UNIX_TIMESTAMP(start)")); - aux.add(rs.getLong("fileLifetime")); - } else { - log.debug("VolatileAndJiTDAO! infoOnVolatile did not find {}", filename); - } - } catch (SQLException e) { - log.error("VolatileAndJiTDAO! Error in infoOnVolatile: {}", - e.getMessage(), e); - } finally { - close(rs); - close(stmt); - } - return aux; - } - - /** - * Auxiliary method that checks if time for resetting the connection has come, - * and eventually takes it down and up back again. - */ - private boolean checkConnection() { - - boolean response = true; - if (reconnect) { - log.debug("VolatileAndJiTDAO: reconnecting to DB. "); - takeDownConnection(); - response = setUpConnection(); - if (response) { - reconnect = false; - } - } - return response; - } - - /** - * Auxiliary method that closes a ResultSet and handles all possible - * exceptions. - */ - private void close(ResultSet rset) { - - if (rset != null) { - try { - rset.close(); - } catch (Exception e) { - log.error("VolatileAndJiTDAO! Unable to close ResultSet - Error: {}", - e.getMessage(), e); - } - } - } - - /** - * Auxiliary method that closes a Statement and handles all possible - * exceptions. - */ - private void close(Statement stmt) { - - if (stmt != null) { - try { - stmt.close(); - } catch (Exception e) { - log.error("VolatileAndJiTDAO! Unable to close Statement {} - Error: {}", - stmt.toString(), e.getMessage(), e); - } - } - } - - /** - * Auxiliary method used to log warnings. - */ - private void logWarnings(SQLWarning warning) { - - if (warning != null) { - log.debug("VolatileAndJiTDAO: {}", warning); - while ((warning = warning.getNextWarning()) != null) { - log.debug("VolatileAndJiTDAO: {}", warning); - } - } - } - - /** - * Method that returns a String containing all Files. - */ - private String makeFileString(Collection files) { - - StringBuilder sb = new StringBuilder("("); - for (Iterator i = files.iterator(); i.hasNext();) { - sb.append("'"); - sb.append((String) i.next()); - sb.append("'"); - if (i.hasNext()) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Method that returns a String containing all IDs. 
- */ - private String makeIDString(Collection rowids) { - - StringBuilder sb = new StringBuilder("("); - for (Iterator i = rowids.iterator(); i.hasNext();) { - sb.append(i.next()); - if (i.hasNext()) { - sb.append(","); - } - } - sb.append(")"); - return sb.toString(); - } - - /** - * Auxiliary method used to roll back a transaction and handles all possible - * exceptions. - */ - private void rollback(Connection con) { - - if (con != null) { - try { - con.rollback(); - logWarnings(con.getWarnings()); - log.error("VolatileAndJiTDAO! Roll back successful!"); - } catch (SQLException e3) { - log.error("VolatileAndJiTDAO! Roll back failed! {}", e3.getMessage(), e3); - } - } - } - - /** - * Auxiliary method that sets up the connection to the DB. - */ - private boolean setUpConnection() { - - boolean response = false; - try { - Class.forName(driver); - con = DriverManager.getConnection(url, name, password); - response = con.isValid(0); - logWarnings(con.getWarnings()); - } catch (Exception e) { - log.error("VolatileAndJiTDAO! Exception in setUpconnection! {}", - e.getMessage(), e); - } - return response; - } - - /** - * Auxiliary method that takes down a connection to the DB. - */ - private void takeDownConnection() { - - if (con != null) { - try { - con.close(); - } catch (Exception e) { - log.error("VolatileAndJiTDAO! Exception in takeDownConnection! {}", - e.getMessage(), e); - } - } - } + private static final Logger log = LoggerFactory.getLogger(VolatileAndJiTDAO.class); + + // The name of the class for the DB driver + private final String driver = Configuration.getInstance().getDBDriver(); + + // The URL of the DB + private final String url = Configuration.getInstance().getStormDbURL(); + + // The password for the DB + private final String password = Configuration.getInstance().getDBPassword(); + + // The name for the DB + private final String name = Configuration.getInstance().getDBUserName(); + + // Connection to DB + private Connection con = null; + + // instance of DAO + private static final VolatileAndJiTDAO dao = new VolatileAndJiTDAO(); + + // timer thread that will run a task to alert when reconnecting is necessary! + private Timer clock = null; + + // timer task that will update the boolean signaling that a reconnection is needed! + private TimerTask clockTask = null; + + // milliseconds that must pass before reconnecting to DB + private final long period = Configuration.getInstance().getDBReconnectPeriod() * 1000; + + // initial delay in milliseconds before starting timer + private final long delay = Configuration.getInstance().getDBReconnectDelay() * 1000; + + // boolean that tells whether reconnection is needed because of MySQL bug! + private boolean reconnect = false; + + private VolatileAndJiTDAO() { + + setUpConnection(); + clock = new Timer(); + clockTask = + new TimerTask() { + + @Override + public void run() { + + reconnect = true; + } + }; // clock task + clock.scheduleAtFixedRate(clockTask, delay, period); + } + + /** Method that returns the only instance of VolatileAndJiTDAO. */ + public static VolatileAndJiTDAO getInstance() { + + return dao; + } + + /** + * Method that inserts a new entry in the JiT table of the DB, consisting of the specified + * filename, the local user uid, the local user gid, the acl, the start time as expressed by UNIX + * epoch (seconds since 00:00:00 1 1 1970) and the number of seconds the jit must last. + * + *

In the DB, the start time gets translated into DATE:TIME in order to make it more readable. + * pinLifetime remains in seconds. + */ + public void addJiT(String filename, int uid, int gid, int acl, long start, long pinLifetime) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. addJiT: unable to get a valid connection!"); + return; + } + String sql = + "INSERT INTO jit(file,uid,gid,acl,start,pinLifetime) VALUES(?,?,?,?,FROM_UNIXTIME(?),?)"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + stmt.setInt(2, uid); + logWarnings(stmt.getWarnings()); + stmt.setInt(3, gid); + logWarnings(stmt.getWarnings()); + stmt.setInt(4, acl); + logWarnings(stmt.getWarnings()); + stmt.setLong(5, start); + logWarnings(stmt.getWarnings()); + stmt.setLong(6, pinLifetime); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. addJiT: {}", stmt); + stmt.execute(); + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in addJiT: {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method that inserts a new entry in the Volatile table of the DB, consisting of the specified + * filename, the start time as expressed by UNIX epoch (seconds since 00:00:00 1 1 1970), and the + * number of seconds the file must be kept for. + * + *

In the DB, the start time gets translated into DATE:TIME in order to make it more readable. + * pinLifetime remains in seconds. + */ + public void addVolatile(String filename, long start, long fileLifetime) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. addVolatile: unable to get a valid connection!"); + return; + } + String sql = "INSERT INTO volatile(file,start,fileLifetime) VALUES(?,FROM_UNIXTIME(?),?)"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + stmt.setLong(2, start); + logWarnings(stmt.getWarnings()); + stmt.setLong(3, fileLifetime); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. addVolatile: {}", stmt); + stmt.execute(); + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in addVolatile: {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Checks whether the given file exists in the volatile table or not. + * + * @param filename + * @return true if there is antry for the given file in the volatilte table, + * false otherwise. + */ + public boolean exists(String filename) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. exists: unable to get a valid connection!"); + return false; + } + String sql = "SELECT ID FROM volatile WHERE file=? LIMIT 1"; + PreparedStatement stmt = null; + ResultSet rs = null; + boolean result; + + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + + log.debug("VolatileAndJiTDAO - existsOnVolatile - {}", stmt); + + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + + if (rs.next()) { + result = true; + } else { + result = false; + } + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in existsOnVolatile: {}", e.getMessage(), e); + result = false; + } finally { + close(rs); + close(stmt); + } + return result; + } + + /** + * Method that updates an existing entry in the JiT table of the DB, consisting of the specified + * filename, the uid and gid of the local user, the acl, the start time as expressed by UNIX epoch + * (seconds since 00:00:00 1 1 1970), and the number of seconds the jit must last. + * + *
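These insert/update calls all take the start time as UNIX epoch seconds (not milliseconds), which the SQL then converts with FROM_UNIXTIME(?); a minimal caller sketch, with the path and lifetime as made-up placeholder values:

    // Start time must be epoch *seconds*; FROM_UNIXTIME(?) in the SQL turns it into a DATETIME.
    Calendar start = Calendar.getInstance();
    long startEpochSeconds = start.getTimeInMillis() / 1000;   // not milliseconds!
    long fileLifetimeSeconds = 3600;                            // placeholder: one hour
    VolatileAndJiTDAO.getInstance()
        .addVolatile("/example/path/file.dat", startEpochSeconds, fileLifetimeSeconds);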

In the DB, the start time gets translated into DATE:TIME in order to make it more readable. + * pinLifetime remains in seconds. + * + *

This method _forces_ the update regardless of the fact that the new expiry lasts less than + * the current one! This method is intended to be used by expireJiT. + * + *
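In other words, updateJiT (further below in this class) refuses to shorten an existing pin, while forceUpdateJiT overwrites it unconditionally, which is presumably why expireJiT relies on it; an illustrative contrast, where `dao` and all arguments are placeholders:

    // updateJiT only extends: if the stored expiry is already later, the row is left untouched.
    dao.updateJiT(fileName, uid, acl, startEpochSeconds, newPinLifetimeSeconds);
    // forceUpdateJiT rewrites start and pinLifetime regardless, even if that shortens the pin.
    dao.forceUpdateJiT(fileName, uid, acl, startEpochSeconds, newPinLifetimeSeconds);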

Only start and pinLifetime get updated, while filename, uid, gid and acl, are used as + * criteria to select records. + */ + public void forceUpdateJiT(String filename, int uid, int acl, long start, long pinLifetime) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. forceUpdateJiT: unable to get a valid connection!"); + return; + } + String sql = + "UPDATE jit " + + "SET start=FROM_UNIXTIME(?), pinLifetime=? " + + "WHERE file=? AND uid=? AND acl=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setLong(1, start); + logWarnings(stmt.getWarnings()); + stmt.setLong(2, pinLifetime); + logWarnings(stmt.getWarnings()); + stmt.setString(3, filename); + logWarnings(stmt.getWarnings()); + stmt.setInt(4, uid); + logWarnings(stmt.getWarnings()); + stmt.setInt(5, acl); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. forceUpdateJiT: {}", stmt); + int n = stmt.executeUpdate(); + log.debug("VolatileAndJiTDAO. {} jit entries forced updated.", n); + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in forceUpdateJiT: {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method that returns the number of entries in the catalogue, matching the given filename, uid + * and acl. + * + *

Notice that in general there should be either one or none, and more should be taken as + * indication of catalogue corruption. + * + *

-1 is returned if there are problems with the DB. + */ + public int numberJiT(String filename, int uid, int acl) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. numberJiT: unable to get a valid connection!"); + return -1; + } + String sql = "SELECT COUNT(ID) FROM jit WHERE file=? AND uid=? AND acl=?"; + PreparedStatement stmt = null; + ResultSet rs = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + stmt.setInt(2, uid); + logWarnings(stmt.getWarnings()); + stmt.setInt(3, acl); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. numberJiT: {}", stmt); + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + int n = -1; + if (rs.next()) { + n = rs.getInt(1); + } else { + log.error("VolatileAndJiTDAO! Unexpected situation in numberJiT: " + "result set empty!"); + } + close(rs); + close(stmt); + return n; + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in numberJiT: {}", e.getMessage(), e); + close(rs); + close(stmt); + return -1; + } + } + + /** + * Method that returns the number of Volatile entries in the catalogue, for the given filename. + * + *
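The count methods signal DB trouble with -1 rather than throwing, so callers typically branch on the return value before inserting or updating; a hedged sketch of that insert-or-update pattern (all variables are placeholders, not code from the patch):

    int n = VolatileAndJiTDAO.getInstance().numberJiT(fileName, uid, acl);
    if (n == -1) {
      // DB problem: the DAO has already logged it; leave the catalogue alone
    } else if (n == 0) {
      VolatileAndJiTDAO.getInstance()
          .addJiT(fileName, uid, gid, acl, startEpochSeconds, pinLifetimeSeconds);
    } else {
      VolatileAndJiTDAO.getInstance()
          .updateJiT(fileName, uid, acl, startEpochSeconds, pinLifetimeSeconds);
      if (n > 1) {
        // more than one row for the same (file, uid, acl) hints at catalogue corruption
      }
    }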

Notice that in general there should be either one or none, and more should be taken as + * indication of catalogue corruption. + * + *

-1 is returned if there are problems with the DB. + */ + public int numberVolatile(String filename) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. numberVolatile: unable to get a valid connection!"); + return -1; + } + String sql = "SELECT COUNT(ID) FROM volatile WHERE file=?"; + PreparedStatement stmt = null; + ResultSet rs = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. numberVolatile: {}", stmt); + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + int n = -1; + if (rs.next()) { + n = rs.getInt(1); + } else { + log.error( + "VolatileAndJiTDAO! Unexpected situation in numberVolatile: " + "result set empty!"); + } + close(rs); + close(stmt); + return n; + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in numberVolatile: {}", e.getMessage(), e); + close(rs); + close(stmt); + return -1; + } + } + + /** + * Method that removes all entries in the JiT table of the DB, that match the specified filename. + * So this action takes place _regardless_ of the user that set up the ACL! + */ + public void removeAllJiTsOn(String filename) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. removeAllJiTsOn: unable to get a " + "valid connection!"); + return; + } + String sql = "DELETE FROM jit WHERE file=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. removeJiT: {}", stmt); + int n = stmt.executeUpdate(); + log.debug("VolatileAndJiTDAO. removeJiT: {} entries removed", n); + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in removeJiT: {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method used to remove all expired entries, both of pinned files and of jit ACLs. Also, when + * removing volatile entries, any jit entry that refers to those expired volatiles will also be + * removed. + * + *

The method requires a long representing the time measured as UNIX EPOCH upon which to base + * the purging: entries are evaluated expired when compared to this date. + * + *

The method returns an array of two Collections; Collection[0] contains expired volatile + * entries String PFNs, while Collection[1] contains JiTDataTO objects. Collection[1] also + * contains those entries that may not have expired yet, but since the respective Volatile is + * being removed they too must be removed automatically. + * + *
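Because the two collections come back untyped, a caller has to downcast them; a sketch of how the purge result might be consumed (the surrounding cleanup logic is assumed, not shown in the patch):

    long nowEpochSeconds = System.currentTimeMillis() / 1000;   // purge relative to "now"
    Collection[] expired = VolatileAndJiTDAO.getInstance().removeExpired(nowEpochSeconds);
    @SuppressWarnings("unchecked")
    Collection<String> expiredPfns = (Collection<String>) expired[0];    // expired volatile PFNs
    @SuppressWarnings("unchecked")
    Collection<JiTData> expiredJits = (Collection<JiTData>) expired[1];  // ACLs to tear down
    // expiredJits may also hold not-yet-expired entries whose backing volatile row was removed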

WARNING! If any error occurs it gets logged, and an array of two empty Collection is + * returned. This operation is treated as a Transcation by the DB, so a Roll Back should return + * everything to its original state! + */ + public Collection[] removeExpired(long time) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. removeExpired: unable to get a valid connection!"); + // in case of any failure return an array of two empty Collection + return new Collection[] {new ArrayList(), new ArrayList()}; + } + + String vol = "SELECT ID,file FROM volatile WHERE (UNIX_TIMESTAMP(start)+fileLifetime 0) { + // there are expired volatile entries: adjust jit selection to include + // those SURLs too! + jit = jit + " OR file IN " + makeFileString(volat); + } + stmt = con.prepareStatement(jit); + logWarnings(con.getWarnings()); + stmt.setLong(1, time); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. removeExpired: {}", stmt); + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + Collection track = new ArrayList(); + Collection trackid = new ArrayList(); + JiTData aux = null; + while (rs.next()) { + trackid.add(new Long(rs.getLong("ID"))); + aux = + new JiTData(rs.getString("file"), rs.getInt("acl"), rs.getInt("uid"), rs.getInt("gid")); + track.add(aux); + } + int njit = trackid.size(); + close(rs); + close(stmt); + + // remove entries + Collection volcol = new ArrayList(); + Collection jitcol = new ArrayList(); + try { + con.setAutoCommit(false); // begin transaction! + logWarnings(con.getWarnings()); + // delete volatile + int deletedvol = 0; + if (nvolat > 0) { + delvol = delvol + makeIDString(volatid); + stmt = con.prepareStatement(delvol); + logWarnings(con.getWarnings()); + log.debug("VolatileAndJiTDAO. removeExpired: {}", stmt); + deletedvol = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + close(stmt); + } + // delete jits + int deletedjit = 0; + if (njit > 0) { + deljit = deljit + makeIDString(trackid); + stmt = con.prepareStatement(deljit); + logWarnings(con.getWarnings()); + log.debug("VolatileAndJiTDAO. removeExpired: {}", stmt); + deletedjit = stmt.executeUpdate(); + logWarnings(stmt.getWarnings()); + close(stmt); + } + con.commit(); + logWarnings(con.getWarnings()); + con.setAutoCommit(true); // end transaction! + logWarnings(con.getWarnings()); + log.debug( + "VolatileAndJiTDAO. Removed {} volatile catalogue entries " + + "and {} jit catalogue entries.", + deletedvol, + deletedjit); + volcol = volat; + jitcol = track; + } catch (SQLException e) { + log.error( + "VolatileAndJiTDAO! Unable to complete removeExpired... " + "rolling back! {}", + e.getMessage(), + e); + rollback(con); + close(stmt); + } + + // return collections + return new Collection[] {volcol, jitcol}; + } catch (SQLException e) { + close(rs); + close(stmt); + log.error("VolatileAndJiTDAO! Unable to complete removeExpired! {}", e.getMessage(), e); + // in case of any failure return an array of two empty Collection + return new Collection[] {new ArrayList(), new ArrayList()}; + } + } + + /** + * Method that removes all entries in the Volatile table of the DB, that match the specified + * filename. + */ + public void removeVolatile(String filename) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. 
removeVolatile: unable to get a valid " + "connection!"); + return; + } + String sql = "DELETE FROM volatile WHERE file=?"; + PreparedStatement stmt = null; + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO. removeVolatile: {}", stmt); + int n = stmt.executeUpdate(); + log.debug("VolatileAndJiTDAO. removeVolatile: {} entries removed.", n); + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in removeVolatile: {}", e.getMessage(), e); + } finally { + close(stmt); + } + } + + /** + * Method that updates an existing entry in the JiT table of the DB, consisting of the specified + * filename, the uid and gid of the local user, the acl, the start time as expressed by UNIX epoch + * (seconds since 00:00:00 1 1 1970), and the number of seconds the jit must last. + * + *

In the DB, the start time gets translated into DATE:TIME in order to make it more readable. + * pinLifetime remains in seconds. + * + *

Entries get updated only if the new expiry calculated by adding start and pinLifetime, is + * larger than the existing one. + * + *
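Expressed outside SQL, the rule amounts to the comparison below, with all values in epoch seconds; this is a paraphrase of the WHERE-clause guard, not the literal query, and the variable names are placeholders:

    // update only when the requested expiry (new start + new pinLifetime)
    // is later than the stored one
    long storedExpiry = storedStartEpochSeconds + storedPinLifetimeSeconds;
    long requestedExpiry = newStartEpochSeconds + newPinLifetimeSeconds;
    boolean rowIsUpdated = requestedExpiry > storedExpiry;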

Only start and pinLifetime get updated, while filename, uid, gid and acl, are used as + * criteria to select records. + */ + public void updateJiT(String filename, int uid, int acl, long start, long pinLifetime) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. updateJiT: unable to get a valid " + "connection!"); + return; + } + String sql = + "UPDATE jit " + + "SET start=FROM_UNIXTIME(?), pinLifetime=? " + + "WHERE file=? AND uid=? AND acl=? AND (UNIX_TIMESTAMP(start)+pinLifetimeIn the DB, the start time gets translated into DATE:TIME in order to make it more readable. + * pinLifetime remains in seconds. + * + *

Entries get updated only if the new expiry calculated by adding start and fileLifetime, is + * larger than the existing one. + */ + public void updateVolatile(String filename, long start, long fileLifetime) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. updateVolatile: unable to get a valid " + "connection!"); + return; + } + String sql = + "UPDATE volatile " + + "SET file=?, start=FROM_UNIXTIME(?), fileLifetime=? " + + "WHERE file=? AND (UNIX_TIMESTAMP(start)+fileLifetimeThe two long are returned inside a List: the first one is the start time expressed in Unix + * epoch; the second long is the lifetime expressed in seconds. + * + *
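A caller of this DAO-level lookup therefore gets back either two Longs or nothing; a minimal sketch, assuming Long elements as the javadoc describes and using a placeholder file name:

    List info = VolatileAndJiTDAO.getInstance().volatileInfoOn("/example/path/file.dat");
    if (info.size() == 2) {
      long startEpochSeconds = ((Long) info.get(0)).longValue();
      long fileLifetimeSeconds = ((Long) info.get(1)).longValue();
      // expired when start + lifetime lies in the past
      boolean expired = startEpochSeconds + fileLifetimeSeconds < System.currentTimeMillis() / 1000;
    }
    // an empty list means no volatile entry for the file, or a DB error already logged by the DAO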

In case no entry is found or there are errors, an empty List is returned and proper error + * messagges get logged. + */ + public List volatileInfoOn(String filename) { + + if (!checkConnection()) { + log.error("VolatileAndJiTDAO. volatileInfoOn: unable to get a valid connection!"); + return Lists.newArrayList(); + } + String sql = "SELECT UNIX_TIMESTAMP(start), fileLifetime FROM volatile WHERE file=?"; + PreparedStatement stmt = null; + ResultSet rs = null; + List aux = Lists.newArrayList(); + try { + stmt = con.prepareStatement(sql); + logWarnings(con.getWarnings()); + stmt.setString(1, filename); + logWarnings(stmt.getWarnings()); + log.debug("VolatileAndJiTDAO - infoOnVolatile - {}", stmt); + rs = stmt.executeQuery(); + logWarnings(stmt.getWarnings()); + if (rs.next()) { + aux.add(rs.getLong("UNIX_TIMESTAMP(start)")); + aux.add(rs.getLong("fileLifetime")); + } else { + log.debug("VolatileAndJiTDAO! infoOnVolatile did not find {}", filename); + } + } catch (SQLException e) { + log.error("VolatileAndJiTDAO! Error in infoOnVolatile: {}", e.getMessage(), e); + } finally { + close(rs); + close(stmt); + } + return aux; + } + + /** + * Auxiliary method that checks if time for resetting the connection has come, and eventually + * takes it down and up back again. + */ + private boolean checkConnection() { + + boolean response = true; + if (reconnect) { + log.debug("VolatileAndJiTDAO: reconnecting to DB. "); + takeDownConnection(); + response = setUpConnection(); + if (response) { + reconnect = false; + } + } + return response; + } + + /** Auxiliary method that closes a ResultSet and handles all possible exceptions. */ + private void close(ResultSet rset) { + + if (rset != null) { + try { + rset.close(); + } catch (Exception e) { + log.error("VolatileAndJiTDAO! Unable to close ResultSet - Error: {}", e.getMessage(), e); + } + } + } + + /** Auxiliary method that closes a Statement and handles all possible exceptions. */ + private void close(Statement stmt) { + + if (stmt != null) { + try { + stmt.close(); + } catch (Exception e) { + log.error( + "VolatileAndJiTDAO! Unable to close Statement {} - Error: {}", + stmt.toString(), + e.getMessage(), + e); + } + } + } + + /** Auxiliary method used to log warnings. */ + private void logWarnings(SQLWarning warning) { + + if (warning != null) { + log.debug("VolatileAndJiTDAO: {}", warning); + while ((warning = warning.getNextWarning()) != null) { + log.debug("VolatileAndJiTDAO: {}", warning); + } + } + } + + /** Method that returns a String containing all Files. */ + private String makeFileString(Collection files) { + + StringBuilder sb = new StringBuilder("("); + for (Iterator i = files.iterator(); i.hasNext(); ) { + sb.append("'"); + sb.append((String) i.next()); + sb.append("'"); + if (i.hasNext()) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** Method that returns a String containing all IDs. */ + private String makeIDString(Collection rowids) { + + StringBuilder sb = new StringBuilder("("); + for (Iterator i = rowids.iterator(); i.hasNext(); ) { + sb.append(i.next()); + if (i.hasNext()) { + sb.append(","); + } + } + sb.append(")"); + return sb.toString(); + } + + /** Auxiliary method used to roll back a transaction and handles all possible exceptions. */ + private void rollback(Connection con) { + + if (con != null) { + try { + con.rollback(); + logWarnings(con.getWarnings()); + log.error("VolatileAndJiTDAO! Roll back successful!"); + } catch (SQLException e3) { + log.error("VolatileAndJiTDAO! Roll back failed! 
{}", e3.getMessage(), e3); + } + } + } + + /** Auxiliary method that sets up the connection to the DB. */ + private boolean setUpConnection() { + + boolean response = false; + try { + Class.forName(driver); + con = DriverManager.getConnection(url, name, password); + response = con.isValid(0); + logWarnings(con.getWarnings()); + } catch (Exception e) { + log.error("VolatileAndJiTDAO! Exception in setUpconnection! {}", e.getMessage(), e); + } + return response; + } + + /** Auxiliary method that takes down a connection to the DB. */ + private void takeDownConnection() { + + if (con != null) { + try { + con.close(); + } catch (Exception e) { + log.error("VolatileAndJiTDAO! Exception in takeDownConnection! {}", e.getMessage(), e); + } + } + } } diff --git a/src/main/java/it/grid/storm/catalogs/surl/DelegatingSURLStatusManager.java b/src/main/java/it/grid/storm/catalogs/surl/DelegatingSURLStatusManager.java index cde43a87..d15ad25c 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/DelegatingSURLStatusManager.java +++ b/src/main/java/it/grid/storm/catalogs/surl/DelegatingSURLStatusManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; @@ -9,7 +8,6 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.util.List; import java.util.Map; @@ -23,66 +21,66 @@ public DelegatingSURLStatusManager(SURLStatusManager delegate) { } @Override - public boolean abortAllGetRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllGetRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { return delegate.abortAllGetRequestsForSURL(user, surl, explanation); } @Override - public boolean abortAllPutRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllPutRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { return delegate.abortAllPutRequestsForSURL(user, surl, explanation); } @Override - public boolean abortRequest(GridUserInterface user, TRequestToken token, - String explanation) { + public boolean abortRequest(GridUserInterface user, TRequestToken token, String explanation) { return delegate.abortRequest(user, token, explanation); } @Override - public boolean abortRequestForSURL(GridUserInterface user, TRequestToken token, - TSURL surl, String explanation) { + public boolean abortRequestForSURL( + GridUserInterface user, TRequestToken token, TSURL surl, String explanation) { return delegate.abortRequestForSURL(user, token, surl, explanation); } @Override - public boolean failRequestForSURL(GridUserInterface user, TRequestToken token, - TSURL surl, TStatusCode code, String explanation) { + public boolean failRequestForSURL( + GridUserInterface user, + TRequestToken token, + TSURL surl, + TStatusCode code, + String explanation) { return delegate.failRequestForSURL(user, token, surl, code, explanation); } @Override public Map getPinnedSURLsForUser( - GridUserInterface user, List surls) { + GridUserInterface user, List surls) { return delegate.getPinnedSURLsForUser(user, surls); - } @Override public Map getPinnedSURLsForUser( - GridUserInterface user, TRequestToken token, List surls) { + GridUserInterface user, TRequestToken token, List surls) { return 
delegate.getPinnedSURLsForUser(user, token, surls); } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token) { + public Map getSURLStatuses(GridUserInterface user, TRequestToken token) { return delegate.getSURLStatuses(user, token); } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token, - List surls) { + public Map getSURLStatuses( + GridUserInterface user, TRequestToken token, List surls) { return delegate.getSURLStatuses(user, token, surls); } diff --git a/src/main/java/it/grid/storm/catalogs/surl/InMemorySURLStatusManager.java b/src/main/java/it/grid/storm/catalogs/surl/InMemorySURLStatusManager.java index bbe0288f..88cd40ca 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/InMemorySURLStatusManager.java +++ b/src/main/java/it/grid/storm/catalogs/surl/InMemorySURLStatusManager.java @@ -1,9 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; +import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; + import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; @@ -12,59 +13,50 @@ import it.grid.storm.synchcall.surl.SURLStatusStore; import it.grid.storm.synchcall.surl.SURLStatusStoreIF; import it.grid.storm.synchcall.surl.UnknownSurlException; - -import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; - import java.util.Collection; import java.util.EnumSet; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class InMemorySURLStatusManager extends DelegatingSURLStatusManager { - public static final Logger LOGGER = LoggerFactory - .getLogger(InMemorySURLStatusManager.class); + public static final Logger LOGGER = LoggerFactory.getLogger(InMemorySURLStatusManager.class); - private static final EnumSet busyStatuses = EnumSet - .of(TStatusCode.SRM_SPACE_AVAILABLE); + private static final EnumSet busyStatuses = + EnumSet.of(TStatusCode.SRM_SPACE_AVAILABLE); - private static final EnumSet pinnedStatuses = EnumSet - .of(TStatusCode.SRM_FILE_PINNED); + private static final EnumSet pinnedStatuses = + EnumSet.of(TStatusCode.SRM_FILE_PINNED); public InMemorySURLStatusManager(SURLStatusManager delegate) { super(delegate); - } @Override - public boolean abortAllGetRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllGetRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; boolean someRequestAborted = (store.abortAllRequestForSURL(surl) != 0); - return (someRequestAborted || super.abortAllGetRequestsForSURL(user, surl, - explanation)); + return (someRequestAborted || super.abortAllGetRequestsForSURL(user, surl, explanation)); } @Override - public boolean abortAllPutRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllPutRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { final SURLStatusStoreIF store = SURLStatusStore.INSTANCE; boolean someRequestAborted = (store.abortAllRequestForSURL(surl) != 0); - return (someRequestAborted || super.abortAllPutRequestsForSURL(user, surl, - explanation)); + return (someRequestAborted || super.abortAllPutRequestsForSURL(user, surl, explanation)); } @Override 
- public boolean abortRequest(GridUserInterface user, TRequestToken token, - String explanation) { + public boolean abortRequest(GridUserInterface user, TRequestToken token, String explanation) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; @@ -73,22 +65,20 @@ public boolean abortRequest(GridUserInterface user, TRequestToken token, try { - return (store.update(token, new TReturnStatus(TStatusCode.SRM_ABORTED, - explanation)) != 0); + return (store.update(token, new TReturnStatus(TStatusCode.SRM_ABORTED, explanation)) != 0); } catch (UnknownSurlException e) { LOGGER.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } - } return super.abortRequest(user, token, explanation); } @Override - public boolean abortRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, String explanation) { + public boolean abortRequestForSURL( + GridUserInterface user, TRequestToken token, TSURL surl, String explanation) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; @@ -109,14 +99,22 @@ public boolean abortRequestForSURL(GridUserInterface user, } @Override - public boolean failRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, TStatusCode code, String explanation) { + public boolean failRequestForSURL( + GridUserInterface user, + TRequestToken token, + TSURL surl, + TStatusCode code, + String explanation) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; if (store.hasEntryForToken(token)) { - LOGGER.debug("failRequestForSURL in memory: token={}, surl={}, code={}, " - + "explanation={}", token, surl, code, explanation); + LOGGER.debug( + "failRequestForSURL in memory: token={}, surl={}, code={}, " + "explanation={}", + token, + surl, + code, + explanation); try { @@ -128,24 +126,26 @@ public boolean failRequestForSURL(GridUserInterface user, } } - LOGGER.debug("failRequestForSURL on DB: token={}, surl={}, code={}, " - + "explanation={}", token, surl, code, explanation); + LOGGER.debug( + "failRequestForSURL on DB: token={}, surl={}, code={}, " + "explanation={}", + token, + surl, + code, + explanation); return super.failRequestForSURL(user, token, surl, code, explanation); } @Override public Map getPinnedSURLsForUser( - GridUserInterface user, List surls) { + GridUserInterface user, List surls) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; - Map surlsMap = store.getPinnedSURLsForUser(user, - null, surls); + Map surlsMap = store.getPinnedSURLsForUser(user, null, surls); if (!surlsMap.isEmpty()) { - LOGGER.debug("getPinnedSURLsForUser user={}, surls={} got {}", user, - surls, surlsMap); + LOGGER.debug("getPinnedSURLsForUser user={}, surls={} got {}", user, surls, surlsMap); return surlsMap; } @@ -154,16 +154,15 @@ public Map getPinnedSURLsForUser( @Override public Map getPinnedSURLsForUser( - GridUserInterface user, TRequestToken token, List surls) { + GridUserInterface user, TRequestToken token, List surls) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; - Map surlsMap = store.getPinnedSURLsForUser(user, - token, surls); + Map surlsMap = store.getPinnedSURLsForUser(user, token, surls); if (!surlsMap.isEmpty()) { - LOGGER.debug("getPinnedSURLsForUser user={}, token={}, surls={} got {}", - user, surls, surlsMap); + LOGGER.debug( + "getPinnedSURLsForUser user={}, token={}, surls={} got {}", user, surls, surlsMap); return surlsMap; } @@ -171,8 +170,7 @@ public Map getPinnedSURLsForUser( } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token) { + public Map getSURLStatuses(GridUserInterface 
user, TRequestToken token) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; if (store.hasEntryForToken(token)) { @@ -185,26 +183,23 @@ public Map getSURLStatuses(GridUserInterface user, } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token, - List surls) { + public Map getSURLStatuses( + GridUserInterface user, TRequestToken token, List surls) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; if (store.hasEntryForToken(token)) { - LOGGER.debug("getSURLStatuses from memory for token {} and SURLs {}", - token, surls); - return store.getSurlStatuses(user,token, surls); + LOGGER.debug("getSURLStatuses from memory for token {} and SURLs {}", token, surls); + return store.getSurlStatuses(user, token, surls); } - LOGGER.debug("getSURLStatuses from DB for token {} and SURLs {}", token, - surls); + LOGGER.debug("getSURLStatuses from DB for token {} and SURLs {}", token, surls); return super.getSURLStatuses(user, token, surls); } private Map getSURLStatusesExcludingToken( - TRequestToken token, TSURL surl) { + TRequestToken token, TSURL surl) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; @@ -220,11 +215,9 @@ private Map getSURLStatusesExcludingToken( } return statusMap; - } - private Collection getSURLStatusList(GridUserInterface user, - TSURL surl) { + private Collection getSURLStatusList(GridUserInterface user, TSURL surl) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; @@ -244,8 +237,8 @@ private Collection getSURLStatusList(GridUserInterface user, @Override public boolean isSURLBusy(TRequestToken requestTokenToExclude, TSURL surl) { - final Map statusMap = getSURLStatusesExcludingToken( - requestTokenToExclude, surl); + final Map statusMap = + getSURLStatusesExcludingToken(requestTokenToExclude, surl); if (statusMap != null && !statusMap.isEmpty()) { @@ -264,13 +257,12 @@ public boolean isSURLBusy(TRequestToken requestTokenToExclude, TSURL surl) { } return busyOnDB; - } @Override public boolean isSURLBusy(TSURL surl) { - final Collection statusList = getSURLStatusList(null,surl); + final Collection statusList = getSURLStatusList(null, surl); if (statusList != null && (!statusList.isEmpty())) { @@ -298,8 +290,7 @@ private boolean isSURLBusy(TSURL surl, Collection statusList) { return false; } - private boolean isSURLBusy(TSURL surl, - Map statusMap) { + private boolean isSURLBusy(TSURL surl, Map statusMap) { for (TReturnStatus status : statusMap.values()) { if (busyStatuses.contains(status.getStatusCode())) { @@ -312,7 +303,7 @@ private boolean isSURLBusy(TSURL surl, @Override public boolean isSURLPinned(TSURL surl) { - final Collection statusList = getSURLStatusList(null,surl); + final Collection statusList = getSURLStatusList(null, surl); if (statusList != null && !statusList.isEmpty()) { @@ -351,14 +342,17 @@ public int markSURLsReadyForRead(TRequestToken token, List surls) { LOGGER.debug("PutDone on SURLs on in memory cache for token {}", token); try { - return store.checkedUpdate(token, surls, TStatusCode.SRM_SPACE_AVAILABLE, - TStatusCode.SRM_SUCCESS, "Put done. SURL ready."); + return store.checkedUpdate( + token, + surls, + TStatusCode.SRM_SPACE_AVAILABLE, + TStatusCode.SRM_SUCCESS, + "Put done. 
SURL ready."); } catch (Throwable e) { LOGGER.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); - } } @@ -371,8 +365,10 @@ public void releaseSURLs(GridUserInterface user, List surls) { SURLStatusStoreIF store = SURLStatusStore.INSTANCE; - boolean statusUpdated = (store.checkedUpdate(user, surls, - TStatusCode.SRM_FILE_PINNED, TStatusCode.SRM_RELEASED, "File released") != 0); + boolean statusUpdated = + (store.checkedUpdate( + user, surls, TStatusCode.SRM_FILE_PINNED, TStatusCode.SRM_RELEASED, "File released") + != 0); if (statusUpdated) { LOGGER.debug("SURLs released in memory cache for user {}", user); @@ -381,7 +377,6 @@ public void releaseSURLs(GridUserInterface user, List surls) { LOGGER.debug("Releasing SURLs on DB for user {}", user); super.releaseSURLs(user, surls); - } @Override @@ -394,8 +389,12 @@ public void releaseSURLs(TRequestToken token, List surls) { try { - store.checkedUpdate(token, surls, TStatusCode.SRM_FILE_PINNED, - TStatusCode.SRM_RELEASED, "File released succesfully."); + store.checkedUpdate( + token, + surls, + TStatusCode.SRM_FILE_PINNED, + TStatusCode.SRM_RELEASED, + "File released succesfully."); return; @@ -403,13 +402,10 @@ public void releaseSURLs(TRequestToken token, List surls) { LOGGER.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); - } - } LOGGER.debug("Releasing SURLs on DB for token {}", token); super.releaseSURLs(token, surls); } - } diff --git a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusDAO.java b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusDAO.java index 088641b3..4680601d 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusDAO.java +++ b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; @@ -17,7 +16,6 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -27,17 +25,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SURLStatusDAO { - public static final Logger LOGGER = LoggerFactory - .getLogger(SURLStatusDAO.class); + public static final Logger LOGGER = LoggerFactory.getLogger(SURLStatusDAO.class); - public boolean abortActivePtGsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortActivePtGsForSURL(GridUserInterface user, TSURL surl, String explanation) { surlSanityChecks(surl); @@ -47,12 +42,13 @@ public boolean abortActivePtGsForSURL(GridUserInterface user, TSURL surl, try { con = getConnection(); - String query = "UPDATE status_Get sg " - + "JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=20, rq.status=20, sg.explanation=? " - + "WHERE rg.sourceSURL = ? and rg.sourceSURL_uniqueID = ? " - + "AND (sg.statusCode=22 OR sg.statusCode=17) "; + String query = + "UPDATE status_Get sg " + + "JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=20, rq.status=20, sg.explanation=? " + + "WHERE rg.sourceSURL = ? 
and rg.sourceSURL_uniqueID = ? " + + "AND (sg.statusCode=22 OR sg.statusCode=17) "; if (user != null) { query += "AND rq.client_dn = ?"; @@ -68,14 +64,12 @@ public boolean abortActivePtGsForSURL(GridUserInterface user, TSURL surl, } final int updateCount = stat.executeUpdate(); - LOGGER.debug("abortActivePtGsForSURL: surl={}, numOfAbortedRequests={}", - surl, updateCount); + LOGGER.debug("abortActivePtGsForSURL: surl={}, numOfAbortedRequests={}", surl, updateCount); return (updateCount != 0); } catch (SQLException e) { - String msg = String.format("abortActivePtGsForSURL: SQL error: %s", - e.getMessage()); + String msg = String.format("abortActivePtGsForSURL: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); @@ -84,11 +78,9 @@ public boolean abortActivePtGsForSURL(GridUserInterface user, TSURL surl, closeStatetement(stat); closeConnection(con); } - } - public boolean abortActivePtPsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortActivePtPsForSURL(GridUserInterface user, TSURL surl, String explanation) { surlSanityChecks(surl); @@ -98,12 +90,13 @@ public boolean abortActivePtPsForSURL(GridUserInterface user, TSURL surl, try { con = getConnection(); - String query = "UPDATE status_Put sp " - + "JOIN (request_Put rp, request_queue rq) " - + "ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " - + "SET sp.statusCode=20, rq.status=20, sp.explanation=? " - + "WHERE rp.targetSURL = ? and rp.targetSURL_uniqueID = ? " - + "AND (sp.statusCode=24 OR sp.statusCode=17)"; + String query = + "UPDATE status_Put sp " + + "JOIN (request_Put rp, request_queue rq) " + + "ON sp.request_PutID=rp.ID AND rp.request_queueID=rq.ID " + + "SET sp.statusCode=20, rq.status=20, sp.explanation=? " + + "WHERE rp.targetSURL = ? and rp.targetSURL_uniqueID = ? 
" + + "AND (sp.statusCode=24 OR sp.statusCode=17)"; if (user != null) { query += "AND rq.client_dn = ?"; @@ -120,14 +113,12 @@ public boolean abortActivePtPsForSURL(GridUserInterface user, TSURL surl, final int updateCount = stat.executeUpdate(); - LOGGER.debug("abortActivePtPsForSURL: surl={}, numOfAbortedRequests={}", - surl, updateCount); + LOGGER.debug("abortActivePtPsForSURL: surl={}, numOfAbortedRequests={}", surl, updateCount); return (updateCount != 0); } catch (SQLException e) { - String msg = String.format("abortActivePtPsForSURL: SQL error: %s", - e.getMessage()); + String msg = String.format("abortActivePtPsForSURL: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); @@ -136,11 +127,9 @@ public boolean abortActivePtPsForSURL(GridUserInterface user, TSURL surl, closeStatetement(stat); closeConnection(con); } - } - private Map buildStatusMap(ResultSet rs) - throws SQLException { + private Map buildStatusMap(ResultSet rs) throws SQLException { if (rs == null) { throw new IllegalArgumentException("rs cannot be null"); @@ -156,7 +145,6 @@ private Map buildStatusMap(ResultSet rs) } return statusMap; - } private void closeConnection(Connection conn) { @@ -194,7 +182,7 @@ private void closeStatetement(Statement stat) { } private Map filterSURLStatuses( - Map statuses, List surls) { + Map statuses, List surls) { if (surls == null) { return statuses; @@ -213,8 +201,9 @@ private Map filterSURLStatuses( // Add a failure state for the surls that were // requested but are not linked to the token for (TSURL s : surlsCopy) { - statuses.put(s, new TReturnStatus(TStatusCode.SRM_FAILURE, - "SURL not linked to passed request token.")); + statuses.put( + s, + new TReturnStatus(TStatusCode.SRM_FAILURE, "SURL not linked to passed request token.")); } return statuses; @@ -231,10 +220,11 @@ private Map getBoLSURLStatuses(TRequestToken token) { try { con = getConnection(); - String query = "SELECT rb.sourceSURL, sb.statusCode " - + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " - + "ON (rb.request_queueID = rq.ID AND sb.request_BoLID = rb.ID)" - + "WHERE ( rq.r_token = ? )"; + String query = + "SELECT rb.sourceSURL, sb.statusCode " + + "FROM request_queue rq JOIN (request_BoL rb, status_BoL sb) " + + "ON (rb.request_queueID = rq.ID AND sb.request_BoLID = rb.ID)" + + "WHERE ( rq.r_token = ? 
)"; stat = con.prepareStatement(query); stat.setString(1, token.getValue()); @@ -244,8 +234,7 @@ private Map getBoLSURLStatuses(TRequestToken token) { } catch (SQLException e) { - String msg = String.format("getBoLSURLStatuses: SQL error: %s", - e.getMessage()); + String msg = String.format("getBoLSURLStatuses: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); @@ -255,7 +244,6 @@ private Map getBoLSURLStatuses(TRequestToken token) { closeResultSet(rs); closeConnection(con); } - } private Connection getConnection() throws SQLException { @@ -267,7 +255,7 @@ private Connection getConnection() throws SQLException { } public Map getPinnedSURLsForUser( - GridUserInterface user, List surls) { + GridUserInterface user, List surls) { if (user == null) { throw new NullPointerException("getPinnedSURLsForUser: null user!"); @@ -283,10 +271,11 @@ public Map getPinnedSURLsForUser( con = getConnection(); - String query = "SELECT rg.sourceSURL, rg.sourceSURL_uniqueID, sg.statusCode " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE ( sg.statusCode = 22 and rq.client_dn = ? )"; + String query = + "SELECT rg.sourceSURL, rg.sourceSURL_uniqueID, sg.statusCode " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE ( sg.statusCode = 22 and rq.client_dn = ? )"; stat = con.prepareStatement(query); stat.setString(1, user.getDn()); @@ -299,14 +288,12 @@ public Map getPinnedSURLsForUser( TSURL surl = surlFromString(rs.getString(1)); surl.setUniqueID(rs.getInt(2)); statusMap.put(surl, new TReturnStatus(converter.toSTORM(rs.getInt(3)))); - } return filterSURLStatuses(statusMap, surls); } catch (SQLException e) { - String msg = String.format("getPinnedSURLsForUser: SQL error: %s", - e.getMessage()); + String msg = String.format("getPinnedSURLsForUser: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); } finally { @@ -317,7 +304,7 @@ public Map getPinnedSURLsForUser( } public Map getPinnedSURLsForUser( - GridUserInterface user, TRequestToken token, List surls) { + GridUserInterface user, TRequestToken token, List surls) { userSanityChecks(user); tokenSanityChecks(token); @@ -332,10 +319,11 @@ public Map getPinnedSURLsForUser( try { con = getConnection(); - String query = "SELECT rg.sourceSURL, rg.sourceSURL_uniqueID, sg.statusCode " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE ( sg.statusCode = 22 and rq.client_dn = ? and rq.r_token = ? )"; + String query = + "SELECT rg.sourceSURL, rg.sourceSURL_uniqueID, sg.statusCode " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID=rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE ( sg.statusCode = 22 and rq.client_dn = ? and rq.r_token = ? 
)"; stat = con.prepareStatement(query); stat.setString(1, user.getDn()); @@ -349,14 +337,12 @@ public Map getPinnedSURLsForUser( TSURL surl = surlFromString(rs.getString(1)); surl.setUniqueID(rs.getInt(2)); statusMap.put(surl, new TReturnStatus(converter.toSTORM(rs.getInt(3)))); - } return filterSURLStatuses(statusMap, surls); } catch (SQLException e) { - String msg = String.format("getPinnedSURLsForUser: SQL error: %s", - e.getMessage()); + String msg = String.format("getPinnedSURLsForUser: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); } finally { @@ -377,10 +363,11 @@ private Map getPtGSURLStatuses(TRequestToken token) { try { con = getConnection(); - String query = "SELECT rg.sourceSURL, sg.statusCode " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID = rq.ID AND sg.request_GetID=rg.ID) " - + "WHERE ( rq.r_token = ? )"; + String query = + "SELECT rg.sourceSURL, sg.statusCode " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID = rq.ID AND sg.request_GetID=rg.ID) " + + "WHERE ( rq.r_token = ? )"; stat = con.prepareStatement(query); stat.setString(1, token.getValue()); @@ -389,8 +376,7 @@ private Map getPtGSURLStatuses(TRequestToken token) { return buildStatusMap(rs); } catch (SQLException e) { - String msg = String.format("getPtGSURLStatuses: SQL error: %s", - e.getMessage()); + String msg = String.format("getPtGSURLStatuses: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); } finally { @@ -411,10 +397,11 @@ private Map getPtPSURLStatuses(TRequestToken token) { try { con = getConnection(); - String query = "SELECT rp.targetSURL, sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID = rq.ID AND sp.request_PutID = rp.ID)" - + "WHERE ( rq.r_token = ? )"; + String query = + "SELECT rp.targetSURL, sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID = rq.ID AND sp.request_PutID = rp.ID)" + + "WHERE ( rq.r_token = ? 
)"; stat = con.prepareStatement(query); stat.setString(1, token.getValue()); @@ -424,8 +411,7 @@ private Map getPtPSURLStatuses(TRequestToken token) { } catch (SQLException e) { - String msg = String.format("getPtPSURLStatuses: SQL error: %s", - e.getMessage()); + String msg = String.format("getPtPSURLStatuses: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); @@ -434,55 +420,50 @@ private Map getPtPSURLStatuses(TRequestToken token) { closeResultSet(rs); closeConnection(con); } - } public Map getSURLStatuses(TRequestToken token) { TRequestType rt = RequestSummaryCatalog.getInstance().typeOf(token); - if (rt.isEmpty()) - throw new UnknownTokenException(token.getValue()); + if (rt.isEmpty()) throw new UnknownTokenException(token.getValue()); if (token.hasExpirationDate() && token.isExpired()) throw new ExpiredTokenException(token.getValue()); switch (rt) { - case PREPARE_TO_GET: - return getPtGSURLStatuses(token); + case PREPARE_TO_GET: + return getPtGSURLStatuses(token); - case PREPARE_TO_PUT: - return getPtPSURLStatuses(token); + case PREPARE_TO_PUT: + return getPtPSURLStatuses(token); - case BRING_ON_LINE: - return getBoLSURLStatuses(token); + case BRING_ON_LINE: + return getBoLSURLStatuses(token); - default: - String msg = String.format("Invalid request type for token %s: %s", - token, rt.toString()); - throw new IllegalArgumentException(msg); + default: + String msg = String.format("Invalid request type for token %s: %s", token, rt.toString()); + throw new IllegalArgumentException(msg); } } - public Map getSURLStatuses(TRequestToken token, - List surls) { + public Map getSURLStatuses(TRequestToken token, List surls) { TRequestType rt = RequestSummaryCatalog.getInstance().typeOf(token); switch (rt) { - case PREPARE_TO_GET: - return filterSURLStatuses(getPtGSURLStatuses(token), surls); + case PREPARE_TO_GET: + return filterSURLStatuses(getPtGSURLStatuses(token), surls); - case PREPARE_TO_PUT: - return filterSURLStatuses(getPtPSURLStatuses(token), surls); + case PREPARE_TO_PUT: + return filterSURLStatuses(getPtPSURLStatuses(token), surls); - case BRING_ON_LINE: - return filterSURLStatuses(getBoLSURLStatuses(token), surls); + case BRING_ON_LINE: + return filterSURLStatuses(getBoLSURLStatuses(token), surls); - default: - String msg = String.format("Invalid request type for token %s: %s", - token, rt.toString()); - throw new IllegalArgumentException(msg); + default: + String msg = String.format("Invalid request type for token %s: %s", token, rt.toString()); + throw new IllegalArgumentException(msg); } } @@ -492,9 +473,9 @@ public int markSURLsReadyForRead(TRequestToken token, List surls) { surlSanityChecks(surls); // I am not reimplementing the whole catalog functions - return PtPChunkCatalog.getInstance().updateFromPreviousStatus(token, surls, - TStatusCode.SRM_SPACE_AVAILABLE, TStatusCode.SRM_SUCCESS); - + return PtPChunkCatalog.getInstance() + .updateFromPreviousStatus( + token, surls, TStatusCode.SRM_SPACE_AVAILABLE, TStatusCode.SRM_SUCCESS); } private String quoteSURLUniqueIDs(List surls) { @@ -510,7 +491,6 @@ private String quoteSURLUniqueIDs(List surls) { } return sb.toString(); - } private String quoteSURLList(List surls) { @@ -538,12 +518,13 @@ public void releaseSURL(TSURL surl) { try { con = getConnection(); - String query = "UPDATE status_Get sg " - + "JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=21" - + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " - + "AND 
rg.sourceSURL = ? and rg.sourceSURL_uniqueID = ?"; + String query = + "UPDATE status_Get sg " + + "JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=21" + + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " + + "AND rg.sourceSURL = ? and rg.sourceSURL_uniqueID = ?"; stat = con.prepareStatement(query); stat.setString(1, surl.getSURLString()); @@ -572,14 +553,19 @@ public void releaseSURLs(GridUserInterface user, List surls) { try { con = getConnection(); - String query = "UPDATE status_Get sg " - + "JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=21 " - + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " - + "AND rg.sourceSURL_uniqueID IN (" + quoteSURLUniqueIDs(surls) + ") " - + "AND rg.sourceSURL IN (" + quoteSURLList(surls) + ") " - + "AND rq.client_dn = ?"; + String query = + "UPDATE status_Get sg " + + "JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=21 " + + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " + + "AND rg.sourceSURL_uniqueID IN (" + + quoteSURLUniqueIDs(surls) + + ") " + + "AND rg.sourceSURL IN (" + + quoteSURLList(surls) + + ") " + + "AND rq.client_dn = ?"; stat = con.prepareStatement(query); stat.setString(1, user.getDn()); @@ -596,7 +582,6 @@ public void releaseSURLs(GridUserInterface user, List surls) { closeStatetement(stat); closeConnection(con); } - } public void releaseSURLs(List surls) { @@ -609,13 +594,18 @@ public void releaseSURLs(List surls) { try { con = getConnection(); - String query = "UPDATE status_Get sg " - + "JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=21 " - + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " - + "AND rg.sourceSURL_uniqueID IN (" + quoteSURLUniqueIDs(surls) + ") " - + "AND rg.sourceSURL IN (" + quoteSURLList(surls) + ")"; + String query = + "UPDATE status_Get sg " + + "JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=21 " + + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " + + "AND rg.sourceSURL_uniqueID IN (" + + quoteSURLUniqueIDs(surls) + + ") " + + "AND rg.sourceSURL IN (" + + quoteSURLList(surls) + + ")"; stat = con.prepareStatement(query); stat.executeUpdate(); @@ -642,14 +632,19 @@ public void releaseSURLs(TRequestToken token, List surls) { try { con = getConnection(); - String query = "UPDATE status_Get sg " - + "JOIN (request_Get rg, request_queue rq) " - + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " - + "SET sg.statusCode=21 " - + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " - + "AND rg.sourceSURL_uniqueID IN (" + quoteSURLUniqueIDs(surls) + ") " - + "AND rg.sourceSURL IN (" + quoteSURLList(surls) + ") " - + "AND rq.r_token = ?"; + String query = + "UPDATE status_Get sg " + + "JOIN (request_Get rg, request_queue rq) " + + "ON sg.request_GetID=rg.ID AND rg.request_queueID=rq.ID " + + "SET sg.statusCode=21 " + + "WHERE (sg.statusCode=22 OR sg.statusCode=0) " + + "AND rg.sourceSURL_uniqueID IN (" + + quoteSURLUniqueIDs(surls) + + ") " + + "AND rg.sourceSURL IN (" + + quoteSURLList(surls) + + ") " + + "AND rq.r_token = ?"; stat = con.prepareStatement(query); stat.setString(1, token.getValue()); @@ -673,8 +668,7 @@ private TSURL surlFromString(String s) { return TSURL.makeFromStringWellFormed(s); } catch (InvalidTSURLAttributesException 
e) { - throw new IllegalArgumentException("Error creating surl from string: " - + s, e); + throw new IllegalArgumentException("Error creating surl from string: " + s, e); } } @@ -691,11 +685,12 @@ public boolean surlHasOngoingPtGs(TSURL surl) { // We basically check whether there are active requests // that have the SURL in SRM_FILE_PINNED status - String query = "SELECT rq.ID, rg.ID, sg.statusCode " - + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " - + "ON (rg.request_queueID = rq.ID AND sg.request_GetID = rg.ID) " - + "WHERE ( rg.sourceSURL = ? and rg.sourceSURL_uniqueID = ? " - + "and sg.statusCode = 22 )"; + String query = + "SELECT rq.ID, rg.ID, sg.statusCode " + + "FROM request_queue rq JOIN (request_Get rg, status_Get sg) " + + "ON (rg.request_queueID = rq.ID AND sg.request_GetID = rg.ID) " + + "WHERE ( rg.sourceSURL = ? and rg.sourceSURL_uniqueID = ? " + + "and sg.statusCode = 22 )"; stat = con.prepareStatement(query); stat.setString(1, surl.getSURLString()); @@ -704,8 +699,7 @@ public boolean surlHasOngoingPtGs(TSURL surl) { rs = stat.executeQuery(); return rs.next(); } catch (SQLException e) { - String msg = String.format("surlHasOngoingPtGs: SQL error: %s", - e.getMessage()); + String msg = String.format("surlHasOngoingPtGs: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); } finally { @@ -728,11 +722,12 @@ public boolean surlHasOngoingPtPs(TSURL surl, TRequestToken ptpRequestToken) { con = getConnection(); // We basically check whether there are active requests // that have the SURL in SRM_SPACE_AVAILABLE status - String query = "SELECT rq.ID, rp.ID, sp.statusCode " - + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " - + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " - + "WHERE ( rp.targetSURL = ? and rp.targetSURL_uniqueID = ? " - + "and sp.statusCode=24 )"; + String query = + "SELECT rq.ID, rp.ID, sp.statusCode " + + "FROM request_queue rq JOIN (request_Put rp, status_Put sp) " + + "ON (rp.request_queueID=rq.ID AND sp.request_PutID=rp.ID) " + + "WHERE ( rp.targetSURL = ? and rp.targetSURL_uniqueID = ? 
" + + "and sp.statusCode=24 )"; if (ptpRequestToken != null) { query += " AND rq.r_token != ?"; @@ -749,8 +744,7 @@ public boolean surlHasOngoingPtPs(TSURL surl, TRequestToken ptpRequestToken) { rs = stat.executeQuery(); return rs.next(); } catch (SQLException e) { - String msg = String.format("surlHasOngoingPtPs: SQL error: %s", - e.getMessage()); + String msg = String.format("surlHasOngoingPtPs: SQL error: %s", e.getMessage()); LOGGER.error(msg, e); throw new RuntimeException(msg, e); } finally { @@ -758,13 +752,11 @@ public boolean surlHasOngoingPtPs(TSURL surl, TRequestToken ptpRequestToken) { closeResultSet(rs); closeConnection(con); } - } private void surlSanityChecks(List surls) { - if (surls == null) - throw new IllegalArgumentException("surls must be non-null."); + if (surls == null) throw new IllegalArgumentException("surls must be non-null."); for (TSURL s : surls) { surlSanityChecks(s); @@ -778,7 +770,6 @@ private void surlSanityChecks(TSURL surl) { if (surl.getSURLString().isEmpty()) throw new IllegalArgumentException("surl must be non-empty."); - } private void tokenSanityChecks(TRequestToken token) { @@ -786,15 +777,11 @@ private void tokenSanityChecks(TRequestToken token) { if (token == null || token.getValue() == null) throw new IllegalArgumentException("token must be non-null."); - if (token.getValue().isEmpty()) - throw new IllegalArgumentException("token must be non-empty."); - + if (token.getValue().isEmpty()) throw new IllegalArgumentException("token must be non-empty."); } private void userSanityChecks(GridUserInterface user) { - if (user == null) - throw new IllegalArgumentException("user must be non-null."); + if (user == null) throw new IllegalArgumentException("user must be non-null."); } - } diff --git a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManager.java b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManager.java index 427892b2..2cc3d3d9 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManager.java +++ b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; @@ -9,220 +8,167 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.util.List; import java.util.Map; -/** - * This interface provides methods to check and update the status of SURLs - * managed by StoRM. - * - */ +/** This interface provides methods to check and update the status of SURLs managed by StoRM. 
*/ public interface SURLStatusManager { /** * Aborts all ongoing get requests for a given surl - * - * @param surl - * the surl for which the requests must be aborted - * @param explanation - * a string explaining why the request was aborted - * - * @return true if some request was aborted, false - * otherwise + * + * @param surl the surl for which the requests must be aborted + * @param explanation a string explaining why the request was aborted + * @return true if some request was aborted, false otherwise */ - public boolean abortAllGetRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation); + public boolean abortAllGetRequestsForSURL(GridUserInterface user, TSURL surl, String explanation); /** * Aborts all ongoing put requests for a given surl - * - * @param surl - * the surl for which the requests must be aborted - * @param explanation - * a string explaining why the request was aborted - * - * @return true if some request was aborted, false - * otherwise + * + * @param surl the surl for which the requests must be aborted + * @param explanation a string explaining why the request was aborted + * @return true if some request was aborted, false otherwise */ - public boolean abortAllPutRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation); + public boolean abortAllPutRequestsForSURL(GridUserInterface user, TSURL surl, String explanation); /** * Aborts a request identified by a given token * - * @param user - * the authenticated user - * @param token - * the request token - * @param explanation - * a string explaining why the request was aborted - * - * @return true if some request was aborted, false - * otherwise + * @param user the authenticated user + * @param token the request token + * @param explanation a string explaining why the request was aborted + * @return true if some request was aborted, false otherwise */ - public boolean abortRequest(GridUserInterface user, TRequestToken token, - String explanation); + public boolean abortRequest(GridUserInterface user, TRequestToken token, String explanation); /** * Aborts a request for a given SURL with the given explanation - * - * @param user - * the authenticated user - * @param token - * the request token - * @param surl - * the surl for which the request must be aborted - * @param explanation - * a string explaining why the request was aborted - * - * @return true if some request was aborted, false - * otherwise + * + * @param user the authenticated user + * @param token the request token + * @param surl the surl for which the request must be aborted + * @param explanation a string explaining why the request was aborted + * @return true if some request was aborted, false otherwise */ - public boolean abortRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, String explanation); + public boolean abortRequestForSURL( + GridUserInterface user, TRequestToken token, TSURL surl, String explanation); /** * Sets the failed state for a SURL in the request with the given token. 
- * - * @param user - * the authenticated user - * @param token - * the request token - * @param surl - * the surl that has the failure - * @param code - * the error code - * @param explanation - * a string explaining the reason behind the failure - * - * @return true if some request was failed, false - * otherwise + * + * @param user the authenticated user + * @param token the request token + * @param surl the surl that has the failure + * @param code the error code + * @param explanation a string explaining the reason behind the failure + * @return true if some request was failed, false otherwise */ - public boolean failRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, TStatusCode code, String explanation); + public boolean failRequestForSURL( + GridUserInterface user, + TRequestToken token, + TSURL surl, + TStatusCode code, + String explanation); /** - * Checks that the input surls are pinned in requests coming from user user - * and returns a map of the status where surls may be pinned or have an error - * status (which happens when one of the surl passed as argument is not found - * in the storm db) - * + * Checks that the input surls are pinned in requests coming from user user and returns a map of + * the status where surls may be pinned or have an error status (which happens when one of the + * surl passed as argument is not found in the storm db) + * * @param user * @param surls * @return */ - public Map getPinnedSURLsForUser( - GridUserInterface user, List surls); + public Map getPinnedSURLsForUser(GridUserInterface user, List surls); /** - * Checks that the input surls are pinned in the request coming from a user - * with a given token and returns a map of the status where surls may be - * pinned or have an error status (which happens when one of the surl passed - * as argument is not linked to the passed token) - * + * Checks that the input surls are pinned in the request coming from a user with a given token and + * returns a map of the status where surls may be pinned or have an error status (which happens + * when one of the surl passed as argument is not linked to the passed token) + * * @param user * @param surls * @return */ public Map getPinnedSURLsForUser( - GridUserInterface user, TRequestToken token, List surls); + GridUserInterface user, TRequestToken token, List surls); /** * Return a map of the surls status associated to the request token - * - * + * * @param token * @return */ - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token); + public Map getSURLStatuses(GridUserInterface user, TRequestToken token); /** * Return a map of the surls status associated to the request token - * + * * @param token * @param surls - * * @return */ - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token, - List surls); + public Map getSURLStatuses( + GridUserInterface user, TRequestToken token, List surls); /** - * - * Checks if there is an ongoing prepare to put operation on a given SURL - * registered in the StoRM database. The search for the ongoing PtP will - * exclude the request whose token is equal to the request token passed as - * argument. 
- * - * @param requestTokenToExclude - * the token of the request that will not be considered in the search - * for ongoing prepare to put operations on the given surl - * @param surl - * the surl to be checked - * - * @return true if an ongoing PtP has been found for the surl, - * false otherwise + * Checks if there is an ongoing prepare to put operation on a given SURL registered in the StoRM + * database. The search for the ongoing PtP will exclude the request whose token is equal to the + * request token passed as argument. + * + * @param requestTokenToExclude the token of the request that will not be considered in the search + * for ongoing prepare to put operations on the given surl + * @param surl the surl to be checked + * @return true if an ongoing PtP has been found for the surl, false + * otherwise */ public boolean isSURLBusy(TRequestToken requestTokenToExclude, TSURL surl); /** - * Checks if there is an ongoing prepare to put operation on a given SURL - * registered in the StoRM database. - * - * @param surl - * the surl to be checked - * @return true if an ongoing PtP has been found for the surl, - * false otherwise - * + * Checks if there is an ongoing prepare to put operation on a given SURL registered in the StoRM + * database. + * + * @param surl the surl to be checked + * @return true if an ongoing PtP has been found for the surl, false + * otherwise */ public boolean isSURLBusy(TSURL surl); /** - * Checks if there is an ongoing prepare to get operation for a given SURL - * registered in the StoRM database. - * - * @param surl - * the surl to be checked - * @return true if an ongoing PtG has been found for the surl, - * false otherwise + * Checks if there is an ongoing prepare to get operation for a given SURL registered in the StoRM + * database. + * + * @param surl the surl to be checked + * @return true if an ongoing PtG has been found for the surl, false + * otherwise */ public boolean isSURLPinned(TSURL surl); /** - * Marks the list of surls passed as argument as ready for read operations for - * the given request token - * - * @param token - * the request token - * @param surls - * the surl to be marked as ready for read operations + * Marks the list of surls passed as argument as ready for read operations for the given request + * token + * + * @param token the request token + * @param surls the surl to be marked as ready for read operations * @return the number of surls updated on db */ public int markSURLsReadyForRead(TRequestToken token, List surls); /** - * Releases a list of surls in all ongoing PtG requests registered in the - * StoRM database and assigned to a specific user. - * - * @param user - * the user - * @param surls - * the surls to be released + * Releases a list of surls in all ongoing PtG requests registered in the StoRM database and + * assigned to a specific user. + * + * @param user the user + * @param surls the surls to be released */ public void releaseSURLs(GridUserInterface user, List surls); /** - * Releases a list of surls for the PtG request identified by the given token - * StoRM database. - * - * @param token - * the request token - * @param surls - * the surls to be released + * Releases a list of surls for the PtG request identified by the given token StoRM database. 
+ * + * @param token the request token + * @param surls the surls to be released */ public void releaseSURLs(TRequestToken token, List surls); - } diff --git a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerFactory.java b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerFactory.java index 0ad1fe49..203961cd 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerFactory.java +++ b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerFactory.java @@ -1,16 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; - public class SURLStatusManagerFactory { - public static SURLStatusManager newSURLStatusManager(){ - - SURLStatusManager delegate = new SURLStatusManagerImpl(); + public static SURLStatusManager newSURLStatusManager() { + + SURLStatusManager delegate = new SURLStatusManagerImpl(); return new InMemorySURLStatusManager(delegate); } - } diff --git a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerImpl.java b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerImpl.java index 3f1463e3..58a7a1b9 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerImpl.java +++ b/src/main/java/it/grid/storm/catalogs/surl/SURLStatusManagerImpl.java @@ -1,9 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.surl; +import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; + import it.grid.storm.authz.AuthzException; import it.grid.storm.catalogs.BoLChunkCatalog; import it.grid.storm.catalogs.CopyChunkCatalog; @@ -16,80 +17,75 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - -import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; - import java.util.List; import java.util.Map; public class SURLStatusManagerImpl implements SURLStatusManager { @Override - public boolean abortAllGetRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllGetRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.abortActivePtGsForSURL(user, surl, explanation); - } @Override - public boolean abortAllPutRequestsForSURL(GridUserInterface user, TSURL surl, - String explanation) { + public boolean abortAllPutRequestsForSURL( + GridUserInterface user, TSURL surl, String explanation) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.abortActivePtPsForSURL(user, surl, explanation); - } @Override - public boolean abortRequest(GridUserInterface user, TRequestToken token, - String explanation) { + public boolean abortRequest(GridUserInterface user, TRequestToken token, String explanation) { RequestSummaryData request = lookupAndCheckRequest(user, token); switch (request.requestType()) { - case PREPARE_TO_GET: - - PtGChunkCatalog.getInstance().updateFromPreviousStatus(token, - TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); - break; - - case PREPARE_TO_PUT: - PtPChunkCatalog.getInstance().updateFromPreviousStatus(token, - TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); - break; + case 
PREPARE_TO_GET: + PtGChunkCatalog.getInstance() + .updateFromPreviousStatus( + token, TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); + break; - case BRING_ON_LINE: - BoLChunkCatalog.getInstance().updateFromPreviousStatus(token, - TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); - break; + case PREPARE_TO_PUT: + PtPChunkCatalog.getInstance() + .updateFromPreviousStatus( + token, TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); + break; - case COPY: - CopyChunkCatalog.getInstance().updateFromPreviousStatus(token, - TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); - break; + case BRING_ON_LINE: + BoLChunkCatalog.getInstance() + .updateFromPreviousStatus( + token, TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); + break; - case EMPTY: - break; + case COPY: + CopyChunkCatalog.getInstance() + .updateFromPreviousStatus( + token, TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, explanation); + break; - default: - throw new IllegalArgumentException( - "Abort not supported for request type: " + request.requestType()); + case EMPTY: + break; + default: + throw new IllegalArgumentException( + "Abort not supported for request type: " + request.requestType()); } return true; } @Override - public boolean abortRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, String explanation) { + public boolean abortRequestForSURL( + GridUserInterface user, TRequestToken token, TSURL surl, String explanation) { RequestSummaryData request = lookupAndCheckRequest(user, token); switch (request.requestType()) { - case PREPARE_TO_GET: PtGChunkCatalog.getInstance().updateStatus(token, surl, SRM_ABORTED, explanation); break; @@ -109,29 +105,32 @@ public boolean abortRequestForSURL(GridUserInterface user, private void authzCheck(GridUserInterface user, RequestSummaryData request) { if (!request.gridUser().getDn().equals(user.getDn())) { - String errorMsg = String.format("User %s is not authorized to abort " - + "request %s", user.getDn(), request.requestToken()); + String errorMsg = + String.format( + "User %s is not authorized to abort " + "request %s", + user.getDn(), request.requestToken()); throw new AuthzException(errorMsg); } } @Override - public boolean failRequestForSURL(GridUserInterface user, - TRequestToken token, TSURL surl, TStatusCode code, String explanation) { + public boolean failRequestForSURL( + GridUserInterface user, + TRequestToken token, + TSURL surl, + TStatusCode code, + String explanation) { RequestSummaryData request = lookupAndCheckRequest(user, token); switch (request.requestType()) { + case PREPARE_TO_PUT: + PtPChunkCatalog.getInstance() + .updateStatus(token, surl, TStatusCode.SRM_AUTHORIZATION_FAILURE, explanation); + break; - case PREPARE_TO_PUT: - PtPChunkCatalog.getInstance().updateStatus(token, surl, - TStatusCode.SRM_AUTHORIZATION_FAILURE, explanation); - break; - - default: - throw new IllegalArgumentException("Unsupported request type: " - + request.requestType()); - + default: + throw new IllegalArgumentException("Unsupported request type: " + request.requestType()); } return true; @@ -139,7 +138,7 @@ public boolean failRequestForSURL(GridUserInterface user, @Override public Map getPinnedSURLsForUser( - GridUserInterface user, List surls) { + GridUserInterface user, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.getPinnedSURLsForUser(user, surls); @@ -147,25 +146,22 @@ public Map getPinnedSURLsForUser( @Override public 
Map getPinnedSURLsForUser( - GridUserInterface user, TRequestToken token, List surls) { + GridUserInterface user, TRequestToken token, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.getPinnedSURLsForUser(user, token, surls); - } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token) { + public Map getSURLStatuses(GridUserInterface user, TRequestToken token) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.getSURLStatuses(token); } @Override - public Map getSURLStatuses(GridUserInterface user, - TRequestToken token, - List surls) { + public Map getSURLStatuses( + GridUserInterface user, TRequestToken token, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.getSURLStatuses(token, surls); @@ -192,8 +188,7 @@ public boolean isSURLPinned(TSURL surl) { return dao.surlHasOngoingPtGs(surl); } - private RequestSummaryData lookupAndCheckRequest(GridUserInterface user, - TRequestToken token) { + private RequestSummaryData lookupAndCheckRequest(GridUserInterface user, TRequestToken token) { RequestSummaryData request = lookupRequest(token); authzCheck(user, request); @@ -202,12 +197,10 @@ private RequestSummaryData lookupAndCheckRequest(GridUserInterface user, private RequestSummaryData lookupRequest(TRequestToken token) { - RequestSummaryData request = RequestSummaryCatalog.getInstance() - .find(token); + RequestSummaryData request = RequestSummaryCatalog.getInstance().find(token); if (request == null) { - throw new IllegalArgumentException("No request found matching token " - + token); + throw new IllegalArgumentException("No request found matching token " + token); } return request; @@ -218,7 +211,6 @@ public int markSURLsReadyForRead(TRequestToken token, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); return dao.markSURLsReadyForRead(token, surls); - } @Override @@ -226,7 +218,6 @@ public void releaseSURLs(GridUserInterface user, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); dao.releaseSURLs(user, surls); - } @Override @@ -235,5 +226,4 @@ public void releaseSURLs(TRequestToken token, List surls) { final SURLStatusDAO dao = new SURLStatusDAO(); dao.releaseSURLs(token, surls); } - } diff --git a/src/main/java/it/grid/storm/catalogs/surl/package-info.java b/src/main/java/it/grid/storm/catalogs/surl/package-info.java index 110158a4..68589529 100644 --- a/src/main/java/it/grid/storm/catalogs/surl/package-info.java +++ b/src/main/java/it/grid/storm/catalogs/surl/package-info.java @@ -1,12 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ -/** - * - */ -/** - * @author andreaceccanti - * - */ -package it.grid.storm.catalogs.surl; \ No newline at end of file +/** */ +/** @author andreaceccanti */ +package it.grid.storm.catalogs.surl; diff --git a/src/main/java/it/grid/storm/catalogs/timertasks/ExpiredPutRequestsAgent.java b/src/main/java/it/grid/storm/catalogs/timertasks/ExpiredPutRequestsAgent.java index 2d7e12ef..4f167a3d 100644 --- a/src/main/java/it/grid/storm/catalogs/timertasks/ExpiredPutRequestsAgent.java +++ b/src/main/java/it/grid/storm/catalogs/timertasks/ExpiredPutRequestsAgent.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.timertasks; @@ -9,89 +8,90 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.command.datatransfer.PutDoneCommand; import it.grid.storm.synchcall.command.datatransfer.PutDoneCommandException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.List; import java.util.Map; import java.util.TimerTask; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ExpiredPutRequestsAgent extends TimerTask { - private static final Logger log = LoggerFactory.getLogger(ExpiredPutRequestsAgent.class); + private static final Logger log = LoggerFactory.getLogger(ExpiredPutRequestsAgent.class); - private static final String NAME = "Expired-PutRequests-Agent"; + private static final String NAME = "Expired-PutRequests-Agent"; - private long inProgressRequestsExpirationTime; + private long inProgressRequestsExpirationTime; - public ExpiredPutRequestsAgent(long inProgressRequestsExpirationTime) { + public ExpiredPutRequestsAgent(long inProgressRequestsExpirationTime) { - this.inProgressRequestsExpirationTime = inProgressRequestsExpirationTime; - log.info("{} created.", NAME); - } - - @Override - public synchronized void run() { + this.inProgressRequestsExpirationTime = inProgressRequestsExpirationTime; + log.info("{} created.", NAME); + } - log.debug("{} run.", NAME); - try { + @Override + public synchronized void run() { - transitExpiredLifetimeRequests(); - transitExpiredInProgressRequests(); + log.debug("{} run.", NAME); + try { - } catch (Exception e) { + transitExpiredLifetimeRequests(); + transitExpiredInProgressRequests(); - log.error("{}: {}", e.getClass(), e.getMessage(), e); + } catch (Exception e) { - } + log.error("{}: {}", e.getClass(), e.getMessage(), e); } + } - private void transitExpiredLifetimeRequests() { + private void transitExpiredLifetimeRequests() { - PtPChunkDAO dao = PtPChunkDAO.getInstance(); - Map expiredRequests = dao.getExpiredSRM_SPACE_AVAILABLE(); - log.debug("{} lifetime-expired requests found ... ", NAME, expiredRequests.size()); + PtPChunkDAO dao = PtPChunkDAO.getInstance(); + Map expiredRequests = dao.getExpiredSRM_SPACE_AVAILABLE(); + log.debug("{} lifetime-expired requests found ... 
", NAME, expiredRequests.size()); - if (expiredRequests.isEmpty()) { - return; - } + if (expiredRequests.isEmpty()) { + return; + } - expiredRequests.entrySet().forEach(e -> executePutDone(e.getKey(), e.getValue())); + expiredRequests.entrySet().forEach(e -> executePutDone(e.getKey(), e.getValue())); - int count = dao.transitExpiredSRM_SPACE_AVAILABLEtoSRM_FILE_LIFETIME_EXPIRED( - expiredRequests.keySet()); - log.info("{} updated expired put requests - {} db rows affected", NAME, count); - } + int count = + dao.transitExpiredSRM_SPACE_AVAILABLEtoSRM_FILE_LIFETIME_EXPIRED(expiredRequests.keySet()); + log.info("{} updated expired put requests - {} db rows affected", NAME, count); + } - private void executePutDone(Long id, String surl) { + private void executePutDone(Long id, String surl) { - try { + try { - if (PutDoneCommand.executePutDone(TSURL.makeFromStringValidate(surl))) { - log.info("{} successfully executed a srmPutDone on surl {}", NAME, surl); - } + if (PutDoneCommand.executePutDone(TSURL.makeFromStringValidate(surl))) { + log.info("{} successfully executed a srmPutDone on surl {}", NAME, surl); + } - } catch (InvalidTSURLAttributesException | PutDoneCommandException e) { + } catch (InvalidTSURLAttributesException | PutDoneCommandException e) { - log.error("{}. Unable to execute PutDone on request with id {} and surl {}: ", NAME, id, - surl, e.getMessage(), e); - } + log.error( + "{}. Unable to execute PutDone on request with id {} and surl {}: ", + NAME, + id, + surl, + e.getMessage(), + e); } + } - private void transitExpiredInProgressRequests() { - - PtPChunkDAO dao = PtPChunkDAO.getInstance(); - List expiredRequestsIds = - dao.getExpiredSRM_REQUEST_INPROGRESS(inProgressRequestsExpirationTime); - log.debug("{} expired in-progress requests found.", expiredRequestsIds.size()); + private void transitExpiredInProgressRequests() { - if (expiredRequestsIds.isEmpty()) { - return; - } + PtPChunkDAO dao = PtPChunkDAO.getInstance(); + List expiredRequestsIds = + dao.getExpiredSRM_REQUEST_INPROGRESS(inProgressRequestsExpirationTime); + log.debug("{} expired in-progress requests found.", expiredRequestsIds.size()); - int count = dao.transitExpiredSRM_REQUEST_INPROGRESStoSRM_FAILURE(expiredRequestsIds); - log.info("{} moved in-progress put requests to failure - {} db rows affected", NAME, count); + if (expiredRequestsIds.isEmpty()) { + return; } + + int count = dao.transitExpiredSRM_REQUEST_INPROGRESStoSRM_FAILURE(expiredRequestsIds); + log.info("{} moved in-progress put requests to failure - {} db rows affected", NAME, count); + } } diff --git a/src/main/java/it/grid/storm/catalogs/timertasks/RequestsGarbageCollector.java b/src/main/java/it/grid/storm/catalogs/timertasks/RequestsGarbageCollector.java index c23d77d5..bb5ccbe7 100644 --- a/src/main/java/it/grid/storm/catalogs/timertasks/RequestsGarbageCollector.java +++ b/src/main/java/it/grid/storm/catalogs/timertasks/RequestsGarbageCollector.java @@ -1,177 +1,169 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.timertasks; -import java.util.Timer; -import java.util.TimerTask; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.BoLChunkCatalog; import it.grid.storm.catalogs.PtGChunkCatalog; import it.grid.storm.catalogs.RequestSummaryDAO; import it.grid.storm.config.Configuration; import it.grid.storm.tape.recalltable.TapeRecallCatalog; +import java.util.Timer; +import java.util.TimerTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RequestsGarbageCollector extends TimerTask { - private static final Logger log = LoggerFactory.getLogger(RequestsGarbageCollector.class); - - private final Configuration config = Configuration.getInstance(); - private final RequestSummaryDAO dao = RequestSummaryDAO.getInstance(); - private final PtGChunkCatalog ptgCat = PtGChunkCatalog.getInstance(); - private final BoLChunkCatalog bolCat = BoLChunkCatalog.getInstance(); - - private Timer handler; - private long delay; - - public RequestsGarbageCollector(Timer handlerTimer, long delay) { - - this.delay = delay; - handler = handlerTimer; - } + private static final Logger log = LoggerFactory.getLogger(RequestsGarbageCollector.class); - @Override - public void run() { + private final Configuration config = Configuration.getInstance(); + private final RequestSummaryDAO dao = RequestSummaryDAO.getInstance(); + private final PtGChunkCatalog ptgCat = PtGChunkCatalog.getInstance(); + private final BoLChunkCatalog bolCat = BoLChunkCatalog.getInstance(); - try { + private Timer handler; + private long delay; - TGarbageData gd = purgeExpiredRequests(); + public RequestsGarbageCollector(Timer handlerTimer, long delay) { - if (gd.getTotalPurged() == 0) { + this.delay = delay; + handler = handlerTimer; + } - log.trace("GARBAGE COLLECTOR didn't find completed requests older than {} seconds", - config.getExpiredRequestTime()); + @Override + public void run() { - } else { + try { - log.info( - "GARBAGE COLLECTOR removed < {} > completed requests (< {} > recall) older than {} seconds", - gd.getTotalPurgedRequests(), gd.getTotalPurgedRecalls(), - config.getExpiredRequestTime()); + TGarbageData gd = purgeExpiredRequests(); - } + if (gd.getTotalPurged() == 0) { - long nextDelay = computeNextDelay(gd); + log.trace( + "GARBAGE COLLECTOR didn't find completed requests older than {} seconds", + config.getExpiredRequestTime()); - if (nextDelay != delay) { + } else { - log.info("GARBAGE COLLECTOR: tuning new interval to {} seconds", nextDelay / 1000); - delay = nextDelay; + log.info( + "GARBAGE COLLECTOR removed < {} > completed requests (< {} > recall) older than {} seconds", + gd.getTotalPurgedRequests(), + gd.getTotalPurgedRecalls(), + config.getExpiredRequestTime()); + } - } + long nextDelay = computeNextDelay(gd); - } catch (Exception t) { + if (nextDelay != delay) { - /* useful to prevent unexpected exceptions that would kill the GC */ - log.error(t.getMessage(), t); + log.info("GARBAGE COLLECTOR: tuning new interval to {} seconds", nextDelay / 1000); + delay = nextDelay; + } - } finally { + } catch (Exception t) { - reschedule(); - } - } + /* useful to prevent unexpected exceptions that would kill the GC */ + log.error(t.getMessage(), t); - /** - * Delete from database the completed requests older than a specified and configurable value. 
- * - * @return A TGarbageData object containing info about the deleted requests - */ - private TGarbageData purgeExpiredRequests() { + } finally { - if (!enabled()) { - return TGarbageData.EMPTY; - } + reschedule(); + } + } - long expirationTime = config.getExpiredRequestTime(); - int purgeSize = config.getPurgeBatchSize(); + /** + * Delete from database the completed requests older than a specified and configurable value. + * + * @return A TGarbageData object containing info about the deleted requests + */ + private TGarbageData purgeExpiredRequests() { - int nRequests = purgeExpiredRequests(expirationTime, purgeSize); - int nRecalls = purgeExpiredRecallRequests(expirationTime, purgeSize); + if (!enabled()) { + return TGarbageData.EMPTY; + } - return new TGarbageData(nRequests, nRecalls); - } + long expirationTime = config.getExpiredRequestTime(); + int purgeSize = config.getPurgeBatchSize(); - /** - * Check if Garbage Collector is enabled or not. - * - * @return If the purger is enabled. False otherwise. - */ - private boolean enabled() { + int nRequests = purgeExpiredRequests(expirationTime, purgeSize); + int nRecalls = purgeExpiredRecallRequests(expirationTime, purgeSize); - return config.getExpiredRequestPurging(); - } + return new TGarbageData(nRequests, nRecalls); + } - /** - * Method used to purge from db a bunch of completed requests, older than the - * specified @expiredRequestTime. - * - * @param purgeSize The maximum size of the bunch of expired requests that must be deleted - * @param expiredRequestTime The number of seconds after that a request can be considered - * expired - * @return The number of requests involved. - */ - private synchronized int purgeExpiredRequests(long expiredRequestTime, int purgeSize) { + /** + * Check if Garbage Collector is enabled or not. + * + * @return If the purger is enabled. False otherwise. + */ + private boolean enabled() { - ptgCat.transitExpiredSRM_FILE_PINNED(); - bolCat.transitExpiredSRM_SUCCESS(); + return config.getExpiredRequestPurging(); + } - return dao.purgeExpiredRequests(expiredRequestTime, purgeSize).size(); + /** + * Method used to purge from db a bunch of completed requests, older than the + * specified @expiredRequestTime. + * + * @param purgeSize The maximum size of the bunch of expired requests that must be deleted + * @param expiredRequestTime The number of seconds after that a request can be considered expired + * @return The number of requests involved. + */ + private synchronized int purgeExpiredRequests(long expiredRequestTime, int purgeSize) { - } + ptgCat.transitExpiredSRM_FILE_PINNED(); + bolCat.transitExpiredSRM_SUCCESS(); - /** - * Method used to clear a bunch of completed recall requests from database. - * - * @param expirationTime The number of seconds that must pass before considering a request as - * expired - * @param purgeSize The maximum size of the bunch of expired requests that must be deleted - * @return The number of requests involved. - */ - private synchronized int purgeExpiredRecallRequests(long expirationTime, int purgeSize) { + return dao.purgeExpiredRequests(expiredRequestTime, purgeSize).size(); + } - return new TapeRecallCatalog().purgeCatalog(expirationTime, purgeSize); - } + /** + * Method used to clear a bunch of completed recall requests from database. 
+ * + * @param expirationTime The number of seconds that must pass before considering a request as + * expired + * @param purgeSize The maximum size of the bunch of expired requests that must be deleted + * @return The number of requests involved. + */ + private synchronized int purgeExpiredRecallRequests(long expirationTime, int purgeSize) { - /** - * Compute a new delay. It will be decreased if the number of purged requests is equal to the - * purge.size value. Otherwise, it will be increased until default value. - * - * @return the computed next interval predicted from last removed requests info - */ - private long computeNextDelay(TGarbageData gd) { + return new TapeRecallCatalog().purgeCatalog(expirationTime, purgeSize); + } - /* max delay from configuration in milliseconds */ - long maxDelay = config.getRequestPurgerPeriod() * 1000L; - /* min delay accepted in milliseconds */ - long minDelay = 10000L; + /** + * Compute a new delay. It will be decreased if the number of purged requests is equal to the + * purge.size value. Otherwise, it will be increased until default value. + * + * @return the computed next interval predicted from last removed requests info + */ + private long computeNextDelay(TGarbageData gd) { - long nextDelay; + /* max delay from configuration in milliseconds */ + long maxDelay = config.getRequestPurgerPeriod() * 1000L; + /* min delay accepted in milliseconds */ + long minDelay = 10000L; - /* Check purged requests value */ - if (gd.getTotalPurged() >= config.getPurgeBatchSize()) { + long nextDelay; - /* bunch size reached: decrease interval */ - nextDelay = Math.max(delay / 2, minDelay); + /* Check purged requests value */ + if (gd.getTotalPurged() >= config.getPurgeBatchSize()) { - } else { + /* bunch size reached: decrease interval */ + nextDelay = Math.max(delay / 2, minDelay); - /* bunch size not reached: increase interval */ - nextDelay = Math.min(delay * 2, maxDelay); + } else { - } + /* bunch size not reached: increase interval */ + nextDelay = Math.min(delay * 2, maxDelay); + } + + return nextDelay; + } - return nextDelay; - } - - /** - * Schedule another task after @delay milliseconds. - */ - private void reschedule() { - - handler.schedule(new RequestsGarbageCollector(handler, delay), delay); - } + /** Schedule another task after @delay milliseconds. */ + private void reschedule() { + + handler.schedule(new RequestsGarbageCollector(handler, delay), delay); + } } diff --git a/src/main/java/it/grid/storm/catalogs/timertasks/TGarbageData.java b/src/main/java/it/grid/storm/catalogs/timertasks/TGarbageData.java index a6caccfb..5d9a55e4 100644 --- a/src/main/java/it/grid/storm/catalogs/timertasks/TGarbageData.java +++ b/src/main/java/it/grid/storm/catalogs/timertasks/TGarbageData.java @@ -1,33 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.catalogs.timertasks; public class TGarbageData { - private final int nPurgedRequests; - private final int nPurgedRecalls; + private final int nPurgedRequests; + private final int nPurgedRecalls; - public static final TGarbageData EMPTY = new TGarbageData(0, 0); + public static final TGarbageData EMPTY = new TGarbageData(0, 0); - public TGarbageData(int nPurgedRequests, int nPurgedRecalls) { - this.nPurgedRequests = nPurgedRequests; - this.nPurgedRecalls = nPurgedRecalls; - } + public TGarbageData(int nPurgedRequests, int nPurgedRecalls) { + this.nPurgedRequests = nPurgedRequests; + this.nPurgedRecalls = nPurgedRecalls; + } - public int getTotalPurged() { + public int getTotalPurged() { - return nPurgedRequests + nPurgedRecalls; - } + return nPurgedRequests + nPurgedRecalls; + } - public int getTotalPurgedRequests() { + public int getTotalPurgedRequests() { - return nPurgedRequests; - } + return nPurgedRequests; + } - public int getTotalPurgedRecalls() { + public int getTotalPurgedRecalls() { - return nPurgedRecalls; - } -} \ No newline at end of file + return nPurgedRecalls; + } +} diff --git a/src/main/java/it/grid/storm/check/Check.java b/src/main/java/it/grid/storm/check/Check.java index 98573eac..636d940c 100644 --- a/src/main/java/it/grid/storm/check/Check.java +++ b/src/main/java/it/grid/storm/check/Check.java @@ -1,44 +1,39 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public interface Check { - /** - * Provides the name of the check - * - * @return - */ - public String getName(); + /** + * Provides the name of the check + * + * @return + */ + public String getName(); - /** - * Provides the description of the check - * - * @return - */ - public String getDescription(); + /** + * Provides the description of the check + * + * @return + */ + public String getDescription(); - /** - * Return true if the check is critical, if a critical check fails storm - * backend must not start - * - * @return - */ - public boolean isCritical(); + /** + * Return true if the check is critical, if a critical check fails storm backend must not start + * + * @return + */ + public boolean isCritical(); - /** - * Executes the check - * - * @return a successful CheckResponse if the check succeeds, a non successful - * with an error message otherwise - * @throws GenericCheckException - * if an error occurs during check execution that prevents check - * execution - */ - public CheckResponse execute() throws GenericCheckException; + /** + * Executes the check + * + * @return a successful CheckResponse if the check succeeds, a non successful with an error + * message otherwise + * @throws GenericCheckException if an error occurs during check execution that prevents check + * execution + */ + public CheckResponse execute() throws GenericCheckException; } diff --git a/src/main/java/it/grid/storm/check/CheckManager.java b/src/main/java/it/grid/storm/check/CheckManager.java index e45c4175..35129af3 100644 --- a/src/main/java/it/grid/storm/check/CheckManager.java +++ b/src/main/java/it/grid/storm/check/CheckManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; @@ -8,79 +7,71 @@ import java.util.List; import org.slf4j.Logger; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public abstract class CheckManager { - /** - * An ordered list of checks to be executed - */ - private final List checkSchedule = new ArrayList(); + /** An ordered list of checks to be executed */ + private final List checkSchedule = new ArrayList(); - /** - * Initializes the CheckManager loading the checks and organizing them in a - * checkSchedule - */ - public void init() { + /** Initializes the CheckManager loading the checks and organizing them in a checkSchedule */ + public void init() { - getLogger().debug("Initializing Check Manager"); - loadChecks(); - checkSchedule.addAll(prepareSchedule()); - getLogger().debug("Initialization completed"); - } + getLogger().debug("Initializing Check Manager"); + loadChecks(); + checkSchedule.addAll(prepareSchedule()); + getLogger().debug("Initialization completed"); + } - /** - * Load the check classes and eventually initialize them - */ - protected abstract void loadChecks(); + /** Load the check classes and eventually initialize them */ + protected abstract void loadChecks(); - /** - * Provides an ordered schedule of the loaded checks - * - * @return - */ - protected abstract List prepareSchedule(); + /** + * Provides an ordered schedule of the loaded checks + * + * @return + */ + protected abstract List prepareSchedule(); - /** - * @return a logger - */ - protected abstract Logger getLogger(); + /** @return a logger */ + protected abstract Logger getLogger(); - /** - * Executes the checkSchedule - * - * @return true if all the checks succeeds, false otherwise - */ - public CheckResponse lauchChecks() { + /** + * Executes the checkSchedule + * + * @return true if all the checks succeeds, false otherwise + */ + public CheckResponse lauchChecks() { - getLogger().debug("Executing check schedule"); - CheckResponse result = new CheckResponse(CheckStatus.SUCCESS, ""); - for (Check check : checkSchedule) { - getLogger().info("Executing check : {}", check.getName()); - getLogger().info("Check description : {}", check.getDescription()); - CheckResponse response; - try { - response = check.execute(); - } catch (GenericCheckException e) { - getLogger().warn("Received a GenericCheckException during {} check " - + "execution : {}", check.getName(), e.getMessage()); - response = new CheckResponse(CheckStatus.INDETERMINATE, - "Received a GenericCheckException during " + check.getName() - + " check execution : " + e.getMessage()); - } - getLogger().info("Check '{}' response is : {}", check.getName(), - response.toString()); - if (!response.isSuccessfull() && check.isCritical()) { - result.setStatus(CheckStatus.and(result.getStatus(), - CheckStatus.CRITICAL_FAILURE)); - } else { - result.setStatus(CheckStatus.and(result.getStatus(), - response.getStatus())); - } - getLogger().debug("Partial result is {}", - result.isSuccessfull() ? 
"success" : "failure"); - } - return result; - } + getLogger().debug("Executing check schedule"); + CheckResponse result = new CheckResponse(CheckStatus.SUCCESS, ""); + for (Check check : checkSchedule) { + getLogger().info("Executing check : {}", check.getName()); + getLogger().info("Check description : {}", check.getDescription()); + CheckResponse response; + try { + response = check.execute(); + } catch (GenericCheckException e) { + getLogger() + .warn( + "Received a GenericCheckException during {} check " + "execution : {}", + check.getName(), + e.getMessage()); + response = + new CheckResponse( + CheckStatus.INDETERMINATE, + "Received a GenericCheckException during " + + check.getName() + + " check execution : " + + e.getMessage()); + } + getLogger().info("Check '{}' response is : {}", check.getName(), response.toString()); + if (!response.isSuccessfull() && check.isCritical()) { + result.setStatus(CheckStatus.and(result.getStatus(), CheckStatus.CRITICAL_FAILURE)); + } else { + result.setStatus(CheckStatus.and(result.getStatus(), response.getStatus())); + } + getLogger().debug("Partial result is {}", result.isSuccessfull() ? "success" : "failure"); + } + return result; + } } diff --git a/src/main/java/it/grid/storm/check/CheckResponse.java b/src/main/java/it/grid/storm/check/CheckResponse.java index 9eb4ad36..ad2dc44e 100644 --- a/src/main/java/it/grid/storm/check/CheckResponse.java +++ b/src/main/java/it/grid/storm/check/CheckResponse.java @@ -1,64 +1,55 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class CheckResponse { - /** - * The final status of a check - */ - private CheckStatus status; + /** The final status of a check */ + private CheckStatus status; - /** - * An error message describing a check failure - */ - private final String errorMessage; + /** An error message describing a check failure */ + private final String errorMessage; - public CheckResponse(CheckStatus status, String message) { + public CheckResponse(CheckStatus status, String message) { - this.status = status; - this.errorMessage = message; - } + this.status = status; + this.errorMessage = message; + } - /** - * Returns true if the check status is successfull - * - * @return the successful - */ - public boolean isSuccessfull() { + /** + * Returns true if the check status is successfull + * + * @return the successful + */ + public boolean isSuccessfull() { - return this.status.equals(CheckStatus.SUCCESS); - } + return this.status.equals(CheckStatus.SUCCESS); + } - public CheckStatus getStatus() { + public CheckStatus getStatus() { - return this.status; - } + return this.status; + } - public void setStatus(CheckStatus status) { + public void setStatus(CheckStatus status) { - this.status = status; - } + this.status = status; + } - /** - * @return the error message (eventually blank) - */ - public String getMessage() { + /** @return the error message (eventually blank) */ + public String getMessage() { - return errorMessage; - } + return errorMessage; + } - public String toString() { + public String toString() { - if (errorMessage == null || errorMessage.trim().length() == 0) { - return status.toString(); - } else { - return "<" + status.toString() + " , " + errorMessage + ">"; - } - } + if (errorMessage == null || 
errorMessage.trim().length() == 0) { + return status.toString(); + } else { + return "<" + status.toString() + " , " + errorMessage + ">"; + } + } } diff --git a/src/main/java/it/grid/storm/check/CheckStatus.java b/src/main/java/it/grid/storm/check/CheckStatus.java index 5029ed8f..13b74d23 100644 --- a/src/main/java/it/grid/storm/check/CheckStatus.java +++ b/src/main/java/it/grid/storm/check/CheckStatus.java @@ -1,60 +1,59 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public enum CheckStatus { - SUCCESS, FAILURE, CRITICAL_FAILURE, NOT_APPLICABLE, INDETERMINATE; + SUCCESS, + FAILURE, + CRITICAL_FAILURE, + NOT_APPLICABLE, + INDETERMINATE; - /** - * Performs the logic "and" between status and bol - * - * @param status - * @param bol - * @return a CheckStatus that is successful only if bol is true and status is - * successful - */ - public static CheckStatus and(CheckStatus status, boolean bol) { + /** + * Performs the logic "and" between status and bol + * + * @param status + * @param bol + * @return a CheckStatus that is successful only if bol is true and status is successful + */ + public static CheckStatus and(CheckStatus status, boolean bol) { - CheckStatus otherStatus; - if (bol) { - otherStatus = SUCCESS; - } else { - otherStatus = FAILURE; - } - return and(status, otherStatus); - } + CheckStatus otherStatus; + if (bol) { + otherStatus = SUCCESS; + } else { + otherStatus = FAILURE; + } + return and(status, otherStatus); + } - /** - * Performs the logic "and" between status and bol - * - * @param status - * @param otherStatus - * @return a successful CheckStatus if the provided status and otherStatus are - * successful, a failed CheckStatus if at least one of status and - * otherStatus is failed, a notApplicable status if status and - * otherStatus are notApplicable, an indeterminate status otherwise - */ - public static CheckStatus and(CheckStatus status, CheckStatus otherStatus) { + /** + * Performs the logic "and" between status and bol + * + * @param status + * @param otherStatus + * @return a successful CheckStatus if the provided status and otherStatus are successful, a + * failed CheckStatus if at least one of status and otherStatus is failed, a notApplicable + * status if status and otherStatus are notApplicable, an indeterminate status otherwise + */ + public static CheckStatus and(CheckStatus status, CheckStatus otherStatus) { - if (SUCCESS.equals(status) && SUCCESS.equals(otherStatus)) { - return SUCCESS; - } - if ((FAILURE.equals(status) && FAILURE.equals(otherStatus)) - || ((FAILURE.equals(status) || FAILURE.equals(otherStatus)) && (SUCCESS - .equals(status) || SUCCESS.equals(otherStatus)))) { - return FAILURE; - } - if (CRITICAL_FAILURE.equals(status) || CRITICAL_FAILURE.equals(otherStatus)) { - return CRITICAL_FAILURE; - } - if (NOT_APPLICABLE.equals(status) && NOT_APPLICABLE.equals(otherStatus)) { - return NOT_APPLICABLE; - } - return INDETERMINATE; - } + if (SUCCESS.equals(status) && SUCCESS.equals(otherStatus)) { + return SUCCESS; + } + if ((FAILURE.equals(status) && FAILURE.equals(otherStatus)) + || ((FAILURE.equals(status) || FAILURE.equals(otherStatus)) + && (SUCCESS.equals(status) || SUCCESS.equals(otherStatus)))) { + return FAILURE; + } + if (CRITICAL_FAILURE.equals(status) || 
CRITICAL_FAILURE.equals(otherStatus)) { + return CRITICAL_FAILURE; + } + if (NOT_APPLICABLE.equals(status) && NOT_APPLICABLE.equals(otherStatus)) { + return NOT_APPLICABLE; + } + return INDETERMINATE; + } } diff --git a/src/main/java/it/grid/storm/check/GenericCheckException.java b/src/main/java/it/grid/storm/check/GenericCheckException.java index 993475f3..46ec80a6 100644 --- a/src/main/java/it/grid/storm/check/GenericCheckException.java +++ b/src/main/java/it/grid/storm/check/GenericCheckException.java @@ -1,18 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class GenericCheckException extends Exception { - private static final long serialVersionUID = -5467729262145881935L; + private static final long serialVersionUID = -5467729262145881935L; - public GenericCheckException(String message) { + public GenericCheckException(String message) { - super(message); - } + super(message); + } } diff --git a/src/main/java/it/grid/storm/check/SimpleCheckManager.java b/src/main/java/it/grid/storm/check/SimpleCheckManager.java index 1bf61e8c..c752e23d 100644 --- a/src/main/java/it/grid/storm/check/SimpleCheckManager.java +++ b/src/main/java/it/grid/storm/check/SimpleCheckManager.java @@ -1,100 +1,89 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; - import it.grid.storm.check.sanity.filesystem.NamespaceFSAssociationCheck; import it.grid.storm.check.sanity.filesystem.NamespaceFSExtendedACLUsageCheck; import it.grid.storm.check.sanity.filesystem.NamespaceFSExtendedAttributeUsageCheck; import it.grid.storm.filesystem.MtabUtil; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.model.VirtualFS; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class SimpleCheckManager extends CheckManager { - private static final Logger log = LoggerFactory - .getLogger(SimpleCheckManager.class); - - /** - * A list of checks to be executed - */ - private List checks = Lists.newArrayList(); - - @Override - protected Logger getLogger() { - - return log; - } - - @Override - protected void loadChecks() { - - /* Add by hand a new element for each requested check */ - try { - checks.add(getNamespaceFSAssociationCheck()); - } catch (IllegalStateException e) { - log.warn("Skipping NamespaceFSAssociationCheck. 
" - + "IllegalStateException: {}", e.getMessage()); - } - // checks.add(new NamespaceFSExtendedAttributeDeclarationCheck()); Removed - checks.add(new NamespaceFSExtendedAttributeUsageCheck()); - checks.add(new NamespaceFSExtendedACLUsageCheck()); - } - - /** - * - */ - private Check getNamespaceFSAssociationCheck() { - - Map mountPoints; - // load mstab mount points and file system types - try { - mountPoints = MtabUtil.getFSMountPoints(); - } catch (Exception e) { - log.error("Unable to get filesystem mount points. Exception: {}", e.getMessage()); - throw new IllegalStateException("Unable to get filesystem mount points"); - } - if (log.isDebugEnabled()) { - log.debug("Retrieved MountPoints: {}", printMapCouples(mountPoints)); - } - List vfsSet = NamespaceDirector.getNamespace().getAllDefinedVFS(); - return new NamespaceFSAssociationCheck(mountPoints, vfsSet); - } - - /** - * Prints the couple from a Map - * - * @param map - * @return - */ - private String printMapCouples(Map map) { - - String output = ""; - for (Entry couple : map.entrySet()) { - if (output.trim().length() != 0) { - output += " ; "; - } - output += "<" + couple.getKey() + "," + couple.getValue() + ">"; - } - return output; - } - - @Override - protected List prepareSchedule() { - - return checks; - } + private static final Logger log = LoggerFactory.getLogger(SimpleCheckManager.class); + + /** A list of checks to be executed */ + private List checks = Lists.newArrayList(); + + @Override + protected Logger getLogger() { + + return log; + } + + @Override + protected void loadChecks() { + + /* Add by hand a new element for each requested check */ + try { + checks.add(getNamespaceFSAssociationCheck()); + } catch (IllegalStateException e) { + log.warn( + "Skipping NamespaceFSAssociationCheck. " + "IllegalStateException: {}", e.getMessage()); + } + // checks.add(new NamespaceFSExtendedAttributeDeclarationCheck()); Removed + checks.add(new NamespaceFSExtendedAttributeUsageCheck()); + checks.add(new NamespaceFSExtendedACLUsageCheck()); + } + + /** */ + private Check getNamespaceFSAssociationCheck() { + + Map mountPoints; + // load mstab mount points and file system types + try { + mountPoints = MtabUtil.getFSMountPoints(); + } catch (Exception e) { + log.error("Unable to get filesystem mount points. Exception: {}", e.getMessage()); + throw new IllegalStateException("Unable to get filesystem mount points"); + } + if (log.isDebugEnabled()) { + log.debug("Retrieved MountPoints: {}", printMapCouples(mountPoints)); + } + List vfsSet = NamespaceDirector.getNamespace().getAllDefinedVFS(); + return new NamespaceFSAssociationCheck(mountPoints, vfsSet); + } + + /** + * Prints the couple from a Map + * + * @param map + * @return + */ + private String printMapCouples(Map map) { + + String output = ""; + for (Entry couple : map.entrySet()) { + if (output.trim().length() != 0) { + output += " ; "; + } + output += "<" + couple.getKey() + "," + couple.getValue() + ">"; + } + return output; + } + + @Override + protected List prepareSchedule() { + + return checks; + } } diff --git a/src/main/java/it/grid/storm/check/SimpleClassLoaderCheckManager.java b/src/main/java/it/grid/storm/check/SimpleClassLoaderCheckManager.java index bf838457..70fe3303 100644 --- a/src/main/java/it/grid/storm/check/SimpleClassLoaderCheckManager.java +++ b/src/main/java/it/grid/storm/check/SimpleClassLoaderCheckManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check; @@ -17,96 +16,96 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto THIS CLASS HAS TO BE TESTED - */ +/** @author Michele Dibenedetto THIS CLASS HAS TO BE TESTED */ public class SimpleClassLoaderCheckManager extends CheckManager { - private static final Logger log = LoggerFactory - .getLogger(SimpleClassLoaderCheckManager.class); + private static final Logger log = LoggerFactory.getLogger(SimpleClassLoaderCheckManager.class); - private ArrayList checks = new ArrayList(); + private ArrayList checks = new ArrayList(); - @Override - protected Logger getLogger() { + @Override + protected Logger getLogger() { - return log; - } + return log; + } - @Override - protected void loadChecks() { + @Override + protected void loadChecks() { - CodeSource source = SimpleClassLoaderCheckManager.class - .getProtectionDomain().getCodeSource(); - URL location = null; - if (source != null) { - location = source.getLocation(); - log.info("location: {}", location); - } - String packageResourcePath = "it" + File.separatorChar + "grid" - + File.separatorChar + "storm" + File.separatorChar + "check" - + File.separatorChar + "sanity"; - List classes = getClasseNamesInPackage(location.toString(), - packageResourcePath); - for (String className : classes) { - Class classe = null; - try { - classe = Class.forName(className); - } catch (ClassNotFoundException e) { - log.error(e.getMessage()); - } - Constructor constructor; - try { - constructor = classe.getConstructor(); - try { - Check check = (Check) constructor.newInstance(); - checks.add(check); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - } catch (InstantiationException e) { - log.error(e.getMessage(), e); - } catch (IllegalAccessException e) { - log.error(e.getMessage(), e); - } catch (InvocationTargetException e) { - log.error(e.getMessage(), e); - } - } catch (SecurityException e1) { - log.error(e1.getMessage(), e1); - } catch (NoSuchMethodException e1) { - log.error(e1.getMessage(), e1); - } - } - } + CodeSource source = SimpleClassLoaderCheckManager.class.getProtectionDomain().getCodeSource(); + URL location = null; + if (source != null) { + location = source.getLocation(); + log.info("location: {}", location); + } + String packageResourcePath = + "it" + + File.separatorChar + + "grid" + + File.separatorChar + + "storm" + + File.separatorChar + + "check" + + File.separatorChar + + "sanity"; + List classes = getClasseNamesInPackage(location.toString(), packageResourcePath); + for (String className : classes) { + Class classe = null; + try { + classe = Class.forName(className); + } catch (ClassNotFoundException e) { + log.error(e.getMessage()); + } + Constructor constructor; + try { + constructor = classe.getConstructor(); + try { + Check check = (Check) constructor.newInstance(); + checks.add(check); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + } catch (InstantiationException e) { + log.error(e.getMessage(), e); + } catch (IllegalAccessException e) { + log.error(e.getMessage(), e); + } catch (InvocationTargetException e) { + log.error(e.getMessage(), e); + } + } catch (SecurityException e1) { + log.error(e1.getMessage(), e1); + } catch (NoSuchMethodException e1) { + log.error(e1.getMessage(), e1); + } + } + } - private List getClasseNamesInPackage(String jarName, - String packageName) { + 
private List getClasseNamesInPackage(String jarName, String packageName) { - ArrayList arrayList = new ArrayList(); - packageName = packageName.replaceAll("\\.", "" + File.separatorChar); - try { - JarInputStream jarFile = new JarInputStream(new FileInputStream(jarName)); - JarEntry jarEntry; - while (true) { - jarEntry = jarFile.getNextJarEntry(); - if (jarEntry == null) { - break; - } - if ((jarEntry.getName().startsWith(packageName)) - && (jarEntry.getName().endsWith(".class"))) { - arrayList.add(jarEntry.getName().replaceAll("" + File.separatorChar, - "\\.")); - } - } - jarFile.close(); - } catch (Exception e) { - e.printStackTrace(); - } - return arrayList; - } + ArrayList arrayList = new ArrayList(); + packageName = packageName.replaceAll("\\.", "" + File.separatorChar); + try { + JarInputStream jarFile = new JarInputStream(new FileInputStream(jarName)); + JarEntry jarEntry; + while (true) { + jarEntry = jarFile.getNextJarEntry(); + if (jarEntry == null) { + break; + } + if ((jarEntry.getName().startsWith(packageName)) + && (jarEntry.getName().endsWith(".class"))) { + arrayList.add(jarEntry.getName().replaceAll("" + File.separatorChar, "\\.")); + } + } + jarFile.close(); + } catch (Exception e) { + e.printStackTrace(); + } + return arrayList; + } - @Override - protected List prepareSchedule() { + @Override + protected List prepareSchedule() { - return checks; - } + return checks; + } } diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/FakeGridUser.java b/src/main/java/it/grid/storm/check/sanity/filesystem/FakeGridUser.java index 79933492..64734ce0 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/FakeGridUser.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/FakeGridUser.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; @@ -11,22 +10,16 @@ public class FakeGridUser implements GridUserInterface { - /** - * - */ + /** */ private DistinguishedName dn; - /** - * @param dn - */ + /** @param dn */ public FakeGridUser(String dn) { this.setDN(dn); } - /** - * @param dnString - */ + /** @param dnString */ private void setDN(String dnString) { this.dn = new DistinguishedName(dnString); diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSAssociationCheck.java b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSAssociationCheck.java index 7056e2be..3403ac5a 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSAssociationCheck.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSAssociationCheck.java @@ -1,275 +1,275 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; -import java.io.File; -import java.io.IOException; -import java.util.Collection; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.check.Check; import it.grid.storm.check.CheckResponse; import it.grid.storm.check.CheckStatus; import it.grid.storm.check.GenericCheckException; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.namespace.naming.NamespaceUtil; +import java.io.File; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class NamespaceFSAssociationCheck implements Check { - private static final Logger log = LoggerFactory - .getLogger(NamespaceFSAssociationCheck.class); - - private static final String CHECK_NAME = "NamespaceFSvalidation"; - - private static final String CHECK_DESCRIPTION = "This check verifies that the file system type declared in namespace.xml and phisical filesystem type matches"; - - private static final boolean criticalCheck = true; - - private static final String POSIX_FILESYSTEM_TYPE = "ext3"; - - private Map mountPoints; - - private Collection vfsSet; - - private NamespaceFSAssociationCheck() { - - }; - - /** - * @param mountPoints - * @param vfsSet - * @throws IllegalArgumentException - */ - public NamespaceFSAssociationCheck(Map mountPoints, - Collection vfsSet) throws IllegalArgumentException { - - this(); - if (mountPoints == null || vfsSet == null) { - log.error("Unable to create NamespaceFSAssociationCheck received null " - + "arguments: mountPoints={} vfsSet={}", mountPoints, vfsSet); - throw new IllegalArgumentException( - "Unable to create NamespaceFSAssociationCheck received null arguments"); - } - if (!verifyMountPoints(mountPoints)) { - log.error("Unable to create NamespaceFSAssociationCheck received " - + "invalid mountPoints"); - throw new IllegalArgumentException( - "Unable to create NamespaceFSAssociationCheck received invalid mountPoints"); - } - if (!verifyVfsSet(vfsSet)) { - log.error("Unable to create NamespaceFSAssociationCheck received " - + "invalid vfsSet"); - throw new IllegalArgumentException( - "Unable to create NamespaceFSAssociationCheck received invalid vfsSet"); - } - this.mountPoints = mountPoints; - this.vfsSet = vfsSet; - }; - - /** - * @param vfsSet - * @return - */ - private boolean verifyVfsSet(Collection vfsSet) { - - for (VirtualFS vfs : vfsSet) { - if (vfs == null) { - log.info("The vfsSet contains null entries"); - return false; - } - if (vfs.getFSType() == null) { - log.info("The vfs {} has null FSType", vfs.getAliasName()); - return false; - } - - if (vfs.getRootPath() == null) { - log.info("The vfs {} has null rootPath", vfs.getAliasName()); - return false; - } - - } - log.debug("verifyVfsSet: vfsSet is valid"); - return true; - } - - /** - * @param mountPoints - * @return - */ - private boolean verifyMountPoints(Map mountPoints) { - - for (String key : mountPoints.keySet()) { - if (key == null) { - log.info("The mountPoints map contains null keys"); - return false; - } - if (mountPoints.get(key) == null) { - log.info("The mountPoint key {} points to a null value", key); - return false; - } - } - log.debug("verifyMountPoints: mountPoints is valid"); - return true; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.check.Check#execute() - */ - @Override - public CheckResponse 
execute() throws GenericCheckException { - - CheckStatus status = CheckStatus.SUCCESS; - String errorMessage = ""; - - for (VirtualFS vfs : vfsSet) { - // check if is simple posix FS - boolean currentResponse = verifyPosixDeclaredFS(vfs.getFSType()); - if (!currentResponse) { - // check their association against mtab - currentResponse = this.check(vfs.getRootPath(), vfs.getFSType(), - mountPoints); - } - if (!currentResponse) { - log.error("Check on VFS {} failed. FSType={}, rootPath={}", - vfs.getAliasName(), vfs.getFSType(), vfs.getRootPath()); - errorMessage += "Check on VFS " + vfs.getAliasName() - + " failed. Type =" + vfs.getFSType() + " , root path =" - + vfs.getRootPath(); - } - log.debug("Check response for path {} is {}", vfs.getRootPath(), - currentResponse ? "success" : "failure"); - status = CheckStatus.and(status, currentResponse); - log.debug("Partial result is {}", status.toString()); - } - - return new CheckResponse(status, errorMessage); - } - - /** - * @param string - * @return - */ - private boolean verifyPosixDeclaredFS(String FSType) - throws IllegalArgumentException { - - if (FSType == null) { - log.error("Unable to check posix filesystem declaration received null " - + "argument: FSType={}", FSType); - throw new IllegalArgumentException( - "Unable to check posix filesystem declaration received null argument"); - } - return POSIX_FILESYSTEM_TYPE.equals(FSType.trim()); - } - - /** - * Checks if the provided fsRootPath in the provided mountPoints map has the - * provided fsType - * - * @param fsRootPath - * @param fsType - * @param mountPoints - * @return - */ - private boolean check(String fsRootPath, String fsType, - Map mountPoints) { - - boolean response = false; - log.debug("Checking fs at {} as a {}", fsRootPath, fsType); - String canonicalpath; - try { - canonicalpath = new File(fsRootPath).getCanonicalPath(); - } catch (IOException e) { - log.error("unable to build the canonical path for root '{}'. " - + "IOException: {}", fsRootPath, e.getMessage()); - return false; - } - String mountPointFSType = getMountPointFSTypeBestmatch(canonicalpath, - mountPoints); - if (mountPointFSType != null) { - log.debug("Found on a mountPoint of a '{}' FS", mountPointFSType); - if (fsType.equals(mountPointFSType)) { - response = true; - } else { - log.warn("Mount point File System type {} differs from the declared. " - + "Check failed", mountPointFSType, fsType); - } - } else { - log.warn("No file systems are mounted at path! 
Check failed", fsRootPath); - } - return response; - } - - /** - * @param fsRootPath - * @param mountPoints - * @return - */ - private String getMountPointFSTypeBestmatch(String fsRootPath, - Map mountPoints) { - - log.debug("Retrieving mout point for path {}", fsRootPath); - String fsType = null; - int minDistance = -1; - int pathSize = conputePathSize(fsRootPath); - for (String mountPoint : mountPoints.keySet()) { - if (conputePathSize(mountPoint) > pathSize) { - continue; - } - int distance = NamespaceUtil.computeDistanceFromPath(fsRootPath, - mountPoint); - if (distance >= 0 && distance <= pathSize) { - // is contained - if (fsType == null || distance < minDistance) { - minDistance = distance; - fsType = mountPoints.get(mountPoint).trim(); - } - } - } - return fsType; - } - - /** - * @param fsRootPath - * @return - */ - private int conputePathSize(String fsRootPath) { - - // If on windows...this split can be a problem - String[] elements = fsRootPath.split("/"); - int counter = 0; - for (String element : elements) { - if (element.trim().length() > 0) { - counter++; - } - } - return counter; - } - - @Override - public String getName() { - - return CHECK_NAME; - } - - @Override - public String getDescription() { - - return CHECK_DESCRIPTION; - } - - @Override - public boolean isCritical() { - - return criticalCheck; - } + private static final Logger log = LoggerFactory.getLogger(NamespaceFSAssociationCheck.class); + + private static final String CHECK_NAME = "NamespaceFSvalidation"; + + private static final String CHECK_DESCRIPTION = + "This check verifies that the file system type declared in namespace.xml and phisical filesystem type matches"; + + private static final boolean criticalCheck = true; + + private static final String POSIX_FILESYSTEM_TYPE = "ext3"; + + private Map mountPoints; + + private Collection vfsSet; + + private NamespaceFSAssociationCheck() {}; + + /** + * @param mountPoints + * @param vfsSet + * @throws IllegalArgumentException + */ + public NamespaceFSAssociationCheck(Map mountPoints, Collection vfsSet) + throws IllegalArgumentException { + + this(); + if (mountPoints == null || vfsSet == null) { + log.error( + "Unable to create NamespaceFSAssociationCheck received null " + + "arguments: mountPoints={} vfsSet={}", + mountPoints, + vfsSet); + throw new IllegalArgumentException( + "Unable to create NamespaceFSAssociationCheck received null arguments"); + } + if (!verifyMountPoints(mountPoints)) { + log.error("Unable to create NamespaceFSAssociationCheck received " + "invalid mountPoints"); + throw new IllegalArgumentException( + "Unable to create NamespaceFSAssociationCheck received invalid mountPoints"); + } + if (!verifyVfsSet(vfsSet)) { + log.error("Unable to create NamespaceFSAssociationCheck received " + "invalid vfsSet"); + throw new IllegalArgumentException( + "Unable to create NamespaceFSAssociationCheck received invalid vfsSet"); + } + this.mountPoints = mountPoints; + this.vfsSet = vfsSet; + }; + + /** + * @param vfsSet + * @return + */ + private boolean verifyVfsSet(Collection vfsSet) { + + for (VirtualFS vfs : vfsSet) { + if (vfs == null) { + log.info("The vfsSet contains null entries"); + return false; + } + if (vfs.getFSType() == null) { + log.info("The vfs {} has null FSType", vfs.getAliasName()); + return false; + } + + if (vfs.getRootPath() == null) { + log.info("The vfs {} has null rootPath", vfs.getAliasName()); + return false; + } + } + log.debug("verifyVfsSet: vfsSet is valid"); + return true; + } + + /** + * @param mountPoints + * @return + */ 
+ private boolean verifyMountPoints(Map mountPoints) { + + for (String key : mountPoints.keySet()) { + if (key == null) { + log.info("The mountPoints map contains null keys"); + return false; + } + if (mountPoints.get(key) == null) { + log.info("The mountPoint key {} points to a null value", key); + return false; + } + } + log.debug("verifyMountPoints: mountPoints is valid"); + return true; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.check.Check#execute() + */ + @Override + public CheckResponse execute() throws GenericCheckException { + + CheckStatus status = CheckStatus.SUCCESS; + String errorMessage = ""; + + for (VirtualFS vfs : vfsSet) { + // check if is simple posix FS + boolean currentResponse = verifyPosixDeclaredFS(vfs.getFSType()); + if (!currentResponse) { + // check their association against mtab + currentResponse = this.check(vfs.getRootPath(), vfs.getFSType(), mountPoints); + } + if (!currentResponse) { + log.error( + "Check on VFS {} failed. FSType={}, rootPath={}", + vfs.getAliasName(), + vfs.getFSType(), + vfs.getRootPath()); + errorMessage += + "Check on VFS " + + vfs.getAliasName() + + " failed. Type =" + + vfs.getFSType() + + " , root path =" + + vfs.getRootPath(); + } + log.debug( + "Check response for path {} is {}", + vfs.getRootPath(), + currentResponse ? "success" : "failure"); + status = CheckStatus.and(status, currentResponse); + log.debug("Partial result is {}", status.toString()); + } + + return new CheckResponse(status, errorMessage); + } + + /** + * @param string + * @return + */ + private boolean verifyPosixDeclaredFS(String FSType) throws IllegalArgumentException { + + if (FSType == null) { + log.error( + "Unable to check posix filesystem declaration received null " + "argument: FSType={}", + FSType); + throw new IllegalArgumentException( + "Unable to check posix filesystem declaration received null argument"); + } + return POSIX_FILESYSTEM_TYPE.equals(FSType.trim()); + } + + /** + * Checks if the provided fsRootPath in the provided mountPoints map has the provided fsType + * + * @param fsRootPath + * @param fsType + * @param mountPoints + * @return + */ + private boolean check(String fsRootPath, String fsType, Map mountPoints) { + + boolean response = false; + log.debug("Checking fs at {} as a {}", fsRootPath, fsType); + String canonicalpath; + try { + canonicalpath = new File(fsRootPath).getCanonicalPath(); + } catch (IOException e) { + log.error( + "unable to build the canonical path for root '{}'. " + "IOException: {}", + fsRootPath, + e.getMessage()); + return false; + } + String mountPointFSType = getMountPointFSTypeBestmatch(canonicalpath, mountPoints); + if (mountPointFSType != null) { + log.debug("Found on a mountPoint of a '{}' FS", mountPointFSType); + if (fsType.equals(mountPointFSType)) { + response = true; + } else { + log.warn( + "Mount point File System type {} differs from the declared. " + "Check failed", + mountPointFSType, + fsType); + } + } else { + log.warn("No file systems are mounted at path! 
Check failed", fsRootPath); + } + return response; + } + + /** + * @param fsRootPath + * @param mountPoints + * @return + */ + private String getMountPointFSTypeBestmatch(String fsRootPath, Map mountPoints) { + + log.debug("Retrieving mout point for path {}", fsRootPath); + String fsType = null; + int minDistance = -1; + int pathSize = conputePathSize(fsRootPath); + for (String mountPoint : mountPoints.keySet()) { + if (conputePathSize(mountPoint) > pathSize) { + continue; + } + int distance = NamespaceUtil.computeDistanceFromPath(fsRootPath, mountPoint); + if (distance >= 0 && distance <= pathSize) { + // is contained + if (fsType == null || distance < minDistance) { + minDistance = distance; + fsType = mountPoints.get(mountPoint).trim(); + } + } + } + return fsType; + } + + /** + * @param fsRootPath + * @return + */ + private int conputePathSize(String fsRootPath) { + + // If on windows...this split can be a problem + String[] elements = fsRootPath.split("/"); + int counter = 0; + for (String element : elements) { + if (element.trim().length() > 0) { + counter++; + } + } + return counter; + } + + @Override + public String getName() { + + return CHECK_NAME; + } + + @Override + public String getDescription() { + + return CHECK_DESCRIPTION; + } + + @Override + public boolean isCritical() { + + return criticalCheck; + } } diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedACLUsageCheck.java b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedACLUsageCheck.java index e9b9350d..b35b026a 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedACLUsageCheck.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedACLUsageCheck.java @@ -1,16 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; -import java.io.File; -import java.io.IOException; -import java.util.Calendar; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.check.Check; import it.grid.storm.check.CheckResponse; import it.grid.storm.check.CheckStatus; @@ -23,21 +15,22 @@ import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.VirtualFS; +import java.io.File; +import java.io.IOException; +import java.util.Calendar; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class NamespaceFSExtendedACLUsageCheck implements Check { private static final Logger log = LoggerFactory.getLogger(NamespaceFSExtendedACLUsageCheck.class); private static final String CHECK_NAME = "NamespaceFSEACLTest"; private static final String CHECK_DESCRIPTION = "This check tries to use file system extended ACL on all the file systems declared in namespace.xml"; - /** - * The maximum number of attempts of temporary file creation - */ + /** The maximum number of attempts of temporary file creation */ private static final int MAX_FILE_CREATION_ATTEMPTS = 10; + private static final GridUserInterface TEST_USER = new FakeGridUser("/C=IT/O=INFN/L=CNAF/CN=Fake User"); private static LocalUser TEST_LOCAL_USER = null; @@ -70,10 +63,13 @@ public CheckResponse execute() throws GenericCheckException { try { checkFile = provideCheckFile(fsRootPath, TEST_FILE_INFIX); } catch (GenericCheckException e) { - log.warn("Unable to obtain a check temporary file. " + "GenericCheckException: {}", + log.warn( + "Unable to obtain a check temporary file. " + "GenericCheckException: {}", e.getMessage()); - errorMessage += "Unable to obtain a check temporary file. GenericCheckException : " - + e.getMessage() + "; "; + errorMessage += + "Unable to obtain a check temporary file. GenericCheckException : " + + e.getMessage() + + "; "; status = CheckStatus.INDETERMINATE; continue; } @@ -84,17 +80,30 @@ public CheckResponse execute() throws GenericCheckException { log.error( "Check on VFS {} to add an extended ACL on file {} failed. " + "File System type = {}, root path = {}", - vfs.getAliasName(), checkFile.getAbsolutePath(), vfs.getFSType(), fsRootPath); - errorMessage += "Check on VFS " + vfs.getAliasName() + " to add an extended ACL on file " - + checkFile.getAbsolutePath() + " failed. File System type =" + vfs.getFSType() - + " , root path =" + fsRootPath + "; "; + vfs.getAliasName(), + checkFile.getAbsolutePath(), + vfs.getFSType(), + fsRootPath); + errorMessage += + "Check on VFS " + + vfs.getAliasName() + + " to add an extended ACL on file " + + checkFile.getAbsolutePath() + + " failed. File System type =" + + vfs.getFSType() + + " , root path =" + + fsRootPath + + "; "; } - log.debug("Check response for path {} is {}", fsRootPath, + log.debug( + "Check response for path {} is {}", + fsRootPath, currentResponse ? 
"success" : "failure"); status = CheckStatus.and(status, currentResponse); log.debug("Partial result is {}", status.toString()); if (!checkFile.delete()) { - log.warn("Unable to delete the temporary file used for the check {}", + log.warn( + "Unable to delete the temporary file used for the check {}", checkFile.getAbsolutePath()); } } @@ -110,7 +119,7 @@ public CheckResponse execute() throws GenericCheckException { /** * Provides a File located in rootPath with a pseudo-random name. It tries to provide the file and * in case of error retries for MAX_FILE_CREATION_ATTEMPTS times changing file name - * + * * @param rootPath * @return * @throws GenericCheckException if is unable to provide a valid file @@ -126,8 +135,8 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck if (checkFile.exists()) { if (checkFile.isFile()) { fileAvailable = true; - log.debug("A good check temporary file already exists at {}", - checkFile.getAbsolutePath()); + log.debug( + "A good check temporary file already exists at {}", checkFile.getAbsolutePath()); } else { log.warn( "Unable to create check file, it already exists but is not " + "a simple file : {}", @@ -140,14 +149,17 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck log.debug("Created check temporary file at {}", checkFile.getAbsolutePath()); } } catch (IOException e) { - log.warn("Unable to create the check file : {}. IOException: {}", - checkFile.getAbsolutePath(), e.getMessage()); + log.warn( + "Unable to create the check file : {}. IOException: {}", + checkFile.getAbsolutePath(), + e.getMessage()); } } attempCount++; } if (!fileAvailable) { - log.warn("Unable to create check file, reaced maximum iterations at " + "path : {}", + log.warn( + "Unable to create check file, reaced maximum iterations at " + "path : {}", checkFile.getAbsolutePath()); throw new GenericCheckException( "Unable to create the check file for root path '" + rootPath + "'"); @@ -158,11 +170,11 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck /** * Tries to write CHECK_ATTRIBUTE_NAME EA on file with value CHECK_ATTRIBUTE_VALUE, retrieve its * value and remove it - * + * * @param file * @param filesystem * @return true if the write, read and remove operations succeeds and the retrieved value matches - * CHECK_ATTRIBUTE_VALUE + * CHECK_ATTRIBUTE_VALUE */ private boolean checkEACL(File file, FilesystemIF filesystem) { @@ -175,20 +187,27 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { oldPermisssion = FilesystemPermission.None; } FilesystemPermission testPermission = TEST_PERMISSION.deny(oldPermisssion); - log.debug("Trying to set the extended ACL {} to group {} on file {}", testPermission, - TEST_LOCAL_USER.getPrimaryGid(), file.getAbsolutePath()); + log.debug( + "Trying to set the extended ACL {} to group {} on file {}", + testPermission, + TEST_LOCAL_USER.getPrimaryGid(), + file.getAbsolutePath()); filesystem.grantGroupPermission(TEST_LOCAL_USER, file.getAbsolutePath(), testPermission); log.debug("Original group permission : {}", oldPermisssion); - log.debug("Trying to get the extended ACL of group {} from file {}", - TEST_LOCAL_USER.getPrimaryGid(), file.getAbsolutePath()); + log.debug( + "Trying to get the extended ACL of group {} from file {}", + TEST_LOCAL_USER.getPrimaryGid(), + file.getAbsolutePath()); FilesystemPermission currentPermission = filesystem.getGroupPermission(TEST_LOCAL_USER, file.getAbsolutePath()); if (currentPermission == null) { currentPermission 
= FilesystemPermission.None; } log.debug("Returned value is '{}'", currentPermission); - log.debug("Trying to remove the extended group ACL {} from file {}", testPermission, + log.debug( + "Trying to remove the extended group ACL {} from file {}", + testPermission, file.getAbsolutePath()); FilesystemPermission previousPermission = filesystem.revokeGroupPermission(TEST_LOCAL_USER, file.getAbsolutePath(), testPermission); @@ -197,8 +216,11 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { } log.debug("Revoked group permission is : {}", previousPermission); if (currentPermission.getInt() != previousPermission.getInt()) { - log.warn("Undesired behaviour! The revoked extended group ACL value '{}' " - + "differs from the one setted '{}'", previousPermission, currentPermission); + log.warn( + "Undesired behaviour! The revoked extended group ACL value '{}' " + + "differs from the one setted '{}'", + previousPermission, + currentPermission); response &= false; } else { response &= true; @@ -209,8 +231,11 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { } log.debug("Final group permission is : {}", currentPermission); if (currentPermission.getInt() != oldPermisssion.getInt()) { - log.warn("Undesired behaviour! The final extended group ACL value '{}' " - + "differs from the original '{}'", currentPermission, oldPermisssion); + log.warn( + "Undesired behaviour! The final extended group ACL value '{}' " + + "differs from the original '{}'", + currentPermission, + oldPermisssion); response &= false; } else { response &= true; @@ -220,18 +245,25 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { oldPermisssion = FilesystemPermission.None; } testPermission = TEST_PERMISSION.deny(oldPermisssion); - log.debug("Trying to set the extended ACL {} to user {} on file {}", testPermission, - TEST_LOCAL_USER.getUid(), file.getAbsolutePath()); + log.debug( + "Trying to set the extended ACL {} to user {} on file {}", + testPermission, + TEST_LOCAL_USER.getUid(), + file.getAbsolutePath()); filesystem.grantUserPermission(TEST_LOCAL_USER, file.getAbsolutePath(), testPermission); log.debug("Original user permission : {}", oldPermisssion); - log.debug("Trying to get the extended ACL of user {} from file {}", TEST_LOCAL_USER.getUid(), + log.debug( + "Trying to get the extended ACL of user {} from file {}", + TEST_LOCAL_USER.getUid(), file.getAbsolutePath()); currentPermission = filesystem.getUserPermission(TEST_LOCAL_USER, file.getAbsolutePath()); if (currentPermission == null) { currentPermission = FilesystemPermission.None; } log.debug("Returned value is '{}'", currentPermission); - log.debug("Trying to remove the extended user ACL {} from file {}", testPermission, + log.debug( + "Trying to remove the extended user ACL {} from file {}", + testPermission, file.getAbsolutePath()); previousPermission = filesystem.revokeUserPermission(TEST_LOCAL_USER, file.getAbsolutePath(), testPermission); @@ -240,8 +272,11 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { } log.debug("Revoked user permission is : {}", previousPermission); if (currentPermission.getInt() != previousPermission.getInt()) { - log.warn("Undesired behaviour! The removed extended user ACL value '{}' " - + "differs from the one setted '{}'", previousPermission, currentPermission); + log.warn( + "Undesired behaviour! 
The removed extended user ACL value '{}' " + + "differs from the one setted '{}'", + previousPermission, + currentPermission); response &= false; } else { response &= true; @@ -252,8 +287,11 @@ private boolean checkEACL(File file, FilesystemIF filesystem) { } log.debug("Final user permission is : {}", currentPermission); if (currentPermission.getInt() != oldPermisssion.getInt()) { - log.warn("Undesired behaviour! The final extended user ACL value '{}' " - + "differs from the original '{}'", currentPermission, oldPermisssion); + log.warn( + "Undesired behaviour! The final extended user ACL value '{}' " + + "differs from the original '{}'", + currentPermission, + oldPermisssion); response &= false; } else { response &= true; diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeDeclarationCheck.java b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeDeclarationCheck.java index 7fa8d10b..cc379b3d 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeDeclarationCheck.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeDeclarationCheck.java @@ -1,15 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; -import java.io.IOException; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.check.Check; import it.grid.storm.check.CheckResponse; import it.grid.storm.check.CheckStatus; @@ -18,10 +11,12 @@ import it.grid.storm.filesystem.MtabUtil; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.model.VirtualFS; +import java.io.IOException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class NamespaceFSExtendedAttributeDeclarationCheck implements Check { private static final Logger log = @@ -47,7 +42,8 @@ public CheckResponse execute() throws GenericCheckException { rows = MtabUtil.getRows(); } catch (IOException e) { log.warn("Unable to get the rows from mtab. IOException : {}", e.getMessage()); - return new CheckResponse(CheckStatus.INDETERMINATE, + return new CheckResponse( + CheckStatus.INDETERMINATE, "Check not performed. Unable to get the rows from mtab. IOException : " + e.getMessage()); } log.debug("Retrieved Mtab : {}", rows.toString()); @@ -58,7 +54,9 @@ public CheckResponse execute() throws GenericCheckException { if (fsTypeName == null || fsRootPath == null) { log.warn( "Skipping chek on VFS with alias '{}' has null type ->{}<- " + "or root path ->{}<-", - vfs.getAliasName(), vfs.getFSType(), vfs.getRootPath()); + vfs.getAliasName(), + vfs.getFSType(), + vfs.getRootPath()); continue; } log.debug("Checking fs at {} with type {}", fsRootPath, fsTypeName); @@ -73,11 +71,17 @@ public CheckResponse execute() throws GenericCheckException { try { fsType = SupportedFSType.parseFS(fsTypeName); } catch (IllegalArgumentException e) { - log.warn("Unable to get the SupportedFSType for file system '{}'. " - + "IllegalArgumentException: {}", fsTypeName, e.getMessage()); + log.warn( + "Unable to get the SupportedFSType for file system '{}'. 
" + + "IllegalArgumentException: {}", + fsTypeName, + e.getMessage()); throw new GenericCheckException( - "Unable to get the " + "SupportedFSType for file system \'" + fsTypeName - + "\' IllegalArgumentException: " + e.getMessage()); + "Unable to get the " + + "SupportedFSType for file system \'" + + fsTypeName + + "\' IllegalArgumentException: " + + e.getMessage()); } // given the file system specified in the row check if the @@ -90,12 +94,13 @@ public CheckResponse execute() throws GenericCheckException { case GPFS: retrievedStatus = checkGPFS(row.getMountOptions()); break; - default: { - log.error("Unable to switch on the provided SupportedFSType " + "(unknown): {}", - fsType); - throw new GenericCheckException( - "Unable to switch on the " + "provided SupportedFSType (unknown) : " + fsType); - } + default: + { + log.error( + "Unable to switch on the provided SupportedFSType " + "(unknown): {}", fsType); + throw new GenericCheckException( + "Unable to switch on the " + "provided SupportedFSType (unknown) : " + fsType); + } } if (!retrievedStatus.equals(CheckStatus.SUCCESS)) { log.error("Check failed for file system at {} with type {}", fsRootPath, fsType); @@ -118,13 +123,14 @@ public CheckResponse execute() throws GenericCheckException { /** * Checks if the ext3 mount option POSIX_EXTENDED_ATTRIBUTES_OPTION_NAME is in the provided mount * options list - * + * * @param fsOptions a comma separated list of mount options * @return a successful CheckStatus if the option is available */ private CheckStatus checkEXT3(List fsOptions) { - log.debug("Checking ext3 file system estended attribute options " + "against '{}'", + log.debug( + "Checking ext3 file system estended attribute options " + "against '{}'", fsOptions.toString()); CheckStatus response = CheckStatus.FAILURE; if (fsOptions.contains(POSIX_EXTENDED_ATTRIBUTES_OPTION_NAME)) { @@ -136,13 +142,14 @@ private CheckStatus checkEXT3(List fsOptions) { /** * Checks if the gpfs mount option is in the provided mount options list - * + * * @param fsOptions a comma separated list of mount options * @return always a successful CheckStatus, gpfs has always EA enabled */ private CheckStatus checkGPFS(List fsOptions) { - log.debug("Checking gpfs file system estended attribute options " + "against '{}'", + log.debug( + "Checking gpfs file system estended attribute options " + "against '{}'", fsOptions.toString()); /* * According to Vladimir for GPFS the EA are enabled by default and their status doesn't have diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeUsageCheck.java b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeUsageCheck.java index b607e96e..bfd44fbd 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeUsageCheck.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/NamespaceFSExtendedAttributeUsageCheck.java @@ -1,16 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; -import java.io.File; -import java.io.IOException; -import java.util.Calendar; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.check.Check; import it.grid.storm.check.CheckResponse; import it.grid.storm.check.CheckStatus; @@ -20,10 +12,13 @@ import it.grid.storm.ea.ExtendedAttributesFactory; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.model.VirtualFS; +import java.io.File; +import java.io.IOException; +import java.util.Calendar; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class NamespaceFSExtendedAttributeUsageCheck implements Check { private static final Logger log = @@ -34,21 +29,15 @@ public class NamespaceFSExtendedAttributeUsageCheck implements Check { private static final String CHECK_DESCRIPTION = "This check tries to use file system extended attributes on all the file systems declared in namespace.xml"; - /** - * The maximum number of attempts of temporary file creation - */ + /** The maximum number of attempts of temporary file creation */ private static final int MAX_FILE_CREATION_ATTEMPTS = 10; private static final String TEST_FILE_INFIX = "EA-check-file-N_"; - /** - * An extended attribute to be used in the check - */ + /** An extended attribute to be used in the check */ private static final String CHECK_ATTRIBUTE_NAME = "user.Are.you.a.check"; - /** - * THe value to be assigned to the extended attribute CHECK_ATTRIBUTE_NAME in the check - */ + /** THe value to be assigned to the extended attribute CHECK_ATTRIBUTE_NAME in the check */ private static final String CHECK_ATTRIBUTE_VALUE = "Yes.I.am"; private final ExtendedAttributes extendedAttribute = @@ -72,10 +61,14 @@ public CheckResponse execute() throws GenericCheckException { try { checkFile = provideCheckFile(fsRootPath, TEST_FILE_INFIX); } catch (GenericCheckException e) { - log.warn("Unable to obtain a check temporary file. " + "GenericCheckException: {}", + log.warn( + "Unable to obtain a check temporary file. " + "GenericCheckException: {}", e.getMessage()); - errorMessage += "Unable to obtain a check temporary file. " + "GenericCheckException : " - + e.getMessage() + "; "; + errorMessage += + "Unable to obtain a check temporary file. " + + "GenericCheckException : " + + e.getMessage() + + "; "; status = CheckStatus.INDETERMINATE; continue; } @@ -85,17 +78,28 @@ public CheckResponse execute() throws GenericCheckException { log.warn( "Check on VFS {} to add EA on file {} failed. File System " + "type = {}, root path = {}", - vfs.getAliasName(), checkFile.getAbsolutePath(), vfs.getFSType(), fsRootPath); - errorMessage += "Check on VFS " + vfs.getAliasName() + " to add EA on file " - + checkFile.getAbsolutePath() + " failed. File System type =" + vfs.getFSType() - + " , root path =" + fsRootPath + "; "; + vfs.getAliasName(), + checkFile.getAbsolutePath(), + vfs.getFSType(), + fsRootPath); + errorMessage += + "Check on VFS " + + vfs.getAliasName() + + " to add EA on file " + + checkFile.getAbsolutePath() + + " failed. File System type =" + + vfs.getFSType() + + " , root path =" + + fsRootPath + + "; "; } - log.debug("Check response for path {} is {}", fsRootPath, - currentResponse ? "success" : "failure"); + log.debug( + "Check response for path {} is {}", fsRootPath, currentResponse ? 
"success" : "failure"); status = CheckStatus.and(status, currentResponse); log.debug("Partial result is {}", status.toString()); if (!checkFile.delete()) { - log.warn("Unable to delete the temporary file used for the check {}", + log.warn( + "Unable to delete the temporary file used for the check {}", checkFile.getAbsolutePath()); } } @@ -105,7 +109,7 @@ public CheckResponse execute() throws GenericCheckException { /** * Provides a File located in rootPath with a pseudo-random name. It tries to provide the file and * in case of error retries for MAX_FILE_CREATION_ATTEMPTS times changing file name - * + * * @param rootPath * @param infix * @return @@ -122,8 +126,8 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck if (checkFile.exists()) { if (checkFile.isFile()) { fileAvailable = true; - log.debug("A good check temporary file already exists at {}", - checkFile.getAbsolutePath()); + log.debug( + "A good check temporary file already exists at {}", checkFile.getAbsolutePath()); } else { log.warn( "Unable to create check file, it already exists but is not " + "a simple file: {}", @@ -136,14 +140,17 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck log.debug("Created check temporary file at {}", checkFile.getAbsolutePath()); } } catch (IOException e) { - log.warn("Unable to create the check file: {}. IOException: {}", - checkFile.getAbsolutePath(), e.getMessage()); + log.warn( + "Unable to create the check file: {}. IOException: {}", + checkFile.getAbsolutePath(), + e.getMessage()); } } attempCount++; } if (!fileAvailable) { - log.warn("Unable to create check file, reached maximum iterations at " + "path: {}", + log.warn( + "Unable to create check file, reached maximum iterations at " + "path: {}", checkFile.getAbsolutePath()); throw new GenericCheckException( "Unable to create the check file for root path '" + rootPath + "'"); @@ -154,42 +161,52 @@ private File provideCheckFile(String rootPath, String infix) throws GenericCheck /** * Tries to write CHECK_ATTRIBUTE_NAME EA on file with value CHECK_ATTRIBUTE_VALUE, retrieve its * value and remove it - * + * * @param file * @return true if the write, read and remove operations succeeds and the retrieved value matches - * CHECK_ATTRIBUTE_VALUE + * CHECK_ATTRIBUTE_VALUE */ private boolean checkEA(File file) { boolean response = false; log.debug("Testing extended attribute management on file {}", file.getAbsolutePath()); try { - log.debug("Trying to set the extended attribute {} to value {} on file {}", - CHECK_ATTRIBUTE_NAME, CHECK_ATTRIBUTE_VALUE, file.getAbsolutePath()); + log.debug( + "Trying to set the extended attribute {} to value {} on file {}", + CHECK_ATTRIBUTE_NAME, + CHECK_ATTRIBUTE_VALUE, + file.getAbsolutePath()); - extendedAttribute.setXAttr(file.getAbsolutePath(), CHECK_ATTRIBUTE_NAME, - CHECK_ATTRIBUTE_VALUE); + extendedAttribute.setXAttr( + file.getAbsolutePath(), CHECK_ATTRIBUTE_NAME, CHECK_ATTRIBUTE_VALUE); - log.debug("Trying to get the extended attribute {} from file {}", CHECK_ATTRIBUTE_NAME, + log.debug( + "Trying to get the extended attribute {} from file {}", + CHECK_ATTRIBUTE_NAME, file.getAbsolutePath()); String value = extendedAttribute.getXAttr(file.getAbsolutePath(), CHECK_ATTRIBUTE_NAME); log.debug("Returned value is '{}'", value); - log.debug("Trying to remove the extended attribute {} from file {}", CHECK_ATTRIBUTE_NAME, + log.debug( + "Trying to remove the extended attribute {} from file {}", + CHECK_ATTRIBUTE_NAME, file.getAbsolutePath()); 
extendedAttribute.rmXAttr(file.getAbsolutePath(), CHECK_ATTRIBUTE_NAME); if (!CHECK_ATTRIBUTE_VALUE.equals(value)) { - log.warn("Undesired behaviour! The returned extended attribute " - + "value '{}' differs from the one setted '{}'", value, CHECK_ATTRIBUTE_VALUE); + log.warn( + "Undesired behaviour! The returned extended attribute " + + "value '{}' differs from the one setted '{}'", + value, + CHECK_ATTRIBUTE_VALUE); } else { response = true; } } catch (ExtendedAttributesException e) { log.warn( "Unable to manage extended attributes on file {}. " + "ExtendedAttributesException: {}", - file.getAbsolutePath(), e.getMessage()); + file.getAbsolutePath(), + e.getMessage()); } return response; - } @Override diff --git a/src/main/java/it/grid/storm/check/sanity/filesystem/SupportedFSType.java b/src/main/java/it/grid/storm/check/sanity/filesystem/SupportedFSType.java index 2bbb821b..e04b67b9 100644 --- a/src/main/java/it/grid/storm/check/sanity/filesystem/SupportedFSType.java +++ b/src/main/java/it/grid/storm/check/sanity/filesystem/SupportedFSType.java @@ -1,33 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.check.sanity.filesystem; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public enum SupportedFSType { - EXT3, GPFS; + EXT3, + GPFS; - /** - * Parses the provided fsString and returns the matching SupportedFSType - * - * @param fsString - * @return - * @throws IllegalArgumentException - * if the provided fsString does not match any SupportedFSType - */ - public static SupportedFSType parseFS(String fsString) - throws IllegalArgumentException { + /** + * Parses the provided fsString and returns the matching SupportedFSType + * + * @param fsString + * @return + * @throws IllegalArgumentException if the provided fsString does not match any SupportedFSType + */ + public static SupportedFSType parseFS(String fsString) throws IllegalArgumentException { - if (fsString.trim().equals("gpfs")) { - return SupportedFSType.GPFS; - } - if (fsString.trim().equals("ext3")) { - return SupportedFSType.EXT3; - } - throw new IllegalArgumentException("Unable to parse file system string \'" - + fsString + "\' No matching value available"); - } + if (fsString.trim().equals("gpfs")) { + return SupportedFSType.GPFS; + } + if (fsString.trim().equals("ext3")) { + return SupportedFSType.EXT3; + } + throw new IllegalArgumentException( + "Unable to parse file system string \'" + fsString + "\' No matching value available"); + } } diff --git a/src/main/java/it/grid/storm/checksum/ChecksumAlgorithm.java b/src/main/java/it/grid/storm/checksum/ChecksumAlgorithm.java index 992e6f31..afa7691c 100644 --- a/src/main/java/it/grid/storm/checksum/ChecksumAlgorithm.java +++ b/src/main/java/it/grid/storm/checksum/ChecksumAlgorithm.java @@ -1,12 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.checksum; public enum ChecksumAlgorithm { - CRC32("CRC32"), ADLER32("ADLER32"), MD2("MD2"), MD5("MD5"), SHA_1("SHA-1"), SHA_256( - "SHA-256"), SHA_384("SHA-384"), SHA_512("SHA-512"); + CRC32("CRC32"), + ADLER32("ADLER32"), + MD2("MD2"), + MD5("MD5"), + SHA_1("SHA-1"), + SHA_256("SHA-256"), + SHA_384("SHA-384"), + SHA_512("SHA-512"); public static ChecksumAlgorithm getChecksumAlgorithm(String algorithm) { diff --git a/src/main/java/it/grid/storm/checksum/ChecksumManager.java b/src/main/java/it/grid/storm/checksum/ChecksumManager.java index c387dafb..0c92fe3b 100644 --- a/src/main/java/it/grid/storm/checksum/ChecksumManager.java +++ b/src/main/java/it/grid/storm/checksum/ChecksumManager.java @@ -1,16 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.checksum; import it.grid.storm.config.DefaultValue; import it.grid.storm.ea.ExtendedAttributesException; import it.grid.storm.ea.StormEA; - import java.io.FileNotFoundException; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,9 +34,9 @@ public static synchronized ChecksumManager getInstance() { /** * Return the algorithm used to compute checksums as well as retrieve the value from extended * attributes. - * + * * @return the algorithm used to compute checksums as well as retrieve the value from extended - * attributes. + * attributes. */ public ChecksumAlgorithm getDefaultAlgorithm() { @@ -52,10 +49,10 @@ public ChecksumAlgorithm getDefaultAlgorithm() { * checksum is enabled. - if ENABLED then the checksum is computed by an external service and * stored in an extended attribute. - if NOT ENABLED return with a NULL value. This method is * blocking (i.e. waits for the checksum to be computed, if it is enabled). - * + * * @param fileName file absolute path. * @return the computed checksum for the given file or null if some error occurred. - * The error is logged. + * The error is logged. * @throws FileNotFoundException */ public String getDefaultChecksum(String fileName) throws FileNotFoundException { @@ -74,10 +71,10 @@ public String getDefaultChecksum(String fileName) throws FileNotFoundException { /** * Checks whether the given file has a checksum stored in an extended attribute. - * + * * @param fileName file absolute path. - * @return true if an extended attribute storing the checksum was found, - * false otherwise. + * @return true if an extended attribute storing the checksum was found, false + * otherwise. 
* @throws ExtendedAttributesException * @throws NotSupportedException * @throws FileNotFoundException @@ -91,7 +88,9 @@ public boolean hasDefaultChecksum(String fileName) throws FileNotFoundException } catch (ExtendedAttributesException e) { log.warn( "Error manipulating EA for default algorithm {} on file: {} ExtendedAttributesException: {}", - defaultAlgorithm, fileName, e.getMessage()); + defaultAlgorithm, + fileName, + e.getMessage()); } return (value != null); @@ -101,5 +100,4 @@ public Map getChecksums(String fileName) throws FileN return StormEA.getChecksums(fileName); } - } diff --git a/src/main/java/it/grid/storm/checksum/ChecksumRuntimeException.java b/src/main/java/it/grid/storm/checksum/ChecksumRuntimeException.java index dd84382e..2dcfde74 100644 --- a/src/main/java/it/grid/storm/checksum/ChecksumRuntimeException.java +++ b/src/main/java/it/grid/storm/checksum/ChecksumRuntimeException.java @@ -1,29 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.checksum; public class ChecksumRuntimeException extends RuntimeException { - private static final long serialVersionUID = -6992922355763921291L; + private static final long serialVersionUID = -6992922355763921291L; - public ChecksumRuntimeException() { - } + public ChecksumRuntimeException() {} - public ChecksumRuntimeException(String message) { + public ChecksumRuntimeException(String message) { - super(message); - } + super(message); + } - public ChecksumRuntimeException(Throwable cause) { + public ChecksumRuntimeException(Throwable cause) { - super(cause); - } + super(cause); + } - public ChecksumRuntimeException(String message, Throwable cause) { - - super(message, cause); - } + public ChecksumRuntimeException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/common/GUID.java b/src/main/java/it/grid/storm/common/GUID.java index 2cb0de38..f51f30ce 100644 --- a/src/main/java/it/grid/storm/common/GUID.java +++ b/src/main/java/it/grid/storm/common/GUID.java @@ -1,196 +1,183 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common; import java.io.Serializable; import java.net.InetAddress; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class GUID implements Serializable { - private static final long serialVersionUID = -1750955753676929827L; - - private static final Logger log = LoggerFactory.getLogger(GUID.class); - - private byte guidValue[] = new byte[16]; - - public GUID() { - buildNewGUID(); - } - - public GUID(String guidString) { - - int pos = 0; - int count = 0; - - while (pos < guidString.length()) { - guidValue[count] = getByteValue(guidString.substring(pos, pos + 2)); - pos += 2; - count++; - - if (pos == guidString.length()) { - continue; - } - - if (guidString.charAt(pos) == '-') { - pos++; - } - } - } - - /** - * Calculates the byte from a hex string. - * - * @param hex - * A string hex value. - * @return a byte value. - */ - private byte getByteValue(String hex) { - - return (byte) Integer.parseInt(hex, 16); - } - - /** - * Calculates the hex string from a byte. - * - * @param val - * A byte value. - * @return a string hex value. 
- */ - private String getHexString(byte val) { - - String hexString; - if (val < 0) { - hexString = Integer.toHexString(val + 256); - } else { - hexString = Integer.toHexString(val); - } - - if (hexString.length() < 2) { - return "0" + hexString.toUpperCase(); - } - return hexString.toUpperCase(); - } - - /** - * Set the bytes in the array from another array. - * - * @param lg - * The other array of bytes. - * @param count - * How many there are. - * @param startPos - * The point in the main byte array these should go. - */ - private void setByteValues(byte[] lg, int startPos, int count) { - - for (int i = 0; i < count; i++) { - guidValue[i + startPos] = lg[i]; - } - } - - /** - * Sets the byte in the array to the bytes in a long value. In reverse - * order(IE Least significant byte goes first) - * - * @param lg - * The long value. - * @param count - * How many there are. - * @param startPos - * The point in the main byte array these should go. - */ - private void setByteValues(long lg, int startPos, int count) { - - for (int i = 0; i < count; i++) { - guidValue[i + startPos] = (byte) (lg & 0xFF); - lg = lg / 0xFF; - } - } - - /** - * Creates a new GUID from nowhere. Actually it uses the time, IPAddress and a - * random number. Stores it all in the main byte array. - */ - private void buildNewGUID() { - - try { - // The time in milli seconds for six bytes - // gives us until the year 10000ish. - long lg = System.currentTimeMillis(); - setByteValues(lg, 0, 6); - - // The hash code for this object for two bytes (As a why not option?) - lg = this.hashCode(); - setByteValues(lg, 6, 2); - - // The ip address for this computer (as we cannot get to the MAC address) - InetAddress inet = InetAddress.getLocalHost(); - byte[] bytes = inet.getAddress(); - setByteValues(bytes, 8, 4); - - // A random number for two bytes - lg = (long) ((Math.random() * 0xFFFF)); - setByteValues(lg, 12, 2); - - // Another random number for two bytes - lg = (long) ((Math.random() * 0xFFFF)); - setByteValues(lg, 14, 2); - - } catch (Exception e) { - log.error("Error while generating a GUID", e); - } - } - - /** - * Stores the 16 bytes seperately, this returns that array Not sure why - * though. - * - * @return array of bytes. - */ - public byte[] getBytes() { - - return guidValue; - } - - /** - * Overrides toString(). 
Returns the array of bytes in the standard form: - * xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - * - * @return the string format - */ - @Override - public String toString() { - - StringBuilder buf = new StringBuilder(); - - buf.append(getHexString(guidValue[0])); - buf.append(getHexString(guidValue[1])); - buf.append(getHexString(guidValue[2])); - buf.append(getHexString(guidValue[3])); - buf.append('-'); - buf.append(getHexString(guidValue[4])); - buf.append(getHexString(guidValue[5])); - buf.append('-'); - buf.append(getHexString(guidValue[6])); - buf.append(getHexString(guidValue[7])); - buf.append('-'); - buf.append(getHexString(guidValue[8])); - buf.append(getHexString(guidValue[9])); - buf.append('-'); - buf.append(getHexString(guidValue[10])); - buf.append(getHexString(guidValue[11])); - buf.append(getHexString(guidValue[12])); - buf.append(getHexString(guidValue[13])); - buf.append(getHexString(guidValue[14])); - buf.append(getHexString(guidValue[15])); - - return buf.toString(); - } - + private static final long serialVersionUID = -1750955753676929827L; + + private static final Logger log = LoggerFactory.getLogger(GUID.class); + + private byte guidValue[] = new byte[16]; + + public GUID() { + buildNewGUID(); + } + + public GUID(String guidString) { + + int pos = 0; + int count = 0; + + while (pos < guidString.length()) { + guidValue[count] = getByteValue(guidString.substring(pos, pos + 2)); + pos += 2; + count++; + + if (pos == guidString.length()) { + continue; + } + + if (guidString.charAt(pos) == '-') { + pos++; + } + } + } + + /** + * Calculates the byte from a hex string. + * + * @param hex A string hex value. + * @return a byte value. + */ + private byte getByteValue(String hex) { + + return (byte) Integer.parseInt(hex, 16); + } + + /** + * Calculates the hex string from a byte. + * + * @param val A byte value. + * @return a string hex value. + */ + private String getHexString(byte val) { + + String hexString; + if (val < 0) { + hexString = Integer.toHexString(val + 256); + } else { + hexString = Integer.toHexString(val); + } + + if (hexString.length() < 2) { + return "0" + hexString.toUpperCase(); + } + return hexString.toUpperCase(); + } + + /** + * Set the bytes in the array from another array. + * + * @param lg The other array of bytes. + * @param count How many there are. + * @param startPos The point in the main byte array these should go. + */ + private void setByteValues(byte[] lg, int startPos, int count) { + + for (int i = 0; i < count; i++) { + guidValue[i + startPos] = lg[i]; + } + } + + /** + * Sets the byte in the array to the bytes in a long value. In reverse order(IE Least significant + * byte goes first) + * + * @param lg The long value. + * @param count How many there are. + * @param startPos The point in the main byte array these should go. + */ + private void setByteValues(long lg, int startPos, int count) { + + for (int i = 0; i < count; i++) { + guidValue[i + startPos] = (byte) (lg & 0xFF); + lg = lg / 0xFF; + } + } + + /** + * Creates a new GUID from nowhere. Actually it uses the time, IPAddress and a random number. + * Stores it all in the main byte array. + */ + private void buildNewGUID() { + + try { + // The time in milli seconds for six bytes + // gives us until the year 10000ish. + long lg = System.currentTimeMillis(); + setByteValues(lg, 0, 6); + + // The hash code for this object for two bytes (As a why not option?) 
+ lg = this.hashCode(); + setByteValues(lg, 6, 2); + + // The ip address for this computer (as we cannot get to the MAC address) + InetAddress inet = InetAddress.getLocalHost(); + byte[] bytes = inet.getAddress(); + setByteValues(bytes, 8, 4); + + // A random number for two bytes + lg = (long) ((Math.random() * 0xFFFF)); + setByteValues(lg, 12, 2); + + // Another random number for two bytes + lg = (long) ((Math.random() * 0xFFFF)); + setByteValues(lg, 14, 2); + + } catch (Exception e) { + log.error("Error while generating a GUID", e); + } + } + + /** + * Stores the 16 bytes seperately, this returns that array Not sure why though. + * + * @return array of bytes. + */ + public byte[] getBytes() { + + return guidValue; + } + + /** + * Overrides toString(). Returns the array of bytes in the standard form: + * xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + * + * @return the string format + */ + @Override + public String toString() { + + StringBuilder buf = new StringBuilder(); + + buf.append(getHexString(guidValue[0])); + buf.append(getHexString(guidValue[1])); + buf.append(getHexString(guidValue[2])); + buf.append(getHexString(guidValue[3])); + buf.append('-'); + buf.append(getHexString(guidValue[4])); + buf.append(getHexString(guidValue[5])); + buf.append('-'); + buf.append(getHexString(guidValue[6])); + buf.append(getHexString(guidValue[7])); + buf.append('-'); + buf.append(getHexString(guidValue[8])); + buf.append(getHexString(guidValue[9])); + buf.append('-'); + buf.append(getHexString(guidValue[10])); + buf.append(getHexString(guidValue[11])); + buf.append(getHexString(guidValue[12])); + buf.append(getHexString(guidValue[13])); + buf.append(getHexString(guidValue[14])); + buf.append(getHexString(guidValue[15])); + + return buf.toString(); + } } diff --git a/src/main/java/it/grid/storm/common/HostLookup.java b/src/main/java/it/grid/storm/common/HostLookup.java index cde4555d..b1b67856 100644 --- a/src/main/java/it/grid/storm/common/HostLookup.java +++ b/src/main/java/it/grid/storm/common/HostLookup.java @@ -1,30 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common; import java.net.InetAddress; import java.net.UnknownHostException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class HostLookup { - private static final Logger log = LoggerFactory.getLogger(HostLookup.class); - - public HostLookup() { - - } + private static final Logger log = LoggerFactory.getLogger(HostLookup.class); - public String lookup(String hostname) throws UnknownHostException { + public HostLookup() {} - InetAddress ia = InetAddress.getByName(hostname); - log.debug("Lookup for hostname: {} resulted in {}", - hostname, - ia.getHostAddress()); - return ia.getHostAddress(); - } + public String lookup(String hostname) throws UnknownHostException { + InetAddress ia = InetAddress.getByName(hostname); + log.debug("Lookup for hostname: {} resulted in {}", hostname, ia.getHostAddress()); + return ia.getHostAddress(); + } } diff --git a/src/main/java/it/grid/storm/common/OperationType.java b/src/main/java/it/grid/storm/common/OperationType.java index 54474755..9ac56fd2 100644 --- a/src/main/java/it/grid/storm/common/OperationType.java +++ b/src/main/java/it/grid/storm/common/OperationType.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common; @@ -9,13 +8,11 @@ import com.codahale.metrics.Timer; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * * @author lucamag @date May 28, 2008 - * */ - public enum OperationType { UNDEF("undefined"), PTG("synch.ptg"), @@ -58,5 +55,4 @@ public Timer getTimer() { return timer; } - } diff --git a/src/main/java/it/grid/storm/common/SRMConstants.java b/src/main/java/it/grid/storm/common/SRMConstants.java index 30ea8094..0e22ffd5 100644 --- a/src/main/java/it/grid/storm/common/SRMConstants.java +++ b/src/main/java/it/grid/storm/common/SRMConstants.java @@ -1,30 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common; /** - * This class represents the default value for SRM parameter as specified by SRM - * v 2.1 interface. - * + * This class represents the default value for SRM parameter as specified by SRM v 2.1 interface. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - public class SRMConstants { - /** - * Default parameter for SrmRmdir function. - */ - public static final boolean recursiveFlag = false; - - /** - * Default Parameter for SrmLS function. - */ - public static final boolean fullDetailedList = false; - + /** Default parameter for SrmRmdir function. */ + public static final boolean recursiveFlag = false; + /** Default Parameter for SrmLS function. */ + public static final boolean fullDetailedList = false; } diff --git a/src/main/java/it/grid/storm/common/exception/StoRMException.java b/src/main/java/it/grid/storm/common/exception/StoRMException.java index 58576b86..69cc7ebf 100644 --- a/src/main/java/it/grid/storm/common/exception/StoRMException.java +++ b/src/main/java/it/grid/storm/common/exception/StoRMException.java @@ -1,13 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.exception; public class StoRMException extends Exception { - public StoRMException() { - - } - + public StoRMException() {} } diff --git a/src/main/java/it/grid/storm/common/types/EndPoint.java b/src/main/java/it/grid/storm/common/types/EndPoint.java index b8904494..b0b1025d 100644 --- a/src/main/java/it/grid/storm/common/types/EndPoint.java +++ b/src/main/java/it/grid/storm/common/types/EndPoint.java @@ -1,140 +1,120 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; -import java.util.Iterator; import java.util.ArrayList; +import java.util.Iterator; /** * This class represents an EndPoint of a SURL: it must begin with a /. 
- * + * * @author EGRID ICTP * @version 1.0 * @date August 2006 */ public class EndPoint { - private static final String ROOT_ENDPOINT = "/"; - - private ArrayList name = new ArrayList(); - private boolean empty = true; - - private EndPoint(ArrayList name, boolean empty) { - - this.name.clear(); - this.name.addAll(name); - this.empty = empty; - } - - /** - * Public static method that returns an empty EndPoint. - */ - public static EndPoint makeEmpty() { - - return new EndPoint(new ArrayList(), true); - } - - /** - * Public static method that requires a String representing the EndPoint: it - * cannot be null or empty otherwise an InvalidEndPointAttributeException is - * thrown. Likewise if it does not begin with a slash (/), or if it contains - * two consecutive dots (..). - */ - public static EndPoint make(String name) - throws InvalidEndPointAttributeException { - - if (invalid(name)) - throw new InvalidEndPointAttributeException(name); - return new EndPoint(normalize(name), false); - } - - /** - * Private method that returns true if the supplied string is null, or is - * empty, or does not begin with a slash (/), or contains two consecutive dots - * (..). - */ - static private boolean invalid(String name) { - - return (name == null) || (name.equals("")) || (name.charAt(0) != '/'); - } - - /** - * Private method that accepts a valid String as defined by the private valid - * method, and returns an ordered ArrayList of all slash-separated elemets, - * trimmed of leading and trailing white spaces. Multiple consecutive slashes - * are treated as a single slash. - * - * Example1: /a/ b /c/d Result: a b c d - * - * Example2: /////a///b//////////// c/d///////// Result: a b c d - * - * Example3: / Result: empty ArrayList! - * - */ - static private ArrayList normalize(String s) { - - // split around slash! - String[] pieces = s.split("/"); - // remove all empty Strings which may have been produced because of - // consecutive slashes! - ArrayList auxList = new ArrayList(); - int pos = 0; - String aux = null; - for (int k = 0; k < pieces.length; k++) { - aux = pieces[k]; // get the element - aux = aux.trim(); // remove all leading and trailing white spaces - if (!aux.equals("")) - auxList.add(pos++, aux); - } - return auxList; - } - - /** - * Method that returns true if this StFN is empty. - */ - public boolean isEmpty() { - - return empty; - } - - public String toString() { - - if (empty) - return "Empty EndPoint"; - int size = this.name.size(); - if (size == 0) - return ROOT_ENDPOINT; - StringBuilder sb = new StringBuilder(); - for (Iterator i = this.name.iterator(); i.hasNext();) { - sb.append("/"); - sb.append(i.next()); - } - return sb.toString(); - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof EndPoint)) - return false; - EndPoint po = (EndPoint) o; - if (po.empty && empty) - return true; - if ((!empty) && (!po.empty) && (name.size() == 0) && (po.name.size() == 0)) - return true; - return (!empty) && (!po.empty) && name.equals(po.name); - } - - public int hashCode() { - - if (empty) - return 0; - int hash = 17; - if (name.size() == 0) - return hash; - return 37 * hash + name.hashCode(); - } + private static final String ROOT_ENDPOINT = "/"; + + private ArrayList name = new ArrayList(); + private boolean empty = true; + + private EndPoint(ArrayList name, boolean empty) { + + this.name.clear(); + this.name.addAll(name); + this.empty = empty; + } + + /** Public static method that returns an empty EndPoint. 
*/ + public static EndPoint makeEmpty() { + + return new EndPoint(new ArrayList(), true); + } + + /** + * Public static method that requires a String representing the EndPoint: it cannot be null or + * empty otherwise an InvalidEndPointAttributeException is thrown. Likewise if it does not begin + * with a slash (/), or if it contains two consecutive dots (..). + */ + public static EndPoint make(String name) throws InvalidEndPointAttributeException { + + if (invalid(name)) throw new InvalidEndPointAttributeException(name); + return new EndPoint(normalize(name), false); + } + + /** + * Private method that returns true if the supplied string is null, or is empty, or does not begin + * with a slash (/), or contains two consecutive dots (..). + */ + private static boolean invalid(String name) { + + return (name == null) || (name.equals("")) || (name.charAt(0) != '/'); + } + + /** + * Private method that accepts a valid String as defined by the private valid method, and returns + * an ordered ArrayList of all slash-separated elemets, trimmed of leading and trailing white + * spaces. Multiple consecutive slashes are treated as a single slash. + * + *
<p>Example1: /a/ b /c/d Result: a b c d + * + *
<p>Example2: /////a///b//////////// c/d///////// Result: a b c d + * + *
<p>
Example3: / Result: empty ArrayList! + */ + private static ArrayList normalize(String s) { + + // split around slash! + String[] pieces = s.split("/"); + // remove all empty Strings which may have been produced because of + // consecutive slashes! + ArrayList auxList = new ArrayList(); + int pos = 0; + String aux = null; + for (int k = 0; k < pieces.length; k++) { + aux = pieces[k]; // get the element + aux = aux.trim(); // remove all leading and trailing white spaces + if (!aux.equals("")) auxList.add(pos++, aux); + } + return auxList; + } + + /** Method that returns true if this StFN is empty. */ + public boolean isEmpty() { + + return empty; + } + + public String toString() { + + if (empty) return "Empty EndPoint"; + int size = this.name.size(); + if (size == 0) return ROOT_ENDPOINT; + StringBuilder sb = new StringBuilder(); + for (Iterator i = this.name.iterator(); i.hasNext(); ) { + sb.append("/"); + sb.append(i.next()); + } + return sb.toString(); + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof EndPoint)) return false; + EndPoint po = (EndPoint) o; + if (po.empty && empty) return true; + if ((!empty) && (!po.empty) && (name.size() == 0) && (po.name.size() == 0)) return true; + return (!empty) && (!po.empty) && name.equals(po.name); + } + + public int hashCode() { + + if (empty) return 0; + int hash = 17; + if (name.size() == 0) return hash; + return 37 * hash + name.hashCode(); + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidEndPointAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidEndPointAttributeException.java index c3b18f24..fc267cdf 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidEndPointAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidEndPointAttributeException.java @@ -1,40 +1,33 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception throw nwhen attempting to create a - * StFNRoot with a null or empty String, or with a String that does not begin a - * /. - * + * This class represents an Exception throw nwhen attempting to create a StFNRoot with a null or + * empty String, or with a String that does not begin a /. + * * @author EGRID ICTP Trieste * @version 1.0 * @date August, 2006 */ public class InvalidEndPointAttributeException extends Exception { - private boolean nullName; // boolean true if the supplied String is null - private boolean emptyName; // boolean true if the supplied String is empty - private boolean wrong = false; // boolean true if the supplied String does not - // begin with a / + private boolean nullName; // boolean true if the supplied String is null + private boolean emptyName; // boolean true if the supplied String is empty + private boolean wrong = false; // boolean true if the supplied String does not + // begin with a / - /** - * Constructor requiring the String that caused the exception to be thrown. - */ - public InvalidEndPointAttributeException(String name) { + /** Constructor requiring the String that caused the exception to be thrown. 
*/ + public InvalidEndPointAttributeException(String name) { - this.nullName = (name == null); - this.emptyName = (name.equals("")); - if (!nullName) - wrong = (name.charAt(0) == '/'); - } + this.nullName = (name == null); + this.emptyName = (name.equals("")); + if (!nullName) wrong = (name.charAt(0) == '/'); + } - public String toString() { - - return "nullName=" + nullName + "; emptyName=" + emptyName - + "; not-beginning-with-/=" + wrong; - } + public String toString() { + return "nullName=" + nullName + "; emptyName=" + emptyName + "; not-beginning-with-/=" + wrong; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidMachineAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidMachineAttributeException.java index 99372873..d3f1f863 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidMachineAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidMachineAttributeException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception thrown when the String supplied to the - * constructor of Machine is null or empty. - * + * This class represents an Exception thrown when the String supplied to the constructor of Machine + * is null or empty. + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 25th, 2005 @@ -15,21 +14,18 @@ */ public class InvalidMachineAttributeException extends Exception { - private boolean nullName; // boolean representing a null name String - private boolean emptyName; // boolean true if the supplied String is empty + private boolean nullName; // boolean representing a null name String + private boolean emptyName; // boolean true if the supplied String is empty - /** - * Constructor that requires the String that caused the exception to be - * thrown. - */ - public InvalidMachineAttributeException(String name) { + /** Constructor that requires the String that caused the exception to be thrown. */ + public InvalidMachineAttributeException(String name) { - nullName = name == null; - emptyName = (name.equals("")); - } + nullName = name == null; + emptyName = (name.equals("")); + } - public String toString() { + public String toString() { - return "nullName=" + nullName + "; emptyName=" + emptyName; - } + return "nullName=" + nullName + "; emptyName=" + emptyName; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidPFNAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidPFNAttributeException.java index 48795fde..1982973c 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidPFNAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidPFNAttributeException.java @@ -1,14 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception throw nwhen attempting to create a - * PathName with a null or empty String, or with a String that does not begin a - * /. - * + * This class represents an Exception throw nwhen attempting to create a PathName with a null or + * empty String, or with a String that does not begin a /. 
+ * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 25th, 2005 @@ -16,27 +14,26 @@ */ public class InvalidPFNAttributeException extends Exception { - private boolean nullName; // boolean true if the supplied String is null - private boolean emptyName; // boolean true if the supplied String is empty - private boolean wrong = false; // boolean true if the supplied String does not - // begin with a / + private boolean nullName; // boolean true if the supplied String is null + private boolean emptyName; // boolean true if the supplied String is empty + private boolean wrong = false; // boolean true if the supplied String does not + // begin with a / - /** - * Constructor requiring the String that caused the exception to be thrown. - */ - public InvalidPFNAttributeException(String name) { + /** Constructor requiring the String that caused the exception to be thrown. */ + public InvalidPFNAttributeException(String name) { - this.nullName = (name == null); - this.emptyName = (name.equals("")); - if (!nullName && !emptyName) - this.wrong = (name.charAt(0) != '/'); - } + this.nullName = (name == null); + this.emptyName = (name.equals("")); + if (!nullName && !emptyName) this.wrong = (name.charAt(0) != '/'); + } - public String toString() { - - return "Attempt to create PFN with invalid attributes: nullName=" - + nullName + "; emptyName=" + emptyName + "; not-beginning-with-/=" - + wrong; - } + public String toString() { + return "Attempt to create PFN with invalid attributes: nullName=" + + nullName + + "; emptyName=" + + emptyName + + "; not-beginning-with-/=" + + wrong; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidPFNRootAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidPFNRootAttributeException.java index 509b11aa..0ff0e484 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidPFNRootAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidPFNRootAttributeException.java @@ -1,37 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception throw nwhen attempting to create a - * PFNRootRoot with a null or empty String, or with a String that does not begin - * a /. - * + * This class represents an Exception throw nwhen attempting to create a PFNRootRoot with a null or + * empty String, or with a String that does not begin a /. */ public class InvalidPFNRootAttributeException extends Exception { - private boolean nullName; // boolean true if the supplied String is null - private boolean emptyName; // boolean true if the supplied String is empty - private boolean wrong = false; // boolean true if the supplied String does not - // begin with a / + private boolean nullName; // boolean true if the supplied String is null + private boolean emptyName; // boolean true if the supplied String is empty + private boolean wrong = false; // boolean true if the supplied String does not + // begin with a / - /** - * Constructor requiring the String that caused the exception to be thrown. - */ - public InvalidPFNRootAttributeException(String name) { + /** Constructor requiring the String that caused the exception to be thrown. 
*/ + public InvalidPFNRootAttributeException(String name) { - this.nullName = (name == null); - this.emptyName = (name.equals("")); - if (!nullName) - wrong = (name.charAt(0) == '/'); - } + this.nullName = (name == null); + this.emptyName = (name.equals("")); + if (!nullName) wrong = (name.charAt(0) == '/'); + } - public String toString() { - - return "nullName=" + nullName + "; emptyName=" + emptyName - + "; not-beginning-with-/=" + wrong; - } + public String toString() { + return "nullName=" + nullName + "; emptyName=" + emptyName + "; not-beginning-with-/=" + wrong; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidPortAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidPortAttributeException.java index b1c1b88a..703d4076 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidPortAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidPortAttributeException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an exception thrown if a Port is attempted to be built - * with an int <0 or >65535. - * + * This class represents an exception thrown if a Port is attempted to be built with an int <0 or + * >65535. + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 25th, 2005 @@ -15,18 +14,16 @@ */ public class InvalidPortAttributeException extends Exception { - private int port; + private int port; - /** - * Constructor requiring the port that caused the exception. - */ - public InvalidPortAttributeException(int port) { + /** Constructor requiring the port that caused the exception. */ + public InvalidPortAttributeException(int port) { - this.port = port; - } + this.port = port; + } - public String toString() { + public String toString() { - return "Port exceeded limits; supplied port was: " + port; - } + return "Port exceeded limits; supplied port was: " + port; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidSFNAttributesException.java b/src/main/java/it/grid/storm/common/types/InvalidSFNAttributesException.java index d6f25920..4143b17b 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidSFNAttributesException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidSFNAttributesException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an exception thrown when the SFN constructor is invoked - * with null Machine, Port or PathName. - * + * This class represents an exception thrown when the SFN constructor is invoked with null Machine, + * Port or PathName. 
+ * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 26th, 2005 @@ -15,79 +14,92 @@ */ public class InvalidSFNAttributesException extends Exception { - private boolean nullMachine; // boolean true if Machine is null - private boolean nullPort; // boolean true if Port is null - private boolean nullEndPoint; // boolean true if EndPoint is null - private boolean nullStFN; // boolean true if PathName is null - private boolean emptyMachine = false; // boolean indicating if Machine is - // empty - private boolean emptyPort = false; // boolean indicating if Port is empty - private boolean emptyEndPoint = false; // boolean indicating if EndPoint is - // empty - private boolean emptyStFN = false; // boolean indicating if StFN is empty + private boolean nullMachine; // boolean true if Machine is null + private boolean nullPort; // boolean true if Port is null + private boolean nullEndPoint; // boolean true if EndPoint is null + private boolean nullStFN; // boolean true if PathName is null + private boolean emptyMachine = false; // boolean indicating if Machine is + // empty + private boolean emptyPort = false; // boolean indicating if Port is empty + private boolean emptyEndPoint = false; // boolean indicating if EndPoint is + // empty + private boolean emptyStFN = false; // boolean indicating if StFN is empty - private boolean queryForm = false; + private boolean queryForm = false; - /** - * Constructor that requires the Machine m, the Port p and the PathName pn - * that caused the Exception to be thrown. - */ - public InvalidSFNAttributesException(Machine m, Port p, StFN s) { + /** + * Constructor that requires the Machine m, the Port p and the PathName pn that caused the + * Exception to be thrown. + */ + public InvalidSFNAttributesException(Machine m, Port p, StFN s) { - nullMachine = (m == null); - if (!nullMachine) - emptyMachine = m.isEmpty(); - nullPort = (p == null); - if (!nullPort) - emptyPort = p.isEmpty(); - nullStFN = (s == null); - if (!nullStFN) - emptyStFN = s.isEmpty(); - } + nullMachine = (m == null); + if (!nullMachine) emptyMachine = m.isEmpty(); + nullPort = (p == null); + if (!nullPort) emptyPort = p.isEmpty(); + nullStFN = (s == null); + if (!nullStFN) emptyStFN = s.isEmpty(); + } - public InvalidSFNAttributesException(Machine m, Port p, EndPoint e, StFN s) { + public InvalidSFNAttributesException(Machine m, Port p, EndPoint e, StFN s) { - nullMachine = (m == null); - if (!nullMachine) - emptyMachine = m.isEmpty(); - nullPort = (p == null); - if (!nullPort) - emptyPort = p.isEmpty(); - nullEndPoint = (e == null); - if (!nullEndPoint) - emptyEndPoint = e.isEmpty(); - nullStFN = (s == null); - if (!nullStFN) - emptyStFN = s.isEmpty(); - queryForm = true; - } + nullMachine = (m == null); + if (!nullMachine) emptyMachine = m.isEmpty(); + nullPort = (p == null); + if (!nullPort) emptyPort = p.isEmpty(); + nullEndPoint = (e == null); + if (!nullEndPoint) emptyEndPoint = e.isEmpty(); + nullStFN = (s == null); + if (!nullStFN) emptyStFN = s.isEmpty(); + queryForm = true; + } - /** - * Constructor that makes an InvalidSFNAttributesException with Machine, Port - * and StFN, as though they had been supplied all null. - */ - public InvalidSFNAttributesException() { + /** + * Constructor that makes an InvalidSFNAttributesException with Machine, Port and StFN, as though + * they had been supplied all null. 
+ */ + public InvalidSFNAttributesException() { - nullMachine = true; - nullPort = true; - nullEndPoint = true; - nullStFN = true; - } + nullMachine = true; + nullPort = true; + nullEndPoint = true; + nullStFN = true; + } - public String toString() { - - if (queryForm) { - return "Invalid SFN Attributes: nullMachine=" + nullMachine - + "; nullPort=" + nullPort + "; nullEndPoint=" + nullEndPoint - + "; nullStFN=" + nullStFN + "; emptyMachine=" + emptyMachine - + "; emptyPort=" + emptyPort + "; emptyEndPoint=" + emptyEndPoint - + "; emptyStFN=" + emptyStFN + "."; - } else { - return "Invalid SFN Attributes: nullMachine=" + nullMachine - + "; nullPort=" + nullPort + "; nullStFN=" + nullStFN - + "; emptyMachine=" + emptyMachine + "; emptyPort=" + emptyPort - + "; emptyStFN=" + emptyStFN + "."; - } - } + public String toString() { + if (queryForm) { + return "Invalid SFN Attributes: nullMachine=" + + nullMachine + + "; nullPort=" + + nullPort + + "; nullEndPoint=" + + nullEndPoint + + "; nullStFN=" + + nullStFN + + "; emptyMachine=" + + emptyMachine + + "; emptyPort=" + + emptyPort + + "; emptyEndPoint=" + + emptyEndPoint + + "; emptyStFN=" + + emptyStFN + + "."; + } else { + return "Invalid SFN Attributes: nullMachine=" + + nullMachine + + "; nullPort=" + + nullPort + + "; nullStFN=" + + nullStFN + + "; emptyMachine=" + + emptyMachine + + "; emptyPort=" + + emptyPort + + "; emptyStFN=" + + emptyStFN + + "."; + } + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidStFNAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidStFNAttributeException.java index 92975357..f181e11b 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidStFNAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidStFNAttributeException.java @@ -1,42 +1,44 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception thrown when attempting to create a StFN - * with a null or empty String, or with a String that does not begin a /. - * + * This class represents an Exception thrown when attempting to create a StFN with a null or empty + * String, or with a String that does not begin a /. + * * @author EGRID ICTP - CNAF Bologna * @version 2.0 * @date March 2005 */ public class InvalidStFNAttributeException extends Exception { - private boolean nullName; // boolean true if the supplied String is null - private boolean emptyName; // boolean true if the supplied String is empty - private boolean noBeginningSlash = false; // boolean true if the supplied - // String does not begin with a / - private boolean hasDot = false; // boolean true is string contains a . + private boolean nullName; // boolean true if the supplied String is null + private boolean emptyName; // boolean true if the supplied String is empty + private boolean noBeginningSlash = false; // boolean true if the supplied + // String does not begin with a / + private boolean hasDot = false; // boolean true is string contains a . - /** - * Constructor requiring the String that caused the exception to be thrown. - */ - public InvalidStFNAttributeException(String name) { + /** Constructor requiring the String that caused the exception to be thrown. 
*/ + public InvalidStFNAttributeException(String name) { - this.nullName = (name == null); - this.emptyName = (name.equals("")); - if (!nullName && !emptyName) { - noBeginningSlash = (name.charAt(0) != '/'); - hasDot = (name.indexOf("..") != -1); - } - } + this.nullName = (name == null); + this.emptyName = (name.equals("")); + if (!nullName && !emptyName) { + noBeginningSlash = (name.charAt(0) != '/'); + hasDot = (name.indexOf("..") != -1); + } + } - public String toString() { + public String toString() { - return "Invalid StFN Attributes: nullName=" + nullName + "; emptyName=" - + emptyName + "; doesn't beginning with slash=" + noBeginningSlash - + "; has dots=" + hasDot; - } + return "Invalid StFN Attributes: nullName=" + + nullName + + "; emptyName=" + + emptyName + + "; doesn't beginning with slash=" + + noBeginningSlash + + "; has dots=" + + hasDot; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidStFNRootAttributeException.java b/src/main/java/it/grid/storm/common/types/InvalidStFNRootAttributeException.java index 440d10f6..3f1e22e1 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidStFNRootAttributeException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidStFNRootAttributeException.java @@ -1,37 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an Exception throw nwhen attempting to create a - * StFNRoot with a null or empty String, or with a String that does not begin a - * /. - * + * This class represents an Exception throw nwhen attempting to create a StFNRoot with a null or + * empty String, or with a String that does not begin a /. */ public class InvalidStFNRootAttributeException extends Exception { - private boolean nullName; // boolean true if the supplied String is null - private boolean emptyName; // boolean true if the supplied String is empty - private boolean wrong = false; // boolean true if the supplied String does not - // begin with a / + private boolean nullName; // boolean true if the supplied String is null + private boolean emptyName; // boolean true if the supplied String is empty + private boolean wrong = false; // boolean true if the supplied String does not + // begin with a / - /** - * Constructor requiring the String that caused the exception to be thrown. - */ - public InvalidStFNRootAttributeException(String name) { + /** Constructor requiring the String that caused the exception to be thrown. 
*/ + public InvalidStFNRootAttributeException(String name) { - this.nullName = (name == null); - this.emptyName = (name.equals("")); - if (!nullName) - wrong = (name.charAt(0) == '/'); - } + this.nullName = (name == null); + this.emptyName = (name.equals("")); + if (!nullName) wrong = (name.charAt(0) == '/'); + } - public String toString() { - - return "nullName=" + nullName + "; emptyName=" + emptyName - + "; not-beginning-with-/=" + wrong; - } + public String toString() { + return "nullName=" + nullName + "; emptyName=" + emptyName + "; not-beginning-with-/=" + wrong; + } } diff --git a/src/main/java/it/grid/storm/common/types/InvalidTFNAttributesException.java b/src/main/java/it/grid/storm/common/types/InvalidTFNAttributesException.java index 7e158d41..32759080 100644 --- a/src/main/java/it/grid/storm/common/types/InvalidTFNAttributesException.java +++ b/src/main/java/it/grid/storm/common/types/InvalidTFNAttributesException.java @@ -1,47 +1,53 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents an exception thrown when the TFN constructor is invoked - * with null Machine, Port or PathName, or if any is empty. - * + * This class represents an exception thrown when the TFN constructor is invoked with null Machine, + * Port or PathName, or if any is empty. + * * @author EGRID - ICTP Trieste * @date March 26th, 2005 * @version 2.0 */ public class InvalidTFNAttributesException extends Exception { - private boolean nullMachine; // boolean true if Machine is null - private boolean nullPort; // boolean true if Port is null - private boolean nullPFN; // boolean true if PathName is null - private boolean emptyMachine = false; // boolean true if Machine is empty - private boolean emptyPort = false; // boolean true if Port is empty - private boolean emptyPFN = false; // boolean true if PFN is empty + private boolean nullMachine; // boolean true if Machine is null + private boolean nullPort; // boolean true if Port is null + private boolean nullPFN; // boolean true if PathName is null + private boolean emptyMachine = false; // boolean true if Machine is empty + private boolean emptyPort = false; // boolean true if Port is empty + private boolean emptyPFN = false; // boolean true if PFN is empty - /** - * Constructor that requires the Machine m, the Port p and the PathName pn - * that caused the Exception to be thrown. - */ - public InvalidTFNAttributesException(Machine m, Port p, PFN pfn) { + /** + * Constructor that requires the Machine m, the Port p and the PathName pn that caused the + * Exception to be thrown. 
+ */ + public InvalidTFNAttributesException(Machine m, Port p, PFN pfn) { - nullMachine = (m == null); - nullPort = (p == null); - nullPFN = (pfn == null); - if (!nullMachine) - emptyMachine = m.isEmpty(); - if (!nullPort) - emptyPort = p.isEmpty(); - if (!nullPFN) - emptyPFN = pfn.isEmpty(); - } + nullMachine = (m == null); + nullPort = (p == null); + nullPFN = (pfn == null); + if (!nullMachine) emptyMachine = m.isEmpty(); + if (!nullPort) emptyPort = p.isEmpty(); + if (!nullPFN) emptyPFN = pfn.isEmpty(); + } - public String toString() { + public String toString() { - return "nullMachine=" + nullMachine + "; emptyMachine=" + emptyMachine - + "; nullPort=" + nullPort + "; emptyPort=" + emptyPort + "; nullPFN=" - + nullPFN + "; emptyPFN=" + emptyPFN + "."; - } + return "nullMachine=" + + nullMachine + + "; emptyMachine=" + + emptyMachine + + "; nullPort=" + + nullPort + + "; emptyPort=" + + emptyPort + + "; nullPFN=" + + nullPFN + + "; emptyPFN=" + + emptyPFN + + "."; + } } diff --git a/src/main/java/it/grid/storm/common/types/Machine.java b/src/main/java/it/grid/storm/common/types/Machine.java index 19dea60f..faf2481d 100644 --- a/src/main/java/it/grid/storm/common/types/Machine.java +++ b/src/main/java/it/grid/storm/common/types/Machine.java @@ -1,85 +1,74 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** * This class represents the name of a machine in a SFN. - * + * * @author EGRID - ICTP Trieste; CNAF - Bologna * @date March 25th, 2005 * @version 2.0 */ public class Machine { - private String name = ""; // name of the machine in the SFN - private boolean empty = true; // boolean true if this object is the empty - // object + private String name = ""; // name of the machine in the SFN + private boolean empty = true; // boolean true if this object is the empty + // object - private Machine(String name, boolean empty) { + private Machine(String name, boolean empty) { - this.name = name.replaceAll(" ", ""); - this.empty = empty; - } + this.name = name.replaceAll(" ", ""); + this.empty = empty; + } - /** - * Static method that returns an empty Machine. - */ - public static Machine makeEmpty() { + /** Static method that returns an empty Machine. */ + public static Machine makeEmpty() { - return new Machine("", true); - } + return new Machine("", true); + } - /** - * Static method requiring the name of the machine: it cannot be null or the - * empty String, otherwise an InvalidMachineAttributeException is thrown. - * Beware that any empty space is removed. - */ - public static Machine make(String s) throws InvalidMachineAttributeException { + /** + * Static method requiring the name of the machine: it cannot be null or the empty String, + * otherwise an InvalidMachineAttributeException is thrown. Beware that any empty space is + * removed. 
+ */ + public static Machine make(String s) throws InvalidMachineAttributeException { - if ((s == null) || (s.equals(""))) - throw new InvalidMachineAttributeException(s); - return new Machine(s, false); - } + if ((s == null) || (s.equals(""))) throw new InvalidMachineAttributeException(s); + return new Machine(s, false); + } - /** - * Return true if Empty instance of machine object - */ - public boolean isEmpty() { + /** Return true if Empty instance of machine object */ + public boolean isEmpty() { - return empty; - } + return empty; + } - public String getValue() { + public String getValue() { - return name; - } + return name; + } - public String toString() { + public String toString() { - if (empty) - return "Empty Machine"; - return name; - } + if (empty) return "Empty Machine"; + return name; + } - public boolean equals(Object o) { + public boolean equals(Object o) { - if (o == this) - return true; - if (!(o instanceof Machine)) - return false; - Machine mo = (Machine) o; - if (mo.empty && empty) - return true; - return (!mo.empty && !empty && mo.getValue().equals(name)); - } + if (o == this) return true; + if (!(o instanceof Machine)) return false; + Machine mo = (Machine) o; + if (mo.empty && empty) return true; + return (!mo.empty && !empty && mo.getValue().equals(name)); + } - public int hashCode() { + public int hashCode() { - if (empty) - return 0; - int hash = 17; - return 37 * hash + name.hashCode(); - } + if (empty) return 0; + int hash = 17; + return 37 * hash + name.hashCode(); + } } diff --git a/src/main/java/it/grid/storm/common/types/PFN.java b/src/main/java/it/grid/storm/common/types/PFN.java index be8efa92..a5491ffc 100644 --- a/src/main/java/it/grid/storm/common/types/PFN.java +++ b/src/main/java/it/grid/storm/common/types/PFN.java @@ -1,91 +1,80 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents the PFN of a TTURL: it must begin with a /, and it - * cannot be an empty or null string. Any white spaces are automatically - * removed. For Empty PFN there is the appropriate method to be used. - * + * This class represents the PFN of a TTURL: it must begin with a /, and it cannot be an empty or + * null string. Any white spaces are automatically removed. For Empty PFN there is the appropriate + * method to be used. + * * @author CNAF - Bologna * @version 1.0 * @date April 2005 */ public class PFN { - private String name; // String containing the PFN - private boolean empty = true; // boolean indicating whether this is an Empty - // PFN - - /** - * Private constructor that requires a String representing the pathname of the - * TTURL. Empty spaces are automatically removed. - */ - private PFN(String name, boolean empty) { - - this.name = name.replaceAll(" ", ""); - this.empty = empty; - } - - /** - * Method that returns an Empty PFN. - */ - public static PFN makeEmpty() { - - return new PFN("", true); - } - - /** - * Method that returns a PFN corresponding to the supplied String. The String - * cannot be null or empty otherwise an InvalidPFNAttributeException is - * thrown. Likewise if it does not begin with a /. 
- */ - public static PFN make(String name) throws InvalidPFNAttributeException { - - if ((name == null) || (name.equals(""))) - throw new InvalidPFNAttributeException(name); - return new PFN(name, false); - } - - public String getValue() { - - return name; - } - - /** - * Method that returns true if this PFN is an Empty object. - */ - public boolean isEmpty() { - - return empty; - } - - public String toString() { - - if (empty) - return "Empty PFN"; - return name; - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof PFN)) - return false; - PFN po = (PFN) o; - if (po.empty && empty) - return true; - return (!po.empty) && (!empty) && (name.equals(po.name)); - } - - public int hashCode() { - - if (empty) - return 0; - int hash = 17; - return hash + 37 * name.hashCode(); - } + private String name; // String containing the PFN + private boolean empty = true; // boolean indicating whether this is an Empty + // PFN + + /** + * Private constructor that requires a String representing the pathname of the TTURL. Empty spaces + * are automatically removed. + */ + private PFN(String name, boolean empty) { + + this.name = name.replaceAll(" ", ""); + this.empty = empty; + } + + /** Method that returns an Empty PFN. */ + public static PFN makeEmpty() { + + return new PFN("", true); + } + + /** + * Method that returns a PFN corresponding to the supplied String. The String cannot be null or + * empty otherwise an InvalidPFNAttributeException is thrown. Likewise if it does not begin with a + * /. + */ + public static PFN make(String name) throws InvalidPFNAttributeException { + + if ((name == null) || (name.equals(""))) throw new InvalidPFNAttributeException(name); + return new PFN(name, false); + } + + public String getValue() { + + return name; + } + + /** Method that returns true if this PFN is an Empty object. */ + public boolean isEmpty() { + + return empty; + } + + public String toString() { + + if (empty) return "Empty PFN"; + return name; + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof PFN)) return false; + PFN po = (PFN) o; + if (po.empty && empty) return true; + return (!po.empty) && (!empty) && (name.equals(po.name)); + } + + public int hashCode() { + + if (empty) return 0; + int hash = 17; + return hash + 37 * name.hashCode(); + } } diff --git a/src/main/java/it/grid/storm/common/types/PFNRoot.java b/src/main/java/it/grid/storm/common/types/PFNRoot.java index 5b56d146..490f84cf 100644 --- a/src/main/java/it/grid/storm/common/types/PFNRoot.java +++ b/src/main/java/it/grid/storm/common/types/PFNRoot.java @@ -1,53 +1,46 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represent a Physical File Name Root, the directory entry in FIle - * System assigned to a Virtual Organization. - * + * This class represent a Physical File Name Root, the directory entry in FIle System assigned to a + * Virtual Organization. 
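
A short sketch of the constructor contract shown below (a leading slash is mandatory and blanks are stripped); the paths are illustrative and the snippet is not part of this patch:

    // Illustrative sketch only (hypothetical VO root directory).
    try {
      PFNRoot root = new PFNRoot("/storage/example-vo");
      System.out.println(root.getValue());       // -> /storage/example-vo
      new PFNRoot("storage/example-vo");         // no leading '/': throws
    } catch (InvalidPFNRootAttributeException e) {
      // thrown for a null or empty string, or one not starting with '/'
    }
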
*/ - public class PFNRoot { - private String pfnroot; - - public PFNRoot(String pfnroot) throws InvalidPFNRootAttributeException { - - if ((pfnroot == null) || (pfnroot.equals("")) || (pfnroot.charAt(0) != '/')) - throw new InvalidPFNRootAttributeException(pfnroot); - this.pfnroot = pfnroot.replaceAll(" ", ""); + private String pfnroot; - } + public PFNRoot(String pfnroot) throws InvalidPFNRootAttributeException { - public String getValue() { + if ((pfnroot == null) || (pfnroot.equals("")) || (pfnroot.charAt(0) != '/')) + throw new InvalidPFNRootAttributeException(pfnroot); + this.pfnroot = pfnroot.replaceAll(" ", ""); + } - return pfnroot; - } + public String getValue() { - public String toString() { + return pfnroot; + } - return pfnroot; - } + public String toString() { - public boolean equals(Object o) { + return pfnroot; + } - if (o == this) - return true; - if (!(o instanceof PFNRoot)) - return false; - PFNRoot po = (PFNRoot) o; - return pfnroot.equals(po.pfnroot); - } + public boolean equals(Object o) { - @Override - public int hashCode() { + if (o == this) return true; + if (!(o instanceof PFNRoot)) return false; + PFNRoot po = (PFNRoot) o; + return pfnroot.equals(po.pfnroot); + } - int result = 17; - result = 31 * result + (pfnroot != null ? pfnroot.hashCode() : 0); - return result; - } + @Override + public int hashCode() { + int result = 17; + result = 31 * result + (pfnroot != null ? pfnroot.hashCode() : 0); + return result; + } } diff --git a/src/main/java/it/grid/storm/common/types/ParsingSFNAttributesException.java b/src/main/java/it/grid/storm/common/types/ParsingSFNAttributesException.java index f3e16f7e..9b6091c2 100644 --- a/src/main/java/it/grid/storm/common/types/ParsingSFNAttributesException.java +++ b/src/main/java/it/grid/storm/common/types/ParsingSFNAttributesException.java @@ -1,37 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * Class that represents an Exception thrown when making an SFN from a String - * representation. - * + * Class that represents an Exception thrown when making an SFN from a String representation. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date September, 2006 */ -public class ParsingSFNAttributesException extends - InvalidSFNAttributesException { +public class ParsingSFNAttributesException extends InvalidSFNAttributesException { - private String explanation = ""; - private String sfn = ""; + private String explanation = ""; + private String sfn = ""; - /** - * Constructor that requires the String that caused the exception to be - * thrown, and an explanation String that describes the problem encountered. - */ - public ParsingSFNAttributesException(String sfn, String explanation) { + /** + * Constructor that requires the String that caused the exception to be thrown, and an explanation + * String that describes the problem encountered. 
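
For reference, the diagnostic text this class produces; the values are illustrative and the snippet is not part of this patch:

    // Illustrative sketch: toString() concatenates the offending string and the explanation.
    ParsingSFNAttributesException e =
        new ParsingSFNAttributesException("host.example.org", "the first slash was not found");
    System.out.println(e);  // -> host.example.org is malformed: the first slash was not found
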
+ */ + public ParsingSFNAttributesException(String sfn, String explanation) { - if ((sfn != null) && (explanation != null)) { - this.sfn = sfn; - this.explanation = explanation; - } - } + if ((sfn != null) && (explanation != null)) { + this.sfn = sfn; + this.explanation = explanation; + } + } - public String toString() { + public String toString() { - return sfn + " is malformed: " + explanation; - } + return sfn + " is malformed: " + explanation; + } } diff --git a/src/main/java/it/grid/storm/common/types/Port.java b/src/main/java/it/grid/storm/common/types/Port.java index 7dfb1c59..01e2620b 100644 --- a/src/main/java/it/grid/storm/common/types/Port.java +++ b/src/main/java/it/grid/storm/common/types/Port.java @@ -1,101 +1,83 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represents a port in a SFN. An int between 0 and 65535 is - * required: if the limits are exceeded then an InvalidPortAttributeException is - * thrown. - * + * This class represents a port in a SFN. An int between 0 and 65535 is required: if the limits are + * exceeded then an InvalidPortAttributeException is thrown. + * * @author EGRID - ICTP Trieste; CNAF - Bologna * @date March 25th, 2005 * @version 2.0 */ public class Port { - private int port; // int representing the port number - private boolean empty = true; // boolean true id this object refers to the - // empty port - - /** - * Private constructor. - */ - private Port(int port, boolean empty) { - - this.port = port; - this.empty = empty; - } - - /** - * Static method to make an empty port. - */ - public static Port makeEmpty() { - - return new Port(-1, true); - } - - /** - * Static method used to make a non empty Port object. It requires an int - * between 0 and 65535 representing the port: if the limits are exceeded then - * an InvalidPortAttributeException is thrown. - */ - public static Port make(int port) throws InvalidPortAttributeException { - - if ((port < 0) || (port > 65535)) - throw new InvalidPortAttributeException(port); - return new Port(port, false); - } - - /** - * Method that returns whether this object refers to the empty port or not. - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns an int representing this port. An empty port will - * return -1. - */ - public int toInt() { - - if (empty) - return -1; - return port; - } - - public int getValue() { - - return port; - } - - public String toString() { - - if (empty) - return "Empty Port"; - return "" + port; - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof Port)) - return false; - Port po = (Port) o; - if (po.empty && empty) - return true; - return (!po.empty) && (!empty) && (port == po.port); - } - - public int hashCode() { - - if (empty) - return -1; - int hash = 17; - return 37 * hash + port; - } + private int port; // int representing the port number + private boolean empty = true; // boolean true id this object refers to the + // empty port + + /** Private constructor. */ + private Port(int port, boolean empty) { + + this.port = port; + this.empty = empty; + } + + /** Static method to make an empty port. */ + public static Port makeEmpty() { + + return new Port(-1, true); + } + + /** + * Static method used to make a non empty Port object. 
It requires an int between 0 and 65535 + * representing the port: if the limits are exceeded then an InvalidPortAttributeException is + * thrown. + */ + public static Port make(int port) throws InvalidPortAttributeException { + + if ((port < 0) || (port > 65535)) throw new InvalidPortAttributeException(port); + return new Port(port, false); + } + + /** Method that returns whether this object refers to the empty port or not. */ + public boolean isEmpty() { + + return empty; + } + + /** Method that returns an int representing this port. An empty port will return -1. */ + public int toInt() { + + if (empty) return -1; + return port; + } + + public int getValue() { + + return port; + } + + public String toString() { + + if (empty) return "Empty Port"; + return "" + port; + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof Port)) return false; + Port po = (Port) o; + if (po.empty && empty) return true; + return (!po.empty) && (!empty) && (port == po.port); + } + + public int hashCode() { + + if (empty) return -1; + int hash = 17; + return 37 * hash + port; + } } diff --git a/src/main/java/it/grid/storm/common/types/SFN.java b/src/main/java/it/grid/storm/common/types/SFN.java index 963c1277..ef635143 100644 --- a/src/main/java/it/grid/storm/common/types/SFN.java +++ b/src/main/java/it/grid/storm/common/types/SFN.java @@ -1,593 +1,580 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a SFN, that is a Site File Name. It is used as part of - * a SURL. - * + * This class represents a SFN, that is a Site File Name. It is used as part of a SURL. + * * @author EGRID ICTP - CNAF Bologna * @version 2.0 * @date March 2005 */ public class SFN { - private static final Logger log = LoggerFactory.getLogger(SFN.class); - - private Machine m = null; - private Port p = null; - private EndPoint ep = null; - private StFN pn = null; - private boolean empty = true; - - private SFN(Machine m, Port p, EndPoint ep, StFN pn, boolean empty) { - - this.m = m; - this.p = p; - this.ep = ep; - this.pn = pn; - this.empty = empty; - } - - /** - * Static method that returns an empty SFN. - */ - public static SFN makeEmpty() { - - return new SFN(Machine.makeEmpty(), Port.makeEmpty(), EndPoint.makeEmpty(), - StFN.makeEmpty(), true); - } - - /** - * Static method that requires a Machine m, the Port p on that Machine, and - * the StFN stfn. An InvalidSFNAttributesException is thrown if any is null or - * empty. - */ - public static SFN makeInSimpleForm(Machine m, Port p, StFN stfn) - throws InvalidSFNAttributesException { - - if ((m == null) || (p == null) || (stfn == null) || m.isEmpty() - || p.isEmpty() || stfn.isEmpty()) { - throw new InvalidSFNAttributesException(m, p, stfn); - } - return new SFN(m, p, EndPoint.makeEmpty(), stfn, false); - } - - /** - * Static method that requires a Machine m, the Port p on that Machine, and - * the StFN stfn. An InvalidSFNAttributesException is thrown if any is null or - * empty. 
- */ - public static SFN makeInQueryForm(Machine m, Port p, EndPoint ep, StFN stfn) - throws InvalidSFNAttributesException { - - if ((m == null) || (p == null) || (ep == null) || (stfn == null) - || m.isEmpty() || p.isEmpty() || (ep.isEmpty()) || stfn.isEmpty()) { - throw new InvalidSFNAttributesException(m, p, ep, stfn); - } - return new SFN(m, p, ep, stfn, false); - } - - /** - * Static method that requires a Machine m, and the StFN stfn. An - * InvalidSFNAttributesException is thrown if any is null or empty. - */ - public static SFN makeInSimpleForm(Machine m, StFN stfn) - throws InvalidSFNAttributesException { - - if ((m == null) || (stfn == null) || m.isEmpty() || stfn.isEmpty()) { - throw new InvalidSFNAttributesException(m, null, stfn); - } - return new SFN(m, Port.makeEmpty(), EndPoint.makeEmpty(), stfn, false); - } - - /** - * Static method that requires a Machine m, the EndPoint ep, and the StFN - * stfn. An InvalidSFNAttributesException is thrown if any is null or empty. - */ - public static SFN makeInQueryForm(Machine m, EndPoint ep, StFN stfn) - throws InvalidSFNAttributesException { - - if ((m == null) || (stfn == null) || (ep == null) || m.isEmpty() - || stfn.isEmpty() || (ep.isEmpty())) { - throw new InvalidSFNAttributesException(m, null, stfn); - } - return new SFN(m, Port.makeEmpty(), ep, stfn, false); - } - - /** - * Static method that returns an SFN from a String representation. If the - * supplied String is null or malformed, an InvalidSFNAttributesException is - * thrown. - * - * @param surlString - * a surl string without the protocol schema part - * @return - * @throws ParsingSFNAttributesException - * @throws InvalidSFNAttributesException - */ - public static SFN makeFromString(String surlString) - throws ParsingSFNAttributesException, InvalidSFNAttributesException { - - if (surlString == null) { - throw new ParsingSFNAttributesException(surlString, - "Supplied SFN String was null!"); - } - int colon = surlString.indexOf(":"); // first occurence of : - int slash = surlString.indexOf("/"); // first occurence of / - /* First occurence of ?SFN= */ - int question = surlString.toUpperCase().indexOf("?SFN="); - // TODO MICHELE USER_SURL refactored - if (colon > 0) { - if (question < 0) { - /* - * Supplied string does not contain a colon, and does not contain - * question mark! Treat it as optional port specification, _in_ simple - * form! - */ - if ((slash == -1) || (slash == 0)) { - /* Slash not found or right at the beginning! */ - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as omitting the optional port specification, and as referring to query form;" - + " but the first slash was either not found or right at the beginning!"); - } - return makeFromSimpleFormNoPort(surlString, slash); - } else { - /* - * Supplied string does not contain a colon! Treat it as optional port - * specification, _in_ query form! - */ - if ((slash == -1) || (slash == 0) || (slash > question)) { - /* - * Slash not found or right at the beginning! Or, slash follows - * question! - */ - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as omitting the optional port specification," - + " and as referring to query form; but the first slash was either not found, " - + "or right at the beginning, or only followed the question mark!"); - } - return makeFromQueryFormNoPort(surlString, question, slash); - } - } else { - if (question < 0) { - /* - * Supplied string contains a colon! 
Treat it as if port _is_ specified, - * and _not_ in query form! - */ - - if ((colon == 0) || (colon > slash)) { - /* - * Solon or slash not found or right at the beginning! or, colon - * follows slash! - */ - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as specifying port, and as not referring to query form; " - + "but either the colon is missing, or it follows the first slash!"); - } - return makeFromSimpleForm(surlString, colon, slash); - } else { - /* - * Supplied string contains a port and it also is in query form! - */ - if ((colon == 0) || (colon > slash) || (slash > question)) { - /* - * Colon or slash not found or right at the beginning! Or, colon - * follows slash! Or slash follows question! - */ - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as having the optional port specification, " - + "and as referring to query form; but either colon is missing, " - + "colon follows first slash, or first slash follows question mark!"); - } - return makeFromQueryForm(surlString, colon, slash, question); - } - } - } - - /** - * Returns an SFN from the received string that is supposed to contain the - * port and to be in simple form - * - * @param surlString - * @param colon - * @param slash - * @param question - * @return - * @throws ParsingSFNAttributesException - * @throws InvalidSFNAttributesException - */ - private static SFN makeFromQueryForm(String surlString, int colon, int slash, - int question) throws ParsingSFNAttributesException, - InvalidSFNAttributesException { - - String machineString = surlString.substring(0, colon); - Machine machine = null; - try { - machine = Machine.make(machineString); - } catch (InvalidMachineAttributeException e) { - log.warn("SFN: Unable to build -machine- attribute from {}. {}", - machineString, e.getMessage()); - } - if ((colon + 1) == slash) { - // slash found right after colon! There is no port! - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as specifying the optional port, and as referring to query form; but the port number is missing since the first slash was found right after the colon!"); - } - String portString = surlString.substring(colon + 1, slash); - Port port = null; - try { - port = Port.make(Integer.parseInt(portString)); - } catch (Throwable e) { - log.warn("SFN: Unable to build -port- attribute from {}. {}", portString, e.getMessage()); - } - // EndPoint - String endpointString = surlString.substring(slash, question); - EndPoint endpoint = null; - try { - endpoint = EndPoint.make(endpointString); - } catch (InvalidEndPointAttributeException e) { - log.warn("SFN: Unable to build -endpoint- attribute from {}. {}", - endpointString, e.getMessage()); - } - // StFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? - if (question + 5 >= surlString.length()) { - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as omitting the optional port specification, and as referring to query form; but theere is nothing left after the question mark!"); // nothing - // left - // after - // question!!! - } - String stfnString = surlString.substring(question + 5, surlString.length()); - StFN stfn = null; - try { - stfn = StFN.make(stfnString); - } catch (InvalidStFNAttributeException e) { - log.warn("SFN: Unable to build -stfn- attribute from {}. 
{}", - stfnString, - e.getMessage()); - } - return SFN.makeInQueryForm(machine, port, endpoint, stfn); - } - - /** - * - * Returns an SFN from the received string that is supposed to contain the - * port and to be in simple form - * - * @param surlString - * @param colon - * @param slash - * @return - * @throws ParsingSFNAttributesException - * @throws InvalidSFNAttributesException - */ - private static SFN makeFromSimpleForm(String surlString, int colon, int slash) - throws ParsingSFNAttributesException, InvalidSFNAttributesException { - - String machineString = surlString.substring(0, colon); - Machine machine = null; - try { - machine = Machine.make(machineString); - } catch (InvalidMachineAttributeException e) { - - log.warn("SFN: Unable to build -machine- attribute from {}. {}", - machineString, e.getMessage()); - } - if ((colon + 1) == slash) { - /* Slash found right after colon! There is no port! */ - throw new ParsingSFNAttributesException(surlString, - "String interpreted as specifying port, and as not referring to query form;" - + " but the actual port number is missing since the first slash is " - + "found right after the colon"); - } - String portString = surlString.substring(colon + 1, slash); - Port port = null; - try { - port = Port.make(Integer.parseInt(portString)); - } catch (Throwable e) { - log.warn("SFN: Unable to build -port- attribute from {}. {}", - portString, e.getMessage()); - } - // StFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? - String stfnString = surlString.substring(slash, surlString.length()); - StFN stfn = null; - try { - stfn = StFN.make(stfnString); - } catch (InvalidStFNAttributeException e) { - log.warn("SFN: Unable to build -stfn- attribute from {}. {}", - stfnString, - e.getMessage()); - } - return SFN.makeInSimpleForm(machine, port, stfn); - } - - /** - * Returns an SFN from the received string that is supposed to not contain the - * port and to be in query form - * - * @param surlString - * @param slash - * @param question - * @return - * @throws ParsingSFNAttributesException - * @throws InvalidSFNAttributesException - */ - private static SFN makeFromQueryFormNoPort(String surlString, int question, - int slash) throws ParsingSFNAttributesException, - InvalidSFNAttributesException { - - String machine = surlString.substring(0, slash); - Machine machineType = null; - try { - machineType = Machine.make(machine); - } catch (InvalidMachineAttributeException e) { - - log.warn("SFN: Unable to build -machine- attribute from {}. {}", - machine, e.getMessage()); - } - // EndPoint - String endpoint = surlString.substring(slash, question); - EndPoint endpointType = null; - try { - endpointType = EndPoint.make(endpoint); - } catch (InvalidEndPointAttributeException e) { - - log.warn("SFN: Unable to build -endpoint- attribute from {}. {}", - endpoint, e.getMessage()); - } - // StFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? 
- if (question + 5 >= surlString.length()) { - throw new ParsingSFNAttributesException( - surlString, - "String interpreted as omitting the optional port specification, and as referring to query form; but nothing left after the question mark!"); // nothing - // left - // after - // question!!! - } - String stfnString = surlString.substring(question + 5, surlString.length()); - StFN stfn = null; - try { - stfn = StFN.make(stfnString); - } catch (InvalidStFNAttributeException e) { - log.warn("SFN: Unable to build -stfn- attribute from {}. {}", - stfnString, - e.getMessage()); - } - return SFN.makeInQueryForm(machineType, endpointType, stfn); - } - - /** - * - * Returns an SFN from the received string that is supposed to not contain the - * port and to be in simple form - * - * @param surlString - * @param slash - * @return - * @throws ParsingSFNAttributesException - * @throws InvalidSFNAttributesException - */ - private static SFN makeFromSimpleFormNoPort(String surlString, int slash) - throws ParsingSFNAttributesException, InvalidSFNAttributesException { - - String machine = surlString.substring(0, slash); - Machine machineType = null; - try { - machineType = Machine.make(machine); - } catch (InvalidMachineAttributeException e) { - log.warn("SFN: Unable to build -machine- attribute from {}. {}", - machine, e.getMessage()); - } - // StFN checks only for a starting / while the rest can be empty! So it - // is sufficient to choose whatever String starts at the /... even just - // the slash itself if that is what is left!!! Should the StFN - // definition be changed??? - String stfnString = surlString.substring(slash, surlString.length()); - StFN stfn = null; - try { - stfn = StFN.make(stfnString); - } catch (InvalidStFNAttributeException e) { - log.warn("SFN: Unable to build -stfn- attribute from {}. {}", - stfnString, - e.getMessage()); - } - return SFN.makeInSimpleForm(machineType, stfn); - - } - - /** - * Method that returns a Collection of all parent SFNs. The following example - * clarifies what is meant by parent SFNs. - * - * Original SFN: storage.egrid.it:8444/EGRID/original/data/nyse/file.txt - * - * Parent SFNs: storage.egrid.it:8444/EGRID/original/data/nyse - * storage.egrid.it:8444/EGRID/original/data - * storage.egrid.it:8444/EGRID/original storage.egrid.it:8444/EGRID - * - * An empty collection is returned if any error occurs during creation of - * parent SFNs. Likewise if This is an EmptySFN. - */ - public Collection getParents() { - - if (empty) { - return new ArrayList(); - } - try { - Collection aux = new ArrayList(); - Collection auxStFN = pn.getParents(); - for (Iterator i = auxStFN.iterator(); i.hasNext();) { - if (ep.isEmpty()) { - aux.add(SFN.makeInSimpleForm(m, p, (StFN) i.next())); - } else { - aux.add(SFN.makeInQueryForm(m, p, ep, (StFN) i.next())); - } - } - return aux; - } catch (InvalidSFNAttributesException e) { - return new ArrayList(); - } - } - - /** - * Method that returns the parent SFN. The following example clarifies what is - * meant by parent SFN. - * - * Original SFN: storage.egrid.it:8444/EGRID/original/data/nyse/file.txt - * - * Parent SFN: storage.egrid.it:8444/EGRID/original/data/nyse - * - * An empty SFN is returned if any error occurs during creation. Likewise if - * This is an EmptySFN. 
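
Because the simple and query forms handled throughout this class are easier to see in use than in the parsing code, here is a minimal construction sketch; it is not part of this patch, and the host, port, endpoint and path values are made up:

    // Illustrative sketch only; checked exceptions are propagated for brevity.
    static void sfnFormsExample() throws Exception {
      Machine host = Machine.make("storage.example.org");
      Port port = Port.make(8444);                       // accepts 0..65535 only
      StFN path = StFN.make("/example-vo/data/file.txt");

      // Simple form: machine[:port] followed directly by the StFN.
      SFN simple = SFN.makeInSimpleForm(host, port, path);

      // Query form: machine[:port], an endpoint, then "?SFN=" and the StFN.
      SFN query = SFN.makeInQueryForm(host, port, EndPoint.make("/srm/managerv2"), path);

      System.out.println(simple);  // roughly storage.example.org:8444/example-vo/data/file.txt
      System.out.println(query);   // roughly storage.example.org:8444/srm/managerv2?SFN=/example-vo/data/file.txt
    }
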
- */ - public SFN getParent() { - - if (empty) { - return makeEmpty(); - } - try { - if (ep.isEmpty()) { - return SFN.makeInSimpleForm(m, p, pn.getParent()); - } else { - return SFN.makeInQueryForm(m, p, ep, pn.getParent()); - } - } catch (InvalidSFNAttributesException e) { - return makeEmpty(); - } - } - - /** - * Method that returns a boolean true if this object is empty. - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns the Machine specified in this SFN. If this is an empty - * SFN, then an empty Machine is returned. - */ - public Machine machine() { - - if (empty) { - return Machine.makeEmpty(); - } - return m; - } - - /** - * Method that returns the Port specified in this SFN. If this is an empty - * SFN, then an empty Port is returned. - */ - public Port port() { - - if (empty) { - return Port.makeEmpty(); - } - return p; - } - - /** - * Method that returns th EndPoint specified in This SFN. If This is an empty - * SFN, then an Empty EndPoint is returned; likewise if none was specified at - * creation time. - */ - public EndPoint endPoint() { - - if (empty) { - return EndPoint.makeEmpty(); - } - return ep; - } - - /** - * Method that returns the StFN specified in this SFN. If this is an empty - * SFN, then an empty StFN is returned. - */ - public StFN stfn() { - - if (empty) { - return StFN.makeEmpty(); - } - return pn; - } - - @Override - public String toString() { - - if (empty) { - return "Empty SFN"; - } - if (ep.isEmpty()) { - if (p.isEmpty()) { - return m.toString() + pn; - } else { - return m + ":" + p + pn; - } - } else { - if (p.isEmpty()) { - return m.toString() + ep.toString() + "?SFN=" + pn; - } else { - return m + ":" + p + ep + "?SFN=" + pn; - } - } - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof SFN)) { - return false; - } - SFN sfno = (SFN) o; - if (empty && sfno.empty) { - return true; - } - return !empty && !sfno.empty && m.equals(sfno.m) && p.equals(sfno.p) - && ep.equals(sfno.ep) && pn.equals(sfno.pn); - } - - @Override - public int hashCode() { - - if (empty) { - return 0; - } - int hash = 17; - hash = 37 * hash + m.hashCode(); - hash = 37 * hash + p.hashCode(); - hash = 37 * hash + ep.hashCode(); - hash = 37 * hash + pn.hashCode(); - return hash; - } - + private static final Logger log = LoggerFactory.getLogger(SFN.class); + + private Machine m = null; + private Port p = null; + private EndPoint ep = null; + private StFN pn = null; + private boolean empty = true; + + private SFN(Machine m, Port p, EndPoint ep, StFN pn, boolean empty) { + + this.m = m; + this.p = p; + this.ep = ep; + this.pn = pn; + this.empty = empty; + } + + /** Static method that returns an empty SFN. */ + public static SFN makeEmpty() { + + return new SFN( + Machine.makeEmpty(), Port.makeEmpty(), EndPoint.makeEmpty(), StFN.makeEmpty(), true); + } + + /** + * Static method that requires a Machine m, the Port p on that Machine, and the StFN stfn. An + * InvalidSFNAttributesException is thrown if any is null or empty. + */ + public static SFN makeInSimpleForm(Machine m, Port p, StFN stfn) + throws InvalidSFNAttributesException { + + if ((m == null) + || (p == null) + || (stfn == null) + || m.isEmpty() + || p.isEmpty() + || stfn.isEmpty()) { + throw new InvalidSFNAttributesException(m, p, stfn); + } + return new SFN(m, p, EndPoint.makeEmpty(), stfn, false); + } + + /** + * Static method that requires a Machine m, the Port p on that Machine, and the StFN stfn. 
An + * InvalidSFNAttributesException is thrown if any is null or empty. + */ + public static SFN makeInQueryForm(Machine m, Port p, EndPoint ep, StFN stfn) + throws InvalidSFNAttributesException { + + if ((m == null) + || (p == null) + || (ep == null) + || (stfn == null) + || m.isEmpty() + || p.isEmpty() + || (ep.isEmpty()) + || stfn.isEmpty()) { + throw new InvalidSFNAttributesException(m, p, ep, stfn); + } + return new SFN(m, p, ep, stfn, false); + } + + /** + * Static method that requires a Machine m, and the StFN stfn. An InvalidSFNAttributesException is + * thrown if any is null or empty. + */ + public static SFN makeInSimpleForm(Machine m, StFN stfn) throws InvalidSFNAttributesException { + + if ((m == null) || (stfn == null) || m.isEmpty() || stfn.isEmpty()) { + throw new InvalidSFNAttributesException(m, null, stfn); + } + return new SFN(m, Port.makeEmpty(), EndPoint.makeEmpty(), stfn, false); + } + + /** + * Static method that requires a Machine m, the EndPoint ep, and the StFN stfn. An + * InvalidSFNAttributesException is thrown if any is null or empty. + */ + public static SFN makeInQueryForm(Machine m, EndPoint ep, StFN stfn) + throws InvalidSFNAttributesException { + + if ((m == null) + || (stfn == null) + || (ep == null) + || m.isEmpty() + || stfn.isEmpty() + || (ep.isEmpty())) { + throw new InvalidSFNAttributesException(m, null, stfn); + } + return new SFN(m, Port.makeEmpty(), ep, stfn, false); + } + + /** + * Static method that returns an SFN from a String representation. If the supplied String is null + * or malformed, an InvalidSFNAttributesException is thrown. + * + * @param surlString a surl string without the protocol schema part + * @return + * @throws ParsingSFNAttributesException + * @throws InvalidSFNAttributesException + */ + public static SFN makeFromString(String surlString) + throws ParsingSFNAttributesException, InvalidSFNAttributesException { + + if (surlString == null) { + throw new ParsingSFNAttributesException(surlString, "Supplied SFN String was null!"); + } + int colon = surlString.indexOf(":"); // first occurence of : + int slash = surlString.indexOf("/"); // first occurence of / + /* First occurence of ?SFN= */ + int question = surlString.toUpperCase().indexOf("?SFN="); + // TODO MICHELE USER_SURL refactored + if (colon > 0) { + if (question < 0) { + /* + * Supplied string does not contain a colon, and does not contain + * question mark! Treat it as optional port specification, _in_ simple + * form! + */ + if ((slash == -1) || (slash == 0)) { + /* Slash not found or right at the beginning! */ + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as omitting the optional port specification, and as referring to query form;" + + " but the first slash was either not found or right at the beginning!"); + } + return makeFromSimpleFormNoPort(surlString, slash); + } else { + /* + * Supplied string does not contain a colon! Treat it as optional port + * specification, _in_ query form! + */ + if ((slash == -1) || (slash == 0) || (slash > question)) { + /* + * Slash not found or right at the beginning! Or, slash follows + * question! 
+ */ + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as omitting the optional port specification," + + " and as referring to query form; but the first slash was either not found, " + + "or right at the beginning, or only followed the question mark!"); + } + return makeFromQueryFormNoPort(surlString, question, slash); + } + } else { + if (question < 0) { + /* + * Supplied string contains a colon! Treat it as if port _is_ specified, + * and _not_ in query form! + */ + + if ((colon == 0) || (colon > slash)) { + /* + * Solon or slash not found or right at the beginning! or, colon + * follows slash! + */ + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as specifying port, and as not referring to query form; " + + "but either the colon is missing, or it follows the first slash!"); + } + return makeFromSimpleForm(surlString, colon, slash); + } else { + /* + * Supplied string contains a port and it also is in query form! + */ + if ((colon == 0) || (colon > slash) || (slash > question)) { + /* + * Colon or slash not found or right at the beginning! Or, colon + * follows slash! Or slash follows question! + */ + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as having the optional port specification, " + + "and as referring to query form; but either colon is missing, " + + "colon follows first slash, or first slash follows question mark!"); + } + return makeFromQueryForm(surlString, colon, slash, question); + } + } + } + + /** + * Returns an SFN from the received string that is supposed to contain the port and to be in + * simple form + * + * @param surlString + * @param colon + * @param slash + * @param question + * @return + * @throws ParsingSFNAttributesException + * @throws InvalidSFNAttributesException + */ + private static SFN makeFromQueryForm(String surlString, int colon, int slash, int question) + throws ParsingSFNAttributesException, InvalidSFNAttributesException { + + String machineString = surlString.substring(0, colon); + Machine machine = null; + try { + machine = Machine.make(machineString); + } catch (InvalidMachineAttributeException e) { + log.warn( + "SFN: Unable to build -machine- attribute from {}. {}", machineString, e.getMessage()); + } + if ((colon + 1) == slash) { + // slash found right after colon! There is no port! + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as specifying the optional port, and as referring to query form; but the port number is missing since the first slash was found right after the colon!"); + } + String portString = surlString.substring(colon + 1, slash); + Port port = null; + try { + port = Port.make(Integer.parseInt(portString)); + } catch (Throwable e) { + log.warn("SFN: Unable to build -port- attribute from {}. {}", portString, e.getMessage()); + } + // EndPoint + String endpointString = surlString.substring(slash, question); + EndPoint endpoint = null; + try { + endpoint = EndPoint.make(endpointString); + } catch (InvalidEndPointAttributeException e) { + log.warn( + "SFN: Unable to build -endpoint- attribute from {}. {}", endpointString, e.getMessage()); + } + // StFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? 
+ if (question + 5 >= surlString.length()) { + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as omitting the optional port specification, and as referring to query form; but theere is nothing left after the question mark!"); // nothing + // left + // after + // question!!! + } + String stfnString = surlString.substring(question + 5, surlString.length()); + StFN stfn = null; + try { + stfn = StFN.make(stfnString); + } catch (InvalidStFNAttributeException e) { + log.warn("SFN: Unable to build -stfn- attribute from {}. {}", stfnString, e.getMessage()); + } + return SFN.makeInQueryForm(machine, port, endpoint, stfn); + } + + /** + * Returns an SFN from the received string that is supposed to contain the port and to be in + * simple form + * + * @param surlString + * @param colon + * @param slash + * @return + * @throws ParsingSFNAttributesException + * @throws InvalidSFNAttributesException + */ + private static SFN makeFromSimpleForm(String surlString, int colon, int slash) + throws ParsingSFNAttributesException, InvalidSFNAttributesException { + + String machineString = surlString.substring(0, colon); + Machine machine = null; + try { + machine = Machine.make(machineString); + } catch (InvalidMachineAttributeException e) { + + log.warn( + "SFN: Unable to build -machine- attribute from {}. {}", machineString, e.getMessage()); + } + if ((colon + 1) == slash) { + /* Slash found right after colon! There is no port! */ + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as specifying port, and as not referring to query form;" + + " but the actual port number is missing since the first slash is " + + "found right after the colon"); + } + String portString = surlString.substring(colon + 1, slash); + Port port = null; + try { + port = Port.make(Integer.parseInt(portString)); + } catch (Throwable e) { + log.warn("SFN: Unable to build -port- attribute from {}. {}", portString, e.getMessage()); + } + // StFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? + String stfnString = surlString.substring(slash, surlString.length()); + StFN stfn = null; + try { + stfn = StFN.make(stfnString); + } catch (InvalidStFNAttributeException e) { + log.warn("SFN: Unable to build -stfn- attribute from {}. {}", stfnString, e.getMessage()); + } + return SFN.makeInSimpleForm(machine, port, stfn); + } + + /** + * Returns an SFN from the received string that is supposed to not contain the port and to be in + * query form + * + * @param surlString + * @param slash + * @param question + * @return + * @throws ParsingSFNAttributesException + * @throws InvalidSFNAttributesException + */ + private static SFN makeFromQueryFormNoPort(String surlString, int question, int slash) + throws ParsingSFNAttributesException, InvalidSFNAttributesException { + + String machine = surlString.substring(0, slash); + Machine machineType = null; + try { + machineType = Machine.make(machine); + } catch (InvalidMachineAttributeException e) { + + log.warn("SFN: Unable to build -machine- attribute from {}. {}", machine, e.getMessage()); + } + // EndPoint + String endpoint = surlString.substring(slash, question); + EndPoint endpointType = null; + try { + endpointType = EndPoint.make(endpoint); + } catch (InvalidEndPointAttributeException e) { + + log.warn("SFN: Unable to build -endpoint- attribute from {}. 
{}", endpoint, e.getMessage()); + } + // StFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? + if (question + 5 >= surlString.length()) { + throw new ParsingSFNAttributesException( + surlString, + "String interpreted as omitting the optional port specification, and as referring to query form; but nothing left after the question mark!"); // nothing + // left + // after + // question!!! + } + String stfnString = surlString.substring(question + 5, surlString.length()); + StFN stfn = null; + try { + stfn = StFN.make(stfnString); + } catch (InvalidStFNAttributeException e) { + log.warn("SFN: Unable to build -stfn- attribute from {}. {}", stfnString, e.getMessage()); + } + return SFN.makeInQueryForm(machineType, endpointType, stfn); + } + + /** + * Returns an SFN from the received string that is supposed to not contain the port and to be in + * simple form + * + * @param surlString + * @param slash + * @return + * @throws ParsingSFNAttributesException + * @throws InvalidSFNAttributesException + */ + private static SFN makeFromSimpleFormNoPort(String surlString, int slash) + throws ParsingSFNAttributesException, InvalidSFNAttributesException { + + String machine = surlString.substring(0, slash); + Machine machineType = null; + try { + machineType = Machine.make(machine); + } catch (InvalidMachineAttributeException e) { + log.warn("SFN: Unable to build -machine- attribute from {}. {}", machine, e.getMessage()); + } + // StFN checks only for a starting / while the rest can be empty! So it + // is sufficient to choose whatever String starts at the /... even just + // the slash itself if that is what is left!!! Should the StFN + // definition be changed??? + String stfnString = surlString.substring(slash, surlString.length()); + StFN stfn = null; + try { + stfn = StFN.make(stfnString); + } catch (InvalidStFNAttributeException e) { + log.warn("SFN: Unable to build -stfn- attribute from {}. {}", stfnString, e.getMessage()); + } + return SFN.makeInSimpleForm(machineType, stfn); + } + + /** + * Method that returns a Collection of all parent SFNs. The following example clarifies what is + * meant by parent SFNs. + * + *

Original SFN: storage.egrid.it:8444/EGRID/original/data/nyse/file.txt + * + *

Parent SFNs: storage.egrid.it:8444/EGRID/original/data/nyse + * storage.egrid.it:8444/EGRID/original/data storage.egrid.it:8444/EGRID/original + * storage.egrid.it:8444/EGRID + * + *

An empty collection is returned if any error occurs during creation of parent SFNs. Likewise + * if This is an EmptySFN. + */ + public Collection getParents() { + + if (empty) { + return new ArrayList(); + } + try { + Collection aux = new ArrayList(); + Collection auxStFN = pn.getParents(); + for (Iterator i = auxStFN.iterator(); i.hasNext(); ) { + if (ep.isEmpty()) { + aux.add(SFN.makeInSimpleForm(m, p, (StFN) i.next())); + } else { + aux.add(SFN.makeInQueryForm(m, p, ep, (StFN) i.next())); + } + } + return aux; + } catch (InvalidSFNAttributesException e) { + return new ArrayList(); + } + } + + /** + * Method that returns the parent SFN. The following example clarifies what is meant by parent + * SFN. + * + *

Original SFN: storage.egrid.it:8444/EGRID/original/data/nyse/file.txt + * + *

Parent SFN: storage.egrid.it:8444/EGRID/original/data/nyse + * + *

An empty SFN is returned if any error occurs during creation. Likewise if This is an + * EmptySFN. + */ + public SFN getParent() { + + if (empty) { + return makeEmpty(); + } + try { + if (ep.isEmpty()) { + return SFN.makeInSimpleForm(m, p, pn.getParent()); + } else { + return SFN.makeInQueryForm(m, p, ep, pn.getParent()); + } + } catch (InvalidSFNAttributesException e) { + return makeEmpty(); + } + } + + /** Method that returns a boolean true if this object is empty. */ + public boolean isEmpty() { + + return empty; + } + + /** + * Method that returns the Machine specified in this SFN. If this is an empty SFN, then an empty + * Machine is returned. + */ + public Machine machine() { + + if (empty) { + return Machine.makeEmpty(); + } + return m; + } + + /** + * Method that returns the Port specified in this SFN. If this is an empty SFN, then an empty Port + * is returned. + */ + public Port port() { + + if (empty) { + return Port.makeEmpty(); + } + return p; + } + + /** + * Method that returns th EndPoint specified in This SFN. If This is an empty SFN, then an Empty + * EndPoint is returned; likewise if none was specified at creation time. + */ + public EndPoint endPoint() { + + if (empty) { + return EndPoint.makeEmpty(); + } + return ep; + } + + /** + * Method that returns the StFN specified in this SFN. If this is an empty SFN, then an empty StFN + * is returned. + */ + public StFN stfn() { + + if (empty) { + return StFN.makeEmpty(); + } + return pn; + } + + @Override + public String toString() { + + if (empty) { + return "Empty SFN"; + } + if (ep.isEmpty()) { + if (p.isEmpty()) { + return m.toString() + pn; + } else { + return m + ":" + p + pn; + } + } else { + if (p.isEmpty()) { + return m.toString() + ep.toString() + "?SFN=" + pn; + } else { + return m + ":" + p + ep + "?SFN=" + pn; + } + } + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof SFN)) { + return false; + } + SFN sfno = (SFN) o; + if (empty && sfno.empty) { + return true; + } + return !empty + && !sfno.empty + && m.equals(sfno.m) + && p.equals(sfno.p) + && ep.equals(sfno.ep) + && pn.equals(sfno.pn); + } + + @Override + public int hashCode() { + + if (empty) { + return 0; + } + int hash = 17; + hash = 37 * hash + m.hashCode(); + hash = 37 * hash + p.hashCode(); + hash = 37 * hash + ep.hashCode(); + hash = 37 * hash + pn.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/common/types/SiteProtocol.java b/src/main/java/it/grid/storm/common/types/SiteProtocol.java index 795078fd..348bb710 100644 --- a/src/main/java/it/grid/storm/common/types/SiteProtocol.java +++ b/src/main/java/it/grid/storm/common/types/SiteProtocol.java @@ -1,15 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; -import java.util.Map; import java.util.HashMap; +import java.util.Map; /** * This class represents the possible site protocols of StoRM. 
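
A quick usage sketch of the fromString lookup defined further down in this class; the inputs are illustrative and the snippet is not part of this patch:

    // Illustrative sketch: the lookup is case-insensitive and ignores blanks.
    SiteProtocol p = SiteProtocol.fromString(" SRM ");
    System.out.println(p == SiteProtocol.SRM);   // -> true
    SiteProtocol.fromString("gsiftp");           // unregistered scheme: IllegalArgumentException
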
- * + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 26th, 2005 @@ -17,56 +16,55 @@ */ public class SiteProtocol { - private String protocol = null; - private static Map m = new HashMap(); - - public static final SiteProtocol SRM = new SiteProtocol("srm") { + private String protocol = null; + private static Map m = new HashMap(); - public int hashCode() { + public static final SiteProtocol SRM = + new SiteProtocol("srm") { - return 1; - } - }; + public int hashCode() { - public static final SiteProtocol EMPTY = new SiteProtocol("empty") { + return 1; + } + }; - public int hashCode() { + public static final SiteProtocol EMPTY = + new SiteProtocol("empty") { - return 0; - } - }; + public int hashCode() { - private SiteProtocol(String protocol) { + return 0; + } + }; - this.protocol = protocol; - m.put(protocol, this); - } + private SiteProtocol(String protocol) { - /** - * Facility method to obtain a SiteProtocol object from its String - * representation. An IllegalArgumentExceptin is thrown if the supplied String - * does not have a SiteProtocol counterpart. The supplied String may contain - * white spaces and be in a mixture of upper and lower case characters. - */ - public static SiteProtocol fromString(String value) - throws IllegalArgumentException { + this.protocol = protocol; + m.put(protocol, this); + } - value = value.toLowerCase().replaceAll(" ", ""); - SiteProtocol aux = (SiteProtocol) m.get(value); - if (aux == null) - throw new IllegalArgumentException(); - return aux; - } + /** + * Facility method to obtain a SiteProtocol object from its String representation. An + * IllegalArgumentExceptin is thrown if the supplied String does not have a SiteProtocol + * counterpart. The supplied String may contain white spaces and be in a mixture of upper and + * lower case characters. + */ + public static SiteProtocol fromString(String value) throws IllegalArgumentException { - public String toString() { + value = value.toLowerCase().replaceAll(" ", ""); + SiteProtocol aux = (SiteProtocol) m.get(value); + if (aux == null) throw new IllegalArgumentException(); + return aux; + } - return protocol; - } + public String toString() { - // Maybe should be removed! - public String getValue() { + return protocol; + } - return protocol; - } + // Maybe should be removed! + public String getValue() { + return protocol; + } } diff --git a/src/main/java/it/grid/storm/common/types/SizeUnit.java b/src/main/java/it/grid/storm/common/types/SizeUnit.java index f6311b19..50599de1 100644 --- a/src/main/java/it/grid/storm/common/types/SizeUnit.java +++ b/src/main/java/it/grid/storm/common/types/SizeUnit.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents a unit of measure for FileSize; it contains a - * conversion factor for changing among units. - * + * This class represents a unit of measure for FileSize; it contains a conversion factor for + * changing among units. 
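
To make the conversion factors defined below concrete (they are decimal multiples of 1000, not 1024), a small sketch that is not part of this patch:

    // Illustrative sketch: conversionFactor() is the number of bytes in one unit.
    SizeUnit gb = SizeUnit.createSizeUnit("GB");         // lookup is case-insensitive
    double bytes = 5 * gb.conversionFactor();            // 5 * 1.0e9 = 5.0e9 bytes
    SizeUnit unknown = SizeUnit.createSizeUnit("pb");    // unmapped unit -> SizeUnit.EMPTY
    System.out.println(gb + ": " + bytes + " bytes; unknown unit: " + unknown);
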
+ * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 23rd, 2005 @@ -17,173 +16,154 @@ public abstract class SizeUnit implements Serializable { - /** - * - */ - private static final long serialVersionUID = -3885767398054889628L; + /** */ + private static final long serialVersionUID = -3885767398054889628L; - public static SizeUnit createSizeUnit(String unit) { + public static SizeUnit createSizeUnit(String unit) { - String input = unit.toLowerCase(); - if (input.toLowerCase().equals("byte")) - return SizeUnit.BYTES; - if (input.toLowerCase().equals("kb")) - return SizeUnit.KILOBYTES; - if (input.toLowerCase().equals("mb")) - return SizeUnit.MEGABYTES; - if (input.toLowerCase().equals("gb")) - return SizeUnit.GIGABYTES; - if (input.toLowerCase().equals("tb")) - return SizeUnit.TERABYTES; - return SizeUnit.EMPTY; - } + String input = unit.toLowerCase(); + if (input.toLowerCase().equals("byte")) return SizeUnit.BYTES; + if (input.toLowerCase().equals("kb")) return SizeUnit.KILOBYTES; + if (input.toLowerCase().equals("mb")) return SizeUnit.MEGABYTES; + if (input.toLowerCase().equals("gb")) return SizeUnit.GIGABYTES; + if (input.toLowerCase().equals("tb")) return SizeUnit.TERABYTES; + return SizeUnit.EMPTY; + } - public static final SizeUnit BYTES = new SizeUnit() { + public static final SizeUnit BYTES = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = 4181134075585414373L; + /** */ + private static final long serialVersionUID = 4181134075585414373L; - public double conversionFactor() { + public double conversionFactor() { - return 1.0; - } + return 1.0; + } - public String toString() { + public String toString() { - return "Bytes"; - } + return "Bytes"; + } - public int hashCode() { + public int hashCode() { - return 1; - } - }; + return 1; + } + }; - public static final SizeUnit KILOBYTES = new SizeUnit() { + public static final SizeUnit KILOBYTES = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = 9095939098314802303L; + /** */ + private static final long serialVersionUID = 9095939098314802303L; - public double conversionFactor() { + public double conversionFactor() { - return 1000.0; - } + return 1000.0; + } - public String toString() { + public String toString() { - return "KB"; - } + return "KB"; + } - public int hashCode() { + public int hashCode() { - return 2; - } - }; + return 2; + } + }; - public static final SizeUnit MEGABYTES = new SizeUnit() { + public static final SizeUnit MEGABYTES = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = -4371556318373779599L; + /** */ + private static final long serialVersionUID = -4371556318373779599L; - public double conversionFactor() { + public double conversionFactor() { - return 1000000.0; - } + return 1000000.0; + } - public String toString() { + public String toString() { - return "MB"; - } + return "MB"; + } - public int hashCode() { + public int hashCode() { - return 3; - } - }; + return 3; + } + }; - public static final SizeUnit GIGABYTES = new SizeUnit() { + public static final SizeUnit GIGABYTES = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = -7917622928734775939L; + /** */ + private static final long serialVersionUID = -7917622928734775939L; - public double conversionFactor() { + public double conversionFactor() { - return SizeUnit.MEGABYTES.conversionFactor() * 1000; - } + return SizeUnit.MEGABYTES.conversionFactor() * 1000; + } - public String toString() { + public String toString() { - 
return "GB"; - } + return "GB"; + } - public int hashCode() { + public int hashCode() { - return 4; - } - }; + return 4; + } + }; - public static final SizeUnit TERABYTES = new SizeUnit() { + public static final SizeUnit TERABYTES = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = -8093974088166886328L; + /** */ + private static final long serialVersionUID = -8093974088166886328L; - public double conversionFactor() { + public double conversionFactor() { - return SizeUnit.GIGABYTES.conversionFactor() * 1000; - } + return SizeUnit.GIGABYTES.conversionFactor() * 1000; + } - public String toString() { + public String toString() { - return "TB"; - } + return "TB"; + } - public int hashCode() { + public int hashCode() { - return 5; - } - }; + return 5; + } + }; - public static final SizeUnit EMPTY = new SizeUnit() { + public static final SizeUnit EMPTY = + new SizeUnit() { - /** - * - */ - private static final long serialVersionUID = 5609696668282214567L; + /** */ + private static final long serialVersionUID = 5609696668282214567L; - public double conversionFactor() { + public double conversionFactor() { - return 0.0; - } + return 0.0; + } - public String toString() { + public String toString() { - return "EMPTY"; - } + return "EMPTY"; + } - public int hashCode() { + public int hashCode() { - return 0; - } - }; + return 0; + } + }; - private SizeUnit() { + private SizeUnit() {} - } - - /** - * This method returns a conversion factor: the amount of bytes present in 1 - * unit of this. - */ - public abstract double conversionFactor(); - + /** This method returns a conversion factor: the amount of bytes present in 1 unit of this. */ + public abstract double conversionFactor(); } diff --git a/src/main/java/it/grid/storm/common/types/StFN.java b/src/main/java/it/grid/storm/common/types/StFN.java index 6b3bd0cb..74951573 100644 --- a/src/main/java/it/grid/storm/common/types/StFN.java +++ b/src/main/java/it/grid/storm/common/types/StFN.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; import it.grid.storm.namespace.naming.NamingConst; - import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; @@ -13,230 +11,216 @@ /** * This class represents the Storage File Name of a SURL. - * + * * @author CNAF Bologna - EGRID ICTP * @version 2.0 * @date March 2005 */ public class StFN { - private ArrayList name = new ArrayList(); - private boolean directory = false; - - private boolean empty = true; - public static final String PNAME_PATH = "path"; - - private static final String ROOT_STFN = "/"; - - private StFN(ArrayList name, boolean empty, boolean dir) { - - this.name.clear(); - this.name.addAll(name); - this.empty = empty; - this.directory = dir; - } - - /** - * Public static method that returns an empty StFN. - */ - public static StFN makeEmpty() { - - return new StFN(new ArrayList(), true, false); - } - - /** - * Public static method that requires a String representing the pathname of - * the SFN: it cannot be null or empty otherwise an - * InvalidStFNAttributeException is thrown. Likewise if it contains two - * consecutive dots (..). or does not begin with a slash (/). 
- */ - public static StFN make(String name) throws InvalidStFNAttributeException { - - if (invalid(name)) { - throw new InvalidStFNAttributeException(name); - } - return new StFN(normalize(name), false, checkDirectory(name)); - } - - /** - * Public static method that returns true if the supplied String ends with the - * Separator, thereby indicating a directory. - */ - private static boolean checkDirectory(String path) { - - if (path != null) { - return path.endsWith(NamingConst.SEPARATOR); - } else { - return false; - } - } - - /** - * Private method that returns true if the supplied string is null, or is - * empty, or contains two consecutive dots (..), or does not begin with a - * slash (/). - */ - static private boolean invalid(String name) { - - boolean wrong = (name == null) || (name.equals("")) - || (name.charAt(0) != '/'); - return wrong; - } - - /** - * Private method that accepts a valid String as defined by the private valid - * method, and returns an ordered ArrayList of all slash-separated elemets, - * trimmed of leading and trailing white spaces. Multiple consecutive slashes - * are treated as a single slash. Example1: /a/ b /c/d Result: a b c d - * Example2: /////a///b//////////// c/d///////// Result: a b c d Example3: / - * Result: empty ArrayList! - */ - static private ArrayList normalize(String s) { - - // split around slash! - String[] pieces = s.split("/"); - // remove all empty Strings which may have been produced because of - // consecutive slashes! - ArrayList auxList = new ArrayList(); - int pos = 0; - String aux = null; - for (String piece : pieces) { - aux = piece; // get the element - aux = aux.trim(); // remove all leading and trailing white spaces - if (!aux.equals("")) { - auxList.add(pos++, aux); - } - } - return auxList; - } - - /** - * Method that returns a Collection of all parent StFNs, stopping at root - * parent. The following example clarifies what is meant by parent StFNs, and - * by stopping at root parent. Original StFN: - * /EGRID/original/data/nyse/file.txt Parent StFNs: /EGRID/original/data/nyse - * /EGRID/original/data /EGRID/original /EGRID Second example: /file.txt - * Parent StFNs: Empty collection! Third example: /EGRID/ Parent StFNs: Empty - * collection! An empty collection is returned if any error occurs during - * creation of parent StFNs. Likewise if This is an EmptyStFN. - */ - public Collection getParents() { - - Collection aux = new ArrayList(); - if (empty) { - // empty StFN! - return aux; - } - // number of elements in this StFN - int size = name.size(); - if ((size == 0) || (size == 1)) { - // StFN directly on root, or with only _one_ element! - return aux; - } - for (int i = 1; i < size; i++) { - // recall sublist goes from 0 inclusive, to i _EXCLUDED_!!! - aux.add(new StFN(new ArrayList(name.subList(0, i)), false, true)); - } - return aux; - } - - /** - * Method that returns the parent StFN. The following example clarifies what - * is meant by parent StFN. Beware of the root of the StFN: the parent is - * calculated from the root! Original StFN: /EGRID/original/data/nyse/file.txt - * Parent StFN: /EGRID/original/data/nyse Second example, Original StFN: - * /file.txt Parent StFN: Empty StFN! Third example: /EGRID/ Parent StFN: - * Empty StFN! An empty StFN is returned if any error occurs during creation - * of parent. Likewise if This is an EmptyStFN. - */ - public StFN getParent() { - - if (empty) { - return makeEmpty(); // empty StFN! 
- } - int size = name.size(); // number of elements in this StFN - if ((size == 0) || (size == 1)) { - return makeEmpty(); // either directly on root, or only one element! - } - return new StFN(new ArrayList(name.subList(0, size - 1)), false, true); - } - - /** - * Method that returns true if this StFN is empty. - */ - public boolean isEmpty() { - - return empty; - } - - public String getValue() { - - return toString(); - } - - @Override - public String toString() { - - if (empty) { - return "Empty StFN"; - } - int size = name.size(); - if (size == 0) { - return ROOT_STFN; - } - StringBuilder sb = new StringBuilder(); - sb.append("/"); - for (Iterator i = name.iterator(); i.hasNext();) { - sb.append(i.next()); - if (i.hasNext()) { - sb.append("/"); - } - } - if (directory) { - sb.append(NamingConst.SEPARATOR); - } - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof StFN)) { - return false; - } - StFN po = (StFN) o; - if (po.empty && empty) { - return true; - } - if ((!empty) && (!po.empty) && (name.size() == 0) && (po.name.size() == 0)) { - return true; - } - return (!empty) && (!po.empty) && (directory == po.directory) && name.equals(po.name); - } - - @Override - public int hashCode() { - - if (empty) { - return 0; - } - int hash = 17; - if (name.size() != 0) { - hash = 31 * hash + name.hashCode(); - } - hash = 31 * hash + (directory ? 1 : 0); - return hash; - } - - /** - * Encode StFN for FE communication. - */ - public void encode(Map param, String name) { - - param.put(name, toString()); - } - + private ArrayList name = new ArrayList(); + private boolean directory = false; + + private boolean empty = true; + public static final String PNAME_PATH = "path"; + + private static final String ROOT_STFN = "/"; + + private StFN(ArrayList name, boolean empty, boolean dir) { + + this.name.clear(); + this.name.addAll(name); + this.empty = empty; + this.directory = dir; + } + + /** Public static method that returns an empty StFN. */ + public static StFN makeEmpty() { + + return new StFN(new ArrayList(), true, false); + } + + /** + * Public static method that requires a String representing the pathname of the SFN: it cannot be + * null or empty otherwise an InvalidStFNAttributeException is thrown. Likewise if it contains two + * consecutive dots (..). or does not begin with a slash (/). + */ + public static StFN make(String name) throws InvalidStFNAttributeException { + + if (invalid(name)) { + throw new InvalidStFNAttributeException(name); + } + return new StFN(normalize(name), false, checkDirectory(name)); + } + + /** + * Public static method that returns true if the supplied String ends with the Separator, thereby + * indicating a directory. + */ + private static boolean checkDirectory(String path) { + + if (path != null) { + return path.endsWith(NamingConst.SEPARATOR); + } else { + return false; + } + } + + /** + * Private method that returns true if the supplied string is null, or is empty, or contains two + * consecutive dots (..), or does not begin with a slash (/). + */ + private static boolean invalid(String name) { + + boolean wrong = (name == null) || (name.equals("")) || (name.charAt(0) != '/'); + return wrong; + } + + /** + * Private method that accepts a valid String as defined by the private valid method, and returns + * an ordered ArrayList of all slash-separated elemets, trimmed of leading and trailing white + * spaces. Multiple consecutive slashes are treated as a single slash. 
Example1: /a/ b /c/d + * Result: a b c d Example2: /////a///b//////////// c/d///////// Result: a b c d Example3: / + * Result: empty ArrayList! + */ + private static ArrayList normalize(String s) { + + // split around slash! + String[] pieces = s.split("/"); + // remove all empty Strings which may have been produced because of + // consecutive slashes! + ArrayList auxList = new ArrayList(); + int pos = 0; + String aux = null; + for (String piece : pieces) { + aux = piece; // get the element + aux = aux.trim(); // remove all leading and trailing white spaces + if (!aux.equals("")) { + auxList.add(pos++, aux); + } + } + return auxList; + } + + /** + * Method that returns a Collection of all parent StFNs, stopping at root parent. The following + * example clarifies what is meant by parent StFNs, and by stopping at root parent. Original StFN: + * /EGRID/original/data/nyse/file.txt Parent StFNs: /EGRID/original/data/nyse /EGRID/original/data + * /EGRID/original /EGRID Second example: /file.txt Parent StFNs: Empty collection! Third example: + * /EGRID/ Parent StFNs: Empty collection! An empty collection is returned if any error occurs + * during creation of parent StFNs. Likewise if This is an EmptyStFN. + */ + public Collection getParents() { + + Collection aux = new ArrayList(); + if (empty) { + // empty StFN! + return aux; + } + // number of elements in this StFN + int size = name.size(); + if ((size == 0) || (size == 1)) { + // StFN directly on root, or with only _one_ element! + return aux; + } + for (int i = 1; i < size; i++) { + // recall sublist goes from 0 inclusive, to i _EXCLUDED_!!! + aux.add(new StFN(new ArrayList(name.subList(0, i)), false, true)); + } + return aux; + } + + /** + * Method that returns the parent StFN. The following example clarifies what is meant by parent + * StFN. Beware of the root of the StFN: the parent is calculated from the root! Original StFN: + * /EGRID/original/data/nyse/file.txt Parent StFN: /EGRID/original/data/nyse Second example, + * Original StFN: /file.txt Parent StFN: Empty StFN! Third example: /EGRID/ Parent StFN: Empty + * StFN! An empty StFN is returned if any error occurs during creation of parent. Likewise if This + * is an EmptyStFN. + */ + public StFN getParent() { + + if (empty) { + return makeEmpty(); // empty StFN! + } + int size = name.size(); // number of elements in this StFN + if ((size == 0) || (size == 1)) { + return makeEmpty(); // either directly on root, or only one element! + } + return new StFN(new ArrayList(name.subList(0, size - 1)), false, true); + } + + /** Method that returns true if this StFN is empty. 
*/ + public boolean isEmpty() { + + return empty; + } + + public String getValue() { + + return toString(); + } + + @Override + public String toString() { + + if (empty) { + return "Empty StFN"; + } + int size = name.size(); + if (size == 0) { + return ROOT_STFN; + } + StringBuilder sb = new StringBuilder(); + sb.append("/"); + for (Iterator i = name.iterator(); i.hasNext(); ) { + sb.append(i.next()); + if (i.hasNext()) { + sb.append("/"); + } + } + if (directory) { + sb.append(NamingConst.SEPARATOR); + } + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof StFN)) { + return false; + } + StFN po = (StFN) o; + if (po.empty && empty) { + return true; + } + if ((!empty) && (!po.empty) && (name.size() == 0) && (po.name.size() == 0)) { + return true; + } + return (!empty) && (!po.empty) && (directory == po.directory) && name.equals(po.name); + } + + @Override + public int hashCode() { + + if (empty) { + return 0; + } + int hash = 17; + if (name.size() != 0) { + hash = 31 * hash + name.hashCode(); + } + hash = 31 * hash + (directory ? 1 : 0); + return hash; + } + + /** Encode StFN for FE communication. */ + public void encode(Map param, String name) { + + param.put(name, toString()); + } } diff --git a/src/main/java/it/grid/storm/common/types/StFNRoot.java b/src/main/java/it/grid/storm/common/types/StFNRoot.java index e8c6201c..41ee6e10 100644 --- a/src/main/java/it/grid/storm/common/types/StFNRoot.java +++ b/src/main/java/it/grid/storm/common/types/StFNRoot.java @@ -1,52 +1,46 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** - * This class represent a Storage File Name Root. A virtual directory path - * assigned to a single Virtual Oraganization, so each SURL of this VO must - * start with correct StFNRoot. + * This class represent a Storage File Name Root. A virtual directory path assigned to a single + * Virtual Oraganization, so each SURL of this VO must start with correct StFNRoot. */ public class StFNRoot { - private String stfnroot; + private String stfnroot; - public StFNRoot(String stfnroot) throws InvalidStFNRootAttributeException { + public StFNRoot(String stfnroot) throws InvalidStFNRootAttributeException { - if ((stfnroot == null) || (stfnroot.equals("")) - || (stfnroot.charAt(0) != '/')) - throw new InvalidStFNRootAttributeException(stfnroot); - this.stfnroot = stfnroot.replaceAll(" ", ""); - } + if ((stfnroot == null) || (stfnroot.equals("")) || (stfnroot.charAt(0) != '/')) + throw new InvalidStFNRootAttributeException(stfnroot); + this.stfnroot = stfnroot.replaceAll(" ", ""); + } - public String getValue() { + public String getValue() { - return stfnroot; - } + return stfnroot; + } - public String toString() { + public String toString() { - return stfnroot; - } + return stfnroot; + } - public boolean equals(Object o) { + public boolean equals(Object o) { - if (o == this) - return true; - if (!(o instanceof StFNRoot)) - return false; - StFNRoot po = (StFNRoot) o; - return stfnroot.equals(po.stfnroot); - } + if (o == this) return true; + if (!(o instanceof StFNRoot)) return false; + StFNRoot po = (StFNRoot) o; + return stfnroot.equals(po.stfnroot); + } - @Override - public int hashCode() { - - int result = 17; - result = 31 * result + (stfnroot != null ? 
stfnroot.hashCode() : 0); - return result; - } + @Override + public int hashCode() { + int result = 17; + result = 31 * result + (stfnroot != null ? stfnroot.hashCode() : 0); + return result; + } } diff --git a/src/main/java/it/grid/storm/common/types/TFN.java b/src/main/java/it/grid/storm/common/types/TFN.java index 3621eaf5..ee714e43 100644 --- a/src/main/java/it/grid/storm/common/types/TFN.java +++ b/src/main/java/it/grid/storm/common/types/TFN.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; @@ -9,7 +8,7 @@ /** * This class represents a TFN Transfer File Name. - * + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 25th, 2005 @@ -17,269 +16,239 @@ */ public class TFN { - private static Logger log = LoggerFactory.getLogger(TFN.class); - - private Machine m = null; - private Port p = null; - private PFN pfn = null; - private boolean empty = true; // boolean indicating whether this is an empty - // TFN - - private TFN(Machine m, Port p, PFN pfn, boolean empty) { - - this.m = m; - this.p = p; - this.pfn = pfn; - this.empty = empty; - } - - /** - * Static method that returns an empty TFN. - */ - public static TFN makeEmpty() { - - return new TFN(Machine.makeEmpty(), Port.makeEmpty(), PFN.makeEmpty(), true); - } - - /** - * Static method that returns a TFN, and requires a Machine m, the Port p on - * that Machine, and the PhysicalFileName pfn. An - * InvalidTFNAttributesException is thrown if any is null or empty. - */ - public static TFN make(Machine m, Port p, PFN pfn) - throws InvalidTFNAttributesException { - - if ((m == null) || (p == null) || (pfn == null) || m.isEmpty() - || pfn.isEmpty()) - throw new InvalidTFNAttributesException(m, p, pfn); - return new TFN(m, p, pfn, false); - } - - /** - * Static method that returns a TFN with the specified PFN, and EmptyMachine - * as well as EmptyPort. The intended use of this method is to create TFN to - * be used in TURLs with FILE protocol, where no machine and port are needed. - * An InvalidTFNAttributesException is thrown if pfn is null or empty: notice - * that the exception will also show Machine and Port as null. - */ - public static TFN makeByPFN(PFN pfn) throws InvalidTFNAttributesException { - - if ((pfn == null) || (pfn.isEmpty())) - throw new InvalidTFNAttributesException(null, null, pfn); - return new TFN(Machine.makeEmpty(), Port.makeEmpty(), pfn, false); - } - - /** - * Static method that returns a TFN from a String representation. If the - * supplied String is null or malformed, an InvalidTFNAttributesException is - * thrown. - */ - public static TFN makeFromString(String s) - throws InvalidTFNAttributesException { - - if (s == null) - throw new InvalidTFNAttributesException(null, null, null); - int colon = s.indexOf(":"); // first occurence of : - int slash = s.indexOf("/"); // first occurence of / - - if (colon == -1) { - // missing port specification - if ((slash == -1) || (slash == 0)) - throw new InvalidTFNAttributesException(null, null, null); // no colon - // and no - // slash or - // slash - // right at - // the - // beginning! - // machine - String mString = s.substring(0, slash); - Machine m = null; - try { - m = Machine.make(mString); - } catch (InvalidMachineAttributeException e) { - - log.warn("TFN: Unable to build -machine- attribute from {}. 
{}", - mString, e.getMessage()); - } - - // Port is empty because it is optional specification - Port p = Port.makeEmpty(); - // PFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? - String pfnString = s.substring(slash, s.length()); - PFN pfn = null; - try { - pfn = PFN.make(pfnString); - } catch (InvalidPFNAttributeException e) { - log.warn("TFN: Unable to build -pfn- attribute from {}. {}", - pfnString,e.getMessage()); - } - return TFN.make(m, p, pfn); - } else if ((slash != -1) && (colon > slash)) { - // colon follows existing slash: the colon does NOT stand as port number - // delimiter - // treat it as missing port specification - // machine - if (slash == 0) - throw new InvalidTFNAttributesException(null, null, null); // slash - // right at - // the - // beginning! - String mString = s.substring(0, slash); - Machine m = null; - try { - m = Machine.make(mString); - } catch (InvalidMachineAttributeException e) { - - log.warn("TFN: Unable to build -machine- attribute from {}. {}", - mString, e.getMessage()); - } - // Port is empty because it is optional specification - Port p = Port.makeEmpty(); - // PFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? - String pfnString = s.substring(slash, s.length()); - PFN pfn = null; - try { - pfn = PFN.make(pfnString); - } catch (InvalidPFNAttributeException e) { - log.warn("TFN: Unable to build -pfn- attribute from {}. {}", - pfnString,e.getMessage()); - } - return TFN.make(m, p, pfn); - } else if ((slash != -1) && (colon < slash)) { - // both machine and port are present - // machine - if (colon == 0) - throw new InvalidTFNAttributesException(null, null, null); // colon - // right at - // the - // beginning! - String mString = s.substring(0, colon); - Machine m = null; - try { - m = Machine.make(mString); - } catch (InvalidMachineAttributeException e) { - - log.warn("TFN: Unable to build -machine- attribute from {}. {}", - mString, e.getMessage()); - } - // port - if ((colon + 1) == slash) - throw new InvalidTFNAttributesException(m, null, null); // slash found - // right after - // colon! There - // is no port! - String pString = s.substring(colon + 1, slash); - Port p = null; - try { - p = Port.make(Integer.parseInt(pString)); - } catch (InvalidPortAttributeException e) { - log.warn("TFN: Unable to build -port- attribute from {}.{}", - pString, e.getMessage()); - } catch (NumberFormatException e) { - log.warn("TFN: Unable to build -port- attribute from {}.{}", - pString, e.getMessage()); - } - // PFN checks only for a starting / while the rest can be empty! So it is - // sufficient to choose whatever String starts at the /... even just the - // slash itself if that is what is left!!! Should the StFN definition be - // changed??? - String pfnString = s.substring(slash, s.length()); - PFN pfn = null; - try { - pfn = PFN.make(pfnString); - } catch (InvalidPFNAttributeException e) { - - log.warn("TFN: Unable to build -pfn- attribute from {}. {}", - pfnString,e.getMessage()); - } - return TFN.make(m, p, pfn); - } else { - // slash missing! Only colon is present: the TFN does not make sense! 
- throw new InvalidTFNAttributesException(null, null, null); - } - } - - /** - * Method that returns true if this Object is the empty TFN - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns the Machine specified in this TFN. If this is an empty - * TFN, then an empty Machine is returned. - */ - public Machine machine() { - - if (empty) - return Machine.makeEmpty(); - return m; - } - - /** - * Method that returns the Port specified in this TFN. If this is an empty - * TFN,then an empty Port is returned. - */ - public Port port() { - - if (empty) - return Port.makeEmpty(); - return p; - } - - /** - * Method that returns the PhysicalFileName specified in this TFN. If this is - * an empty TFN, then an empty PFN is returned. - */ - public PFN pfn() { - - if (empty) - return PFN.makeEmpty(); - return pfn; - } - - public String toString() { - - if (empty) - return "Empty TFN"; - if (m.isEmpty() && p.isEmpty()) - return pfn.toString(); - if ((!m.isEmpty()) && p.isEmpty()) - return m.toString() + pfn.toString(); - return m + ":" + p + pfn; - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof TFN)) - return false; - TFN tfno = (TFN) o; - if (empty && tfno.empty) - return true; - return (!empty) && (!tfno.empty) && m.equals(tfno.m) && p.equals(tfno.p) - && pfn.equals(tfno.pfn); - } - - public int hashCode() { - - if (empty) - return 0; - int hash = 17; - hash = 37 * hash + m.hashCode(); - hash = 37 * hash + p.hashCode(); - hash = 37 * hash + pfn.hashCode(); - return hash; - } - + private static Logger log = LoggerFactory.getLogger(TFN.class); + + private Machine m = null; + private Port p = null; + private PFN pfn = null; + private boolean empty = true; // boolean indicating whether this is an empty + // TFN + + private TFN(Machine m, Port p, PFN pfn, boolean empty) { + + this.m = m; + this.p = p; + this.pfn = pfn; + this.empty = empty; + } + + /** Static method that returns an empty TFN. */ + public static TFN makeEmpty() { + + return new TFN(Machine.makeEmpty(), Port.makeEmpty(), PFN.makeEmpty(), true); + } + + /** + * Static method that returns a TFN, and requires a Machine m, the Port p on that Machine, and the + * PhysicalFileName pfn. An InvalidTFNAttributesException is thrown if any is null or empty. + */ + public static TFN make(Machine m, Port p, PFN pfn) throws InvalidTFNAttributesException { + + if ((m == null) || (p == null) || (pfn == null) || m.isEmpty() || pfn.isEmpty()) + throw new InvalidTFNAttributesException(m, p, pfn); + return new TFN(m, p, pfn, false); + } + + /** + * Static method that returns a TFN with the specified PFN, and EmptyMachine as well as EmptyPort. + * The intended use of this method is to create TFN to be used in TURLs with FILE protocol, where + * no machine and port are needed. An InvalidTFNAttributesException is thrown if pfn is null or + * empty: notice that the exception will also show Machine and Port as null. + */ + public static TFN makeByPFN(PFN pfn) throws InvalidTFNAttributesException { + + if ((pfn == null) || (pfn.isEmpty())) throw new InvalidTFNAttributesException(null, null, pfn); + return new TFN(Machine.makeEmpty(), Port.makeEmpty(), pfn, false); + } + + /** + * Static method that returns a TFN from a String representation. If the supplied String is null + * or malformed, an InvalidTFNAttributesException is thrown. 
+ */ + public static TFN makeFromString(String s) throws InvalidTFNAttributesException { + + if (s == null) throw new InvalidTFNAttributesException(null, null, null); + int colon = s.indexOf(":"); // first occurence of : + int slash = s.indexOf("/"); // first occurence of / + + if (colon == -1) { + // missing port specification + if ((slash == -1) || (slash == 0)) + throw new InvalidTFNAttributesException(null, null, null); // no colon + // and no + // slash or + // slash + // right at + // the + // beginning! + // machine + String mString = s.substring(0, slash); + Machine m = null; + try { + m = Machine.make(mString); + } catch (InvalidMachineAttributeException e) { + + log.warn("TFN: Unable to build -machine- attribute from {}. {}", mString, e.getMessage()); + } + + // Port is empty because it is optional specification + Port p = Port.makeEmpty(); + // PFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? + String pfnString = s.substring(slash, s.length()); + PFN pfn = null; + try { + pfn = PFN.make(pfnString); + } catch (InvalidPFNAttributeException e) { + log.warn("TFN: Unable to build -pfn- attribute from {}. {}", pfnString, e.getMessage()); + } + return TFN.make(m, p, pfn); + } else if ((slash != -1) && (colon > slash)) { + // colon follows existing slash: the colon does NOT stand as port number + // delimiter + // treat it as missing port specification + // machine + if (slash == 0) throw new InvalidTFNAttributesException(null, null, null); // slash + // right at + // the + // beginning! + String mString = s.substring(0, slash); + Machine m = null; + try { + m = Machine.make(mString); + } catch (InvalidMachineAttributeException e) { + + log.warn("TFN: Unable to build -machine- attribute from {}. {}", mString, e.getMessage()); + } + // Port is empty because it is optional specification + Port p = Port.makeEmpty(); + // PFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? + String pfnString = s.substring(slash, s.length()); + PFN pfn = null; + try { + pfn = PFN.make(pfnString); + } catch (InvalidPFNAttributeException e) { + log.warn("TFN: Unable to build -pfn- attribute from {}. {}", pfnString, e.getMessage()); + } + return TFN.make(m, p, pfn); + } else if ((slash != -1) && (colon < slash)) { + // both machine and port are present + // machine + if (colon == 0) throw new InvalidTFNAttributesException(null, null, null); // colon + // right at + // the + // beginning! + String mString = s.substring(0, colon); + Machine m = null; + try { + m = Machine.make(mString); + } catch (InvalidMachineAttributeException e) { + + log.warn("TFN: Unable to build -machine- attribute from {}. {}", mString, e.getMessage()); + } + // port + if ((colon + 1) == slash) + throw new InvalidTFNAttributesException(m, null, null); // slash found + // right after + // colon! There + // is no port! 
+ String pString = s.substring(colon + 1, slash); + Port p = null; + try { + p = Port.make(Integer.parseInt(pString)); + } catch (InvalidPortAttributeException e) { + log.warn("TFN: Unable to build -port- attribute from {}.{}", pString, e.getMessage()); + } catch (NumberFormatException e) { + log.warn("TFN: Unable to build -port- attribute from {}.{}", pString, e.getMessage()); + } + // PFN checks only for a starting / while the rest can be empty! So it is + // sufficient to choose whatever String starts at the /... even just the + // slash itself if that is what is left!!! Should the StFN definition be + // changed??? + String pfnString = s.substring(slash, s.length()); + PFN pfn = null; + try { + pfn = PFN.make(pfnString); + } catch (InvalidPFNAttributeException e) { + + log.warn("TFN: Unable to build -pfn- attribute from {}. {}", pfnString, e.getMessage()); + } + return TFN.make(m, p, pfn); + } else { + // slash missing! Only colon is present: the TFN does not make sense! + throw new InvalidTFNAttributesException(null, null, null); + } + } + + /** Method that returns true if this Object is the empty TFN */ + public boolean isEmpty() { + + return empty; + } + + /** + * Method that returns the Machine specified in this TFN. If this is an empty TFN, then an empty + * Machine is returned. + */ + public Machine machine() { + + if (empty) return Machine.makeEmpty(); + return m; + } + + /** + * Method that returns the Port specified in this TFN. If this is an empty TFN,then an empty Port + * is returned. + */ + public Port port() { + + if (empty) return Port.makeEmpty(); + return p; + } + + /** + * Method that returns the PhysicalFileName specified in this TFN. If this is an empty TFN, then + * an empty PFN is returned. + */ + public PFN pfn() { + + if (empty) return PFN.makeEmpty(); + return pfn; + } + + public String toString() { + + if (empty) return "Empty TFN"; + if (m.isEmpty() && p.isEmpty()) return pfn.toString(); + if ((!m.isEmpty()) && p.isEmpty()) return m.toString() + pfn.toString(); + return m + ":" + p + pfn; + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof TFN)) return false; + TFN tfno = (TFN) o; + if (empty && tfno.empty) return true; + return (!empty) + && (!tfno.empty) + && m.equals(tfno.m) + && p.equals(tfno.p) + && pfn.equals(tfno.pfn); + } + + public int hashCode() { + + if (empty) return 0; + int hash = 17; + hash = 37 * hash + m.hashCode(); + hash = 37 * hash + p.hashCode(); + hash = 37 * hash + pfn.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/common/types/TURLPrefix.java b/src/main/java/it/grid/storm/common/types/TURLPrefix.java index 1329214b..cad97278 100644 --- a/src/main/java/it/grid/storm/common/types/TURLPrefix.java +++ b/src/main/java/it/grid/storm/common/types/TURLPrefix.java @@ -1,116 +1,110 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; +import it.grid.storm.namespace.model.Protocol; import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.namespace.model.Protocol; /** - * This class represent the Transport Protocol available to get file from a - * certain Storage Element. This Trasnport Protocol prefix will be used to match - * with user specifed prefix to TTURL Creation. 
+ * This class represent the Transport Protocol available to get file from a certain Storage Element. + * This Trasnport Protocol prefix will be used to match with user specifed prefix to TTURL Creation. */ public class TURLPrefix { - private static final Logger log = LoggerFactory.getLogger(TURLPrefix.class); - public static final String PNAME_TURL_PREFIX = "turlPrefix"; - private ArrayList desiredProtocols; - - public TURLPrefix() { - - this.desiredProtocols = new ArrayList(); - } - - public TURLPrefix(Collection protocols) { - - this.desiredProtocols = new ArrayList(protocols); - } - - /** - * Method used to add a TransferProtocol to this holding structure. Null may - * also be added. A boolean true is returned if the holding structure changed - * as a result of the add. If this holding structure does not change, then - * false is returned. - */ - public boolean addProtocol(Protocol protocol) { - - return this.desiredProtocols.add(protocol); - } - - /** - * Method used to retrieve a TransferProtocol from this holding structure. An - * int is needed as index to the TransferProtocol to retrieve. Elements are - * not removed! - */ - public Protocol getProtocol(int index) { - - return desiredProtocols.get(index); - } - - public List getDesiredProtocols() { - - return this.desiredProtocols; - } - - public int size() { - - return desiredProtocols.size(); - } - - public void print() { - - } - - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("TURLPrefix: "); - for (Iterator i = desiredProtocols.iterator(); i.hasNext();) { - sb.append(i.next()); - sb.append(" "); - } - return sb.toString(); - } - - /** - * @param inputParam - * @param memberName - * @return - */ - public static TURLPrefix decode(Map inputParam, String memberName) { - - TURLPrefix decodedTurlPrefix = null; - if (inputParam.containsKey(memberName)) { - if (inputParam.get(memberName) != null) { - Object[] valueArray = null; - if (inputParam.get(memberName).getClass().isArray()) { - valueArray = (Object[]) inputParam.get(memberName); - } else { - valueArray = new Object[] { inputParam.get(memberName) }; - } - LinkedList protocols = new LinkedList(); - for (Object value : valueArray) { - Protocol protocol = Protocol.getProtocol(value.toString()); - if (protocol.equals(Protocol.UNKNOWN)) { - log.warn("Protocol {} is unknown." , value); - } else { - protocols.add(protocol); - } - } - if (protocols.size() > 0) { - decodedTurlPrefix = new TURLPrefix(protocols); - } - } - } - return decodedTurlPrefix; - } - - public boolean allows(Protocol protocol) { - - return desiredProtocols.contains(protocol); - } + private static final Logger log = LoggerFactory.getLogger(TURLPrefix.class); + public static final String PNAME_TURL_PREFIX = "turlPrefix"; + private ArrayList desiredProtocols; + + public TURLPrefix() { + + this.desiredProtocols = new ArrayList(); + } + + public TURLPrefix(Collection protocols) { + + this.desiredProtocols = new ArrayList(protocols); + } + + /** + * Method used to add a TransferProtocol to this holding structure. Null may also be added. A + * boolean true is returned if the holding structure changed as a result of the add. If this + * holding structure does not change, then false is returned. + */ + public boolean addProtocol(Protocol protocol) { + + return this.desiredProtocols.add(protocol); + } + + /** + * Method used to retrieve a TransferProtocol from this holding structure. An int is needed as + * index to the TransferProtocol to retrieve. Elements are not removed! 
+ */ + public Protocol getProtocol(int index) { + + return desiredProtocols.get(index); + } + + public List getDesiredProtocols() { + + return this.desiredProtocols; + } + + public int size() { + + return desiredProtocols.size(); + } + + public void print() {} + + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("TURLPrefix: "); + for (Iterator i = desiredProtocols.iterator(); i.hasNext(); ) { + sb.append(i.next()); + sb.append(" "); + } + return sb.toString(); + } + + /** + * @param inputParam + * @param memberName + * @return + */ + public static TURLPrefix decode(Map inputParam, String memberName) { + + TURLPrefix decodedTurlPrefix = null; + if (inputParam.containsKey(memberName)) { + if (inputParam.get(memberName) != null) { + Object[] valueArray = null; + if (inputParam.get(memberName).getClass().isArray()) { + valueArray = (Object[]) inputParam.get(memberName); + } else { + valueArray = new Object[] {inputParam.get(memberName)}; + } + LinkedList protocols = new LinkedList(); + for (Object value : valueArray) { + Protocol protocol = Protocol.getProtocol(value.toString()); + if (protocol.equals(Protocol.UNKNOWN)) { + log.warn("Protocol {} is unknown.", value); + } else { + protocols.add(protocol); + } + } + if (protocols.size() > 0) { + decodedTurlPrefix = new TURLPrefix(protocols); + } + } + } + return decodedTurlPrefix; + } + + public boolean allows(Protocol protocol) { + + return desiredProtocols.contains(protocol); + } } diff --git a/src/main/java/it/grid/storm/common/types/TimeUnit.java b/src/main/java/it/grid/storm/common/types/TimeUnit.java index 3b5dcaba..166cf6a4 100644 --- a/src/main/java/it/grid/storm/common/types/TimeUnit.java +++ b/src/main/java/it/grid/storm/common/types/TimeUnit.java @@ -1,190 +1,169 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents a unit of measure for EstimatedTime; it contains a - * conversion factor for changing from one to the other units. - * + * This class represents a unit of measure for EstimatedTime; it contains a conversion factor for + * changing from one to the other units. 
+ * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 23rd, 2005 * @version 1.0 */ - package it.grid.storm.common.types; import java.io.Serializable; public abstract class TimeUnit implements Serializable { - /** - * - */ - private static final long serialVersionUID = 3158145080727358357L; - - public static TimeUnit createTimeUnit(String unit) { - - String input = unit.toLowerCase(); - if (input.equals("seconds") || input.equals("sec")) - return TimeUnit.SECONDS; - if (input.equals("minutes") || input.equals("min")) - return TimeUnit.MINUTES; - if (input.equals("hours") || input.equals("h")) - return TimeUnit.HOURS; - if (input.equals("days") || input.equals("d")) - return TimeUnit.DAYS; - if (input.equals("weeks") || input.equals("week")) - return TimeUnit.WEEKS; - return TimeUnit.EMPTY; - } + /** */ + private static final long serialVersionUID = 3158145080727358357L; - public static TimeUnit EMPTY = new TimeUnit() { + public static TimeUnit createTimeUnit(String unit) { - /** - * - */ - private static final long serialVersionUID = 5599746107173300367L; + String input = unit.toLowerCase(); + if (input.equals("seconds") || input.equals("sec")) return TimeUnit.SECONDS; + if (input.equals("minutes") || input.equals("min")) return TimeUnit.MINUTES; + if (input.equals("hours") || input.equals("h")) return TimeUnit.HOURS; + if (input.equals("days") || input.equals("d")) return TimeUnit.DAYS; + if (input.equals("weeks") || input.equals("week")) return TimeUnit.WEEKS; + return TimeUnit.EMPTY; + } - public double conversionFactor() { + public static TimeUnit EMPTY = + new TimeUnit() { - return -1.0; - } + /** */ + private static final long serialVersionUID = 5599746107173300367L; - public String toString() { + public double conversionFactor() { - return "none"; - } + return -1.0; + } - public int hashCode() { + public String toString() { - return -1; - } - }; + return "none"; + } - public static TimeUnit SECONDS = new TimeUnit() { + public int hashCode() { - /** - * - */ - private static final long serialVersionUID = 4222321087668878368L; + return -1; + } + }; - public double conversionFactor() { + public static TimeUnit SECONDS = + new TimeUnit() { - return 1.0; - } + /** */ + private static final long serialVersionUID = 4222321087668878368L; - public String toString() { + public double conversionFactor() { - return "seconds"; - } + return 1.0; + } - public int hashCode() { + public String toString() { - return 1; - } - }; + return "seconds"; + } - public static TimeUnit MINUTES = new TimeUnit() { + public int hashCode() { - /** - * - */ - private static final long serialVersionUID = -7667017133554141272L; + return 1; + } + }; - public double conversionFactor() { + public static TimeUnit MINUTES = + new TimeUnit() { - return 60.0; - } + /** */ + private static final long serialVersionUID = -7667017133554141272L; - public String toString() { + public double conversionFactor() { - return "minutes"; - } + return 60.0; + } - public int hashCode() { + public String toString() { - return 2; - } - }; + return "minutes"; + } - public static TimeUnit HOURS = new TimeUnit() { + public int hashCode() { - /** - * - */ - private static final long serialVersionUID = 2405882444491700325L; + return 2; + } + }; - public double conversionFactor() { + public static TimeUnit HOURS = + new TimeUnit() { - return 3600.0; - } + /** */ + private static final long serialVersionUID = 2405882444491700325L; - public String toString() { + public double conversionFactor() { - return "hours"; - } + return 
3600.0; + } - public int hashCode() { + public String toString() { - return 3; - } - }; + return "hours"; + } - public static TimeUnit DAYS = new TimeUnit() { + public int hashCode() { - /** - * - */ - private static final long serialVersionUID = 9053681263751106642L; + return 3; + } + }; - public double conversionFactor() { + public static TimeUnit DAYS = + new TimeUnit() { - return 86400.0; - } + /** */ + private static final long serialVersionUID = 9053681263751106642L; - public String toString() { + public double conversionFactor() { - return "days"; - } + return 86400.0; + } - public int hashCode() { + public String toString() { - return 4; - } - }; + return "days"; + } - public static TimeUnit WEEKS = new TimeUnit() { + public int hashCode() { - /** - * - */ - private static final long serialVersionUID = 84259069743229691L; + return 4; + } + }; - public double conversionFactor() { + public static TimeUnit WEEKS = + new TimeUnit() { - return 604800.0; - } + /** */ + private static final long serialVersionUID = 84259069743229691L; - public String toString() { + public double conversionFactor() { - return "weeks"; - } + return 604800.0; + } - public int hashCode() { + public String toString() { - return 5; - } - }; + return "weeks"; + } - private TimeUnit() { + public int hashCode() { - } + return 5; + } + }; - /** - * This method returns a converson factor: the amout of seconds present in 1 - * unit of this. - */ - public abstract double conversionFactor(); + private TimeUnit() {} + /** This method returns a converson factor: the amout of seconds present in 1 unit of this. */ + public abstract double conversionFactor(); } diff --git a/src/main/java/it/grid/storm/common/types/TransferProtocol.java b/src/main/java/it/grid/storm/common/types/TransferProtocol.java index 285380ee..a1ce7cfe 100644 --- a/src/main/java/it/grid/storm/common/types/TransferProtocol.java +++ b/src/main/java/it/grid/storm/common/types/TransferProtocol.java @@ -1,144 +1,127 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; /** * This class represents the possible transfer protocols of StoRM. - * + * * @author EGRID ICTP - CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ public class TransferProtocol { - private String protocol; + private String protocol; - /** - * Static attribute that indicates EMPTY TransferProtocol - */ - public static TransferProtocol EMPTY = new TransferProtocol("empty") { + /** Static attribute that indicates EMPTY TransferProtocol */ + public static TransferProtocol EMPTY = + new TransferProtocol("empty") { - public int hashCode() { + public int hashCode() { - return 0; - } - }; + return 0; + } + }; - /** - * Static attribute that indicates FILE TransferProtocol. - */ - public static TransferProtocol FILE = new TransferProtocol("file") { + /** Static attribute that indicates FILE TransferProtocol. */ + public static TransferProtocol FILE = + new TransferProtocol("file") { - public int hashCode() { + public int hashCode() { - return 1; - } - }; + return 1; + } + }; - /** - * Static attribute that indicates GSIFTP TransferProtocol. - */ - public static TransferProtocol GSIFTP = new TransferProtocol("gsiftp") { + /** Static attribute that indicates GSIFTP TransferProtocol. 
*/ + public static TransferProtocol GSIFTP = + new TransferProtocol("gsiftp") { - public int hashCode() { + public int hashCode() { - return 2; - } - }; + return 2; + } + }; - /** - * Static attribute that indicates RFIO TransferProtocol. - */ - public static TransferProtocol RFIO = new TransferProtocol("rfio") { + /** Static attribute that indicates RFIO TransferProtocol. */ + public static TransferProtocol RFIO = + new TransferProtocol("rfio") { - public int hashCode() { + public int hashCode() { - return 3; - } - }; + return 3; + } + }; - /** - * Static attribute that indicates ROOT TransferProtocol. - */ - public static TransferProtocol ROOT = new TransferProtocol("root") { + /** Static attribute that indicates ROOT TransferProtocol. */ + public static TransferProtocol ROOT = + new TransferProtocol("root") { - public int hashCode() { + public int hashCode() { - return 4; - } - }; - - /** - * Static attribute that indicates XROOT TransferProtocol. - */ - public static TransferProtocol XROOT = new TransferProtocol("xroot") { + return 4; + } + }; - public int hashCode() { + /** Static attribute that indicates XROOT TransferProtocol. */ + public static TransferProtocol XROOT = + new TransferProtocol("xroot") { - return 8; - } - }; + public int hashCode() { - /** - * Static attribute that indicates HTTP TransferProtocol. - */ - public static TransferProtocol HTTP = new TransferProtocol("http") { + return 8; + } + }; - public int hashCode() { + /** Static attribute that indicates HTTP TransferProtocol. */ + public static TransferProtocol HTTP = + new TransferProtocol("http") { - return 5; - } - }; + public int hashCode() { - /** - * Static attribute that indicates HTTPS TransferProtocol. - */ - public static TransferProtocol HTTPS = new TransferProtocol("https") { + return 5; + } + }; - public int hashCode() { + /** Static attribute that indicates HTTPS TransferProtocol. */ + public static TransferProtocol HTTPS = + new TransferProtocol("https") { - return 6; - } - }; + public int hashCode() { - private TransferProtocol(String protocol) { + return 6; + } + }; - this.protocol = protocol; - } + private TransferProtocol(String protocol) { - public String getValue() { + this.protocol = protocol; + } - return protocol; - } + public String getValue() { - public String toString() { + return protocol; + } - return protocol; - } - - /** - * Facility method to obtain a TransferProtocol object given its String - * representation. Any white spaces are removed. In case no match is found, an - * EMPTY TransferProtocol is returned. - */ - public static TransferProtocol getTransferProtocol(String protocol) { - - if (protocol.toLowerCase().replaceAll(" ", "").equals(FILE.toString())) - return FILE; - if (protocol.toLowerCase().replaceAll(" ", "").equals(GSIFTP.toString())) - return GSIFTP; - if (protocol.toLowerCase().replaceAll(" ", "").equals(RFIO.toString())) - return RFIO; - if (protocol.toLowerCase().replaceAll(" ", "").equals(ROOT.toString())) - return ROOT; - if (protocol.toLowerCase().replaceAll(" ", "").equals(XROOT.toString())) - return XROOT; - if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTP.toString())) - return HTTP; - if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTPS.toString())) - return HTTPS; - return EMPTY; - } + public String toString() { + + return protocol; + } + + /** + * Facility method to obtain a TransferProtocol object given its String representation. Any white + * spaces are removed. In case no match is found, an EMPTY TransferProtocol is returned. 
+ */ + public static TransferProtocol getTransferProtocol(String protocol) { + + if (protocol.toLowerCase().replaceAll(" ", "").equals(FILE.toString())) return FILE; + if (protocol.toLowerCase().replaceAll(" ", "").equals(GSIFTP.toString())) return GSIFTP; + if (protocol.toLowerCase().replaceAll(" ", "").equals(RFIO.toString())) return RFIO; + if (protocol.toLowerCase().replaceAll(" ", "").equals(ROOT.toString())) return ROOT; + if (protocol.toLowerCase().replaceAll(" ", "").equals(XROOT.toString())) return XROOT; + if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTP.toString())) return HTTP; + if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTPS.toString())) return HTTPS; + return EMPTY; + } } diff --git a/src/main/java/it/grid/storm/common/types/VO.java b/src/main/java/it/grid/storm/common/types/VO.java index a8355194..ce99244e 100644 --- a/src/main/java/it/grid/storm/common/types/VO.java +++ b/src/main/java/it/grid/storm/common/types/VO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.common.types; @@ -8,60 +7,55 @@ public class VO implements Serializable { - private String vo; + private String vo; - public static final VO NO_VO = new VO("NO_VO"); + public static final VO NO_VO = new VO("NO_VO"); - private VO(String vo) { + private VO(String vo) { - this.vo = vo; - } + this.vo = vo; + } - public static VO make(String newVo) { + public static VO make(String newVo) { - if (newVo.equals("NO_VO")) - return NO_VO; - else - return new VO(newVo); - } + if (newVo.equals("NO_VO")) return NO_VO; + else return new VO(newVo); + } - public static VO makeDefault() { + public static VO makeDefault() { - return new VO("CNAF"); - } + return new VO("CNAF"); + } - public static VO makeNoVo() { + public static VO makeNoVo() { - return NO_VO; - } + return NO_VO; + } - public String getValue() { + public String getValue() { - return vo; - } + return vo; + } - public String toString() { + public String toString() { - return vo; - } + return vo; + } - public boolean equals(Object o) { + public boolean equals(Object o) { - if (!(o instanceof VO)) - return false; - if (o == this) - return true; - VO tmp = (VO) o; + if (!(o instanceof VO)) return false; + if (o == this) return true; + VO tmp = (VO) o; - return (vo.equals(tmp.getValue())); - } + return (vo.equals(tmp.getValue())); + } - @Override - public int hashCode() { - - int result = 17; - result = 31 * result + (vo != null ? vo.hashCode() : 0); - return result; - } + @Override + public int hashCode() { + int result = 17; + result = 31 * result + (vo != null ? vo.hashCode() : 0); + return result; + } } diff --git a/src/main/java/it/grid/storm/concurrency/NamedThread.java b/src/main/java/it/grid/storm/concurrency/NamedThread.java index 0fa0efbc..272e9b56 100644 --- a/src/main/java/it/grid/storm/concurrency/NamedThread.java +++ b/src/main/java/it/grid/storm/concurrency/NamedThread.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.concurrency; @@ -10,60 +9,57 @@ public class NamedThread extends Thread { - public static final String DEFAULT_NAME = "StoRM-Thread"; - private static final AtomicInteger created = new AtomicInteger(); - private static final AtomicInteger alive = new AtomicInteger(); - private static final Logger log = LoggerFactory.getLogger(NamedThread.class); + public static final String DEFAULT_NAME = "StoRM-Thread"; + private static final AtomicInteger created = new AtomicInteger(); + private static final AtomicInteger alive = new AtomicInteger(); + private static final Logger log = LoggerFactory.getLogger(NamedThread.class); - /** - * @param target - */ - public NamedThread(Runnable target) { + /** @param target */ + public NamedThread(Runnable target) { - this(target, DEFAULT_NAME); - } + this(target, DEFAULT_NAME); + } - /** - * @param target - * @param name - */ - public NamedThread(Runnable target, String name) { + /** + * @param target + * @param name + */ + public NamedThread(Runnable target, String name) { - super(target, name + "-" + created.incrementAndGet()); + super(target, name + "-" + created.incrementAndGet()); - log.trace("Created thread {}", getName()); - - setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { + log.trace("Created thread {}", getName()); - public void uncaughtException(Thread t, Throwable e) { + setUncaughtExceptionHandler( + new Thread.UncaughtExceptionHandler() { - log.error("UNCAUGHT in thread {}", t.getName(), e); - } - }); + public void uncaughtException(Thread t, Throwable e) { - } + log.error("UNCAUGHT in thread {}", t.getName(), e); + } + }); + } - public void run() { + public void run() { - log.trace("NamedThread.run name={}", getName()); + log.trace("NamedThread.run name={}", getName()); - try { - alive.incrementAndGet(); - super.run(); - } finally { - alive.decrementAndGet(); - log.trace("NamedThread.run name={} done.", getName()); - } - } + try { + alive.incrementAndGet(); + super.run(); + } finally { + alive.decrementAndGet(); + log.trace("NamedThread.run name={} done.", getName()); + } + } - public static int getThreadsCreated() { + public static int getThreadsCreated() { - return created.get(); - } + return created.get(); + } - public static int getThreadAlive() { - - return alive.get(); - } + public static int getThreadAlive() { + return alive.get(); + } } diff --git a/src/main/java/it/grid/storm/concurrency/NamedThreadFactory.java b/src/main/java/it/grid/storm/concurrency/NamedThreadFactory.java index 377d216d..424323bb 100644 --- a/src/main/java/it/grid/storm/concurrency/NamedThreadFactory.java +++ b/src/main/java/it/grid/storm/concurrency/NamedThreadFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.concurrency; @@ -8,16 +7,15 @@ public class NamedThreadFactory implements ThreadFactory { - private final String factoryName; + private final String factoryName; - public NamedThreadFactory(String name) { + public NamedThreadFactory(String name) { - this.factoryName = name; - } + this.factoryName = name; + } - public Thread newThread(Runnable r) { - - return new NamedThread(r, factoryName); - } + public Thread newThread(Runnable r) { + return new NamedThread(r, factoryName); + } } diff --git a/src/main/java/it/grid/storm/concurrency/TimingThreadPool.java b/src/main/java/it/grid/storm/concurrency/TimingThreadPool.java index 8603f0b8..59f2c37a 100644 --- a/src/main/java/it/grid/storm/concurrency/TimingThreadPool.java +++ b/src/main/java/it/grid/storm/concurrency/TimingThreadPool.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.concurrency; @@ -12,62 +11,61 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TimingThreadPool extends ThreadPoolExecutor { - public TimingThreadPool(int corePoolSize, int maximumPoolSize, - long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, - ThreadFactory threadFactory) { - - super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, - threadFactory); - } + public TimingThreadPool( + int corePoolSize, + int maximumPoolSize, + long keepAliveTime, + TimeUnit unit, + BlockingQueue workQueue, + ThreadFactory threadFactory) { - private final ThreadLocal startTime = new ThreadLocal<>(); - private static final Logger log = LoggerFactory - .getLogger(TimingThreadPool.class); - private final AtomicLong numTasks = new AtomicLong(); - private final AtomicLong totalTime = new AtomicLong(); + super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory); + } - @Override - protected void beforeExecute(Thread t, Runnable r) { + private final ThreadLocal startTime = new ThreadLocal<>(); + private static final Logger log = LoggerFactory.getLogger(TimingThreadPool.class); + private final AtomicLong numTasks = new AtomicLong(); + private final AtomicLong totalTime = new AtomicLong(); - super.beforeExecute(t, r); - log.debug("Thread {}: start {}", t, r); - startTime.set(System.nanoTime()); - } + @Override + protected void beforeExecute(Thread t, Runnable r) { - @Override - protected void afterExecute(Runnable r, Throwable t) { + super.beforeExecute(t, r); + log.debug("Thread {}: start {}", t, r); + startTime.set(System.nanoTime()); + } - try { - long endTime = System.nanoTime(); - long taskTime = endTime - startTime.get(); - startTime.remove(); - numTasks.incrementAndGet(); - totalTime.addAndGet(taskTime); - if (t == null && r instanceof Future) { - try { - Object result = ((Future) r).get(); - log.debug("Thread ended with result: {}", result); - } catch (CancellationException ce) { - t = ce; - } catch (ExecutionException ee) { - t = ee.getCause(); - } catch (InterruptedException ie) { - Thread.currentThread().interrupt(); // ignore/reset - } - log.debug("Throwable {}. 
end {}, time={}ns", - t,r,taskTime); - } else { - log.debug("Throwable {}", t); + @Override + protected void afterExecute(Runnable r, Throwable t) { - } - } finally { - super.afterExecute(r, t); - } - } + try { + long endTime = System.nanoTime(); + long taskTime = endTime - startTime.get(); + startTime.remove(); + numTasks.incrementAndGet(); + totalTime.addAndGet(taskTime); + if (t == null && r instanceof Future) { + try { + Object result = ((Future) r).get(); + log.debug("Thread ended with result: {}", result); + } catch (CancellationException ce) { + t = ce; + } catch (ExecutionException ee) { + t = ee.getCause(); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); // ignore/reset + } + log.debug("Throwable {}. end {}, time={}ns", t, r, taskTime); + } else { + log.debug("Throwable {}", t); + } + } finally { + super.afterExecute(r, t); + } + } } diff --git a/src/main/java/it/grid/storm/config/ConfigReader.java b/src/main/java/it/grid/storm/config/ConfigReader.java index 56fb2670..d7bfb866 100644 --- a/src/main/java/it/grid/storm/config/ConfigReader.java +++ b/src/main/java/it/grid/storm/config/ConfigReader.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.config; import static com.google.common.base.Preconditions.checkNotNull; import java.util.Iterator; - import org.apache.commons.configuration.CompositeConfiguration; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; @@ -45,15 +43,13 @@ private void logPropertiesConfiguration(PropertiesConfiguration properties) { log.debug("Configuration properties: "); String key; - for (Iterator i = properties.getKeys(); i.hasNext();) { + for (Iterator i = properties.getKeys(); i.hasNext(); ) { key = (String) i.next(); log.debug("{} = {}", key, properties.getProperty(key)); } } - /** - * Method that returns the Apache object holding all configuration parameters! - */ + /** Method that returns the Apache object holding all configuration parameters! */ public Configuration getConfiguration() { return c; @@ -66,12 +62,9 @@ public Configuration getConfiguration() { */ public String configurationDirectory() { - if (configurationPathname.isEmpty()) - return ""; + if (configurationPathname.isEmpty()) return ""; int lastSlash = this.configurationPathname.lastIndexOf(java.io.File.separator); - if (lastSlash == -1) - return ""; // no slash! + if (lastSlash == -1) return ""; // no slash! return this.configurationPathname.substring(0, lastSlash + 1); } - } diff --git a/src/main/java/it/grid/storm/config/Configuration.java b/src/main/java/it/grid/storm/config/Configuration.java index c3d284e5..38251697 100644 --- a/src/main/java/it/grid/storm/config/Configuration.java +++ b/src/main/java/it/grid/storm/config/Configuration.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.config; @@ -9,6 +8,9 @@ import static it.grid.storm.info.du.DiskUsageService.DEFAULT_TASKS_PARALLEL; import static java.lang.System.getProperty; +import com.google.common.collect.Lists; +import it.grid.storm.rest.RestServer; +import it.grid.storm.xmlrpc.XMLRPCHttpServer; import java.io.File; import java.lang.reflect.Field; import java.lang.reflect.Method; @@ -16,15 +18,9 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; - import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.lang.ArrayUtils; -import com.google.common.collect.Lists; - -import it.grid.storm.rest.RestServer; -import it.grid.storm.xmlrpc.XMLRPCHttpServer; - /** * Singleton holding all configuration values that any other object in the StoRM backend reads from * configuration files, databases, etc. Implements a 'get' method for each value that @@ -34,7 +30,6 @@ * configuration medium, default values, as well as the option of holding multiple values, is * specified in each method comment. */ - public class Configuration { public static final String DEFAULT_STORM_CONFIG_FILE = @@ -184,9 +179,7 @@ private Configuration() throws ConfigurationException { cr = new ConfigReader(filePath, refreshRate); } - /** - * Returns the sole instance of the Configuration class. - */ + /** Returns the sole instance of the Configuration class. */ public static Configuration getInstance() { return Configuration.instance; @@ -205,7 +198,7 @@ public String configurationDir() { /** * getNamespaceConfigPath - * + * * @return String */ public String namespaceConfigPath() { @@ -215,7 +208,7 @@ public String namespaceConfigPath() { /** * MANDATORY CONFIGURATION PARAMETER! Define the SURL end-points. - * + * * @return String[] */ public String[] getManagedSURLs() { @@ -227,9 +220,7 @@ public String[] getManagedSURLs() { return cr.getConfiguration().getStringArray(MANAGED_SURLS_KEY); } - /** - * @return - */ + /** @return */ public Integer[] getManagedSurlDefaultPorts() { Integer[] portsArray; @@ -247,9 +238,7 @@ public Integer[] getManagedSurlDefaultPorts() { return portsArray; } - /** - * @return String - */ + /** @return String */ public String getServiceHostname() { return cr.getConfiguration().getString(SERVICE_HOSTNAME_KEY, "UNDEFINED_STORM_HOSTNAME"); @@ -343,7 +332,8 @@ public String getDBHostname() { public String getDBProperties() { - return cr.getConfiguration().getString(DB_URL_PROPERTIES, "serverTimezone=UTC&autoReconnect=true"); + return cr.getConfiguration() + .getString(DB_URL_PROPERTIES, "serverTimezone=UTC&autoReconnect=true"); } /** @@ -398,7 +388,7 @@ public long getCleaningTimeInterval() { /** * Get the default file size - * + * * @return */ public long getFileDefaultSize() { @@ -490,24 +480,22 @@ public int getPickingMaxBatchSize() { return cr.getConfiguration().getInt(PICKING_MAX_BATCH_SIZE_KEY, 100); } - /** - * Get max number of XMLRPC threads into for the XMLRPC server. - */ + /** Get max number of XMLRPC threads into for the XMLRPC server. 
*/ public int getXMLRPCMaxThread() { return cr.getConfiguration() - .getInt(XMLRPC_MAX_THREAD_KEY, XMLRPCHttpServer.DEFAULT_MAX_THREAD_NUM); + .getInt(XMLRPC_MAX_THREAD_KEY, XMLRPCHttpServer.DEFAULT_MAX_THREAD_NUM); } public int getXMLRPCMaxQueueSize() { return cr.getConfiguration() - .getInt(XMLRPC_MAX_QUEUE_SIZE_KEY, XMLRPCHttpServer.DEFAULT_MAX_QUEUE_SIZE); + .getInt(XMLRPC_MAX_QUEUE_SIZE_KEY, XMLRPCHttpServer.DEFAULT_MAX_QUEUE_SIZE); } /** * Get Default Space Tokens - * + * * @return */ public List getListOfDefaultSpaceToken() { @@ -571,7 +559,7 @@ public int getXmlRpcServerPort() { * Method used by the Synch Component to set the maximum number of entries to return for the srmLs * functionality. If no value is found in the configuration medium, then the default value is * returned instead. key="synchcall.directoryManager.maxLsEntry"; default value=500; - * + * * @return int */ public int getLSMaxNumberOfEntry() { @@ -581,7 +569,7 @@ public int getLSMaxNumberOfEntry() { /** * Default value for the parameter "allLevelRecursive" of the LS request. - * + * * @return boolean */ public boolean getLSallLevelRecursive() { @@ -591,7 +579,7 @@ public boolean getLSallLevelRecursive() { /** * Default value for the parameter "numOfLevels" of the LS request. - * + * * @return int */ public int getLSnumOfLevels() { @@ -601,7 +589,7 @@ public int getLSnumOfLevels() { /** * Default value for the parameter "offset" of the LS request. - * + * * @return int */ public int getLSoffset() { @@ -822,7 +810,7 @@ public int getQueueSize() { /** * getNamespaceConfigFilename - * + * * @return String */ public String getNamespaceConfigFilename() { @@ -832,7 +820,7 @@ public String getNamespaceConfigFilename() { /** * Retrieve the namespace schema file name from the first line (attribute) of namespace.xml. - * + * * @return String */ public String getNamespaceSchemaFilename() { @@ -847,11 +835,11 @@ public int getNamespaceConfigRefreshRateInSeconds() { /** * getNamespaceAutomaticReloading - * + * * @return boolean Method used by Namespace Configuration Reloading Strategy (Peeper). If "peeper" - * found namespace.xml config file changed it checks if it can perform an automatic - * reload. If no value is found in the configuration medium, then the default one is used - * instead. key="namespace.automatic-config-reload"; default value=false + * found namespace.xml config file changed it checks if it can perform an automatic reload. If + * no value is found in the configuration medium, then the default one is used instead. + * key="namespace.automatic-config-reload"; default value=false */ public boolean getNamespaceAutomaticReloading() { @@ -1018,9 +1006,9 @@ public int getHearthbeatPeriod() { /** * getPerformanceGlancePeriod - * + * * @return int If no value is found in the configuration medium, then the default one is used - * instead. key="health.performance.glance.timeInterval"; default value=15 (15 sec) + * instead. key="health.performance.glance.timeInterval"; default value=15 (15 sec) */ public int getPerformanceGlanceTimeInterval() { @@ -1029,9 +1017,9 @@ public int getPerformanceGlanceTimeInterval() { /** * getPerformanceGlancePeriod - * + * * @return int If no value is found in the configuration medium, then the default one is used - * instead. key="health.performance.logbook.timeInterval"; default value=15 (15 sec) + * instead. 
key="health.performance.logbook.timeInterval"; default value=15 (15 sec) */ public int getPerformanceLogbookTimeInterval() { @@ -1040,9 +1028,9 @@ public int getPerformanceLogbookTimeInterval() { /** * getPerformanceMeasuring - * + * * @return boolean If no value is found in the configuration medium, then the default one is used - * instead. key="health.performance.mesauring.enabled"; default value=false + * instead. key="health.performance.mesauring.enabled"; default value=false */ public boolean getPerformanceMeasuring() { @@ -1051,11 +1039,11 @@ public boolean getPerformanceMeasuring() { /** * getBookKeppeingEnabled - * + * * @return boolean Method used by Namespace Configuration Reloading Strategy (Peeper). If "peeper" - * found namespace.xml config file changed it checks if it can perform an automatic - * reload. If no value is found in the configuration medium, then the default one is used - * instead. key="health.bookkeeping.enabled"; default value=false + * found namespace.xml config file changed it checks if it can perform an automatic reload. If + * no value is found in the configuration medium, then the default one is used instead. + * key="health.bookkeeping.enabled"; default value=false */ public boolean getBookKeepingEnabled() { @@ -1064,7 +1052,7 @@ public boolean getBookKeepingEnabled() { /** * Enable write permission on new created directory for LocalAuthorizationSource usage. - * + * * @return false by default, otherwise what is specified in the properties */ public boolean getEnableWritePermOnDirectory() { @@ -1125,12 +1113,14 @@ public int getRestServicesPort() { public int getRestServicesMaxThreads() { - return cr.getConfiguration().getInt(REST_SERVICES_MAX_THREAD, RestServer.DEFAULT_MAX_THREAD_NUM); + return cr.getConfiguration() + .getInt(REST_SERVICES_MAX_THREAD, RestServer.DEFAULT_MAX_THREAD_NUM); } public int getRestServicesMaxQueueSize() { - return cr.getConfiguration().getInt(REST_SERVICES_MAX_QUEUE_SIZE, RestServer.DEFAULT_MAX_QUEUE_SIZE); + return cr.getConfiguration() + .getInt(REST_SERVICES_MAX_QUEUE_SIZE, RestServer.DEFAULT_MAX_QUEUE_SIZE); } /** @@ -1167,7 +1157,7 @@ public String getStoRMPropertiesVersion() { /** * Flag to support or not the TAPE integration. Default value is false. 
- * + * * @return */ public boolean getTapeSupportEnabled() { @@ -1175,34 +1165,25 @@ public boolean getTapeSupportEnabled() { return cr.getConfiguration().getBoolean(TAPE_SUPPORT_ENABLED_KEY, false); } - /** - * @return - */ + /** @return */ public boolean getSynchronousQuotaCheckEnabled() { return cr.getConfiguration().getBoolean(SYNCHRONOUS_QUOTA_CHECK_ENABLED_KEY, false); } - /** - * - * @return the refresh period in seconds - */ + /** @return the refresh period in seconds */ public int getGPFSQuotaRefreshPeriod() { return cr.getConfiguration().getInt(GPFS_QUOTA_REFRESH_PERIOD_KEY, 900); } - /** - * @return - */ + /** @return */ public boolean getFastBootstrapEnabled() { return cr.getConfiguration().getBoolean(FAST_BOOTSTRAP_ENABLED_KEY, true); } - /** - * @return - */ + /** @return */ public Long getServerPoolStatusCheckTimeout() { return cr.getConfiguration().getLong(SERVER_POOL_STATUS_CHECK_TIMEOUT_KEY, 20000); @@ -1246,8 +1227,12 @@ public String toString() { // "tape.buffer.group.write" // puts in the map the pair // - String mapKey = "get" - + fieldName.substring(0, fieldName.lastIndexOf('_')).replace("_", "").toLowerCase(); + String mapKey = + "get" + + fieldName + .substring(0, fieldName.lastIndexOf('_')) + .replace("_", "") + .toLowerCase(); if (methodKeyMap.containsKey(mapKey)) { String value = methodKeyMap.get(mapKey); methodKeyMap.put(mapKey, value + " , " + (String) field.get(Configuration.instance)); @@ -1265,15 +1250,16 @@ public String toString() { * request real parameters) */ if (method.getName().substring(0, 3).equals("get") - && (!method.getName().equals("getInstance")) && method.getModifiers() == 1) { + && (!method.getName().equals("getInstance")) + && method.getModifiers() == 1) { field = method.invoke(Configuration.instance, dummyArray); if (field.getClass().isArray()) { field = ArrayUtils.toString(field); } String value = methodKeyMap.get(method.getName().toLowerCase()); if (value == null) { - configurationStringBuilder.insert(0, - "!! Unable to find method " + method.getName() + " in methode key map!"); + configurationStringBuilder.insert( + 0, "!! Unable to find method " + method.getName() + " in methode key map!"); } else { configurationStringBuilder.append("Property " + value + " : "); } @@ -1286,11 +1272,11 @@ public String toString() { return configurationStringBuilder.toString(); } catch (Exception e) { if (e.getClass().isAssignableFrom(java.lang.reflect.InvocationTargetException.class)) { - configurationStringBuilder.insert(0, - "!!! Cannot do toString! Got an Exception: " + e.getCause() + "\n"); + configurationStringBuilder.insert( + 0, "!!! Cannot do toString! Got an Exception: " + e.getCause() + "\n"); } else { - configurationStringBuilder.insert(0, - "!!! Cannot do toString! Got an Exception: " + e + "\n"); + configurationStringBuilder.insert( + 0, "!!! Cannot do toString! 
Got an Exception: " + e + "\n"); } return configurationStringBuilder.toString(); } @@ -1330,7 +1316,8 @@ public int getDiskUsageServiceTasksInterval() { public boolean getDiskUsageServiceTasksParallel() { - return cr.getConfiguration().getBoolean(DISKUSAGE_SERVICE_TASKS_PARALLEL, DEFAULT_TASKS_PARALLEL); + return cr.getConfiguration() + .getBoolean(DISKUSAGE_SERVICE_TASKS_PARALLEL, DEFAULT_TASKS_PARALLEL); } public boolean getPreferIPv6Addresses() { diff --git a/src/main/java/it/grid/storm/config/DefaultValue.java b/src/main/java/it/grid/storm/config/DefaultValue.java index 71090cb6..19203720 100644 --- a/src/main/java/it/grid/storm/config/DefaultValue.java +++ b/src/main/java/it/grid/storm/config/DefaultValue.java @@ -1,113 +1,89 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.config; +import it.grid.storm.checksum.ChecksumAlgorithm; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.checksum.ChecksumAlgorithm; - public class DefaultValue { - private static final Logger log = LoggerFactory.getLogger(DefaultValue.class); - - private DefaultValue() { - - } - - /** - * Retrieve default Space Type for anonymous user - */ - public static String getAnonymous_SpaceType() { - - return "volatile"; - } - - /** - * Retrieve default Space Type for named VO - */ - public static String getNamedVO_SpaceType(String voname) { - - String result = null; - if (result == null) { - log.info("Searching for ANONYMOUS parameter value.."); - result = getAnonymous_SpaceType(); - } - return result; - } - - /** - * Retrieve default Total Space Size for anonymous user - */ - public static long getAnonymous_TotalSpaceSize() { - - return 104857600; // 100 Mb - } - - /** - * Retrieve default Total Space Size for named VO - */ - public static long getNamedVO_TotalSpaceSize(String voname) { - - long result = -1; - if (result == -1) { - log.info("Searching for ANONYMOUS parameter value.."); - result = getAnonymous_TotalSpaceSize(); - } - return result; - } - - /** - * Retrieve default Total Space Size for anonymous user - */ - public static long getAnonymous_GuaranteedSpaceSize() { - - return 10485760; // 10 Mb - } - - /** - * Retrieve default Total Space Size for named VO - */ - public static long getNamedVO_GuaranteedSpaceSize(String voname) { - - long result = -1; - if (result == -1) { - log - .info("Searching for ANONYMOUS parameter value.."); - result = getAnonymous_GuaranteedSpaceSize(); - } - return result; - } - - /** - * Retrieve default Total Space Life Time for anonymous user - */ - public static long getAnonymous_SpaceLifeTime() { - - return 86400; // 24h - } - - /** - * Retrieve default Space Life Time for named VO - */ - public static long getNamedVO_SpaceLifeTime(String voname) { - - long result = -1; - if (result == -1) { - log.info("Searching for ANONYMOUS parameter value.."); - result = getAnonymous_SpaceLifeTime(); - } - return result; - } - - /** - * Retrieve default Checksum Algorithm - */ - public static ChecksumAlgorithm getChecksumAlgorithm() { - - return ChecksumAlgorithm.ADLER32; - } + private static final Logger log = LoggerFactory.getLogger(DefaultValue.class); + + private DefaultValue() {} + + /** Retrieve default Space Type for anonymous user */ + public static String getAnonymous_SpaceType() { + + return "volatile"; + } + + /** Retrieve default Space Type for named VO */ + public 
static String getNamedVO_SpaceType(String voname) { + + String result = null; + if (result == null) { + log.info("Searching for ANONYMOUS parameter value.."); + result = getAnonymous_SpaceType(); + } + return result; + } + + /** Retrieve default Total Space Size for anonymous user */ + public static long getAnonymous_TotalSpaceSize() { + + return 104857600; // 100 Mb + } + + /** Retrieve default Total Space Size for named VO */ + public static long getNamedVO_TotalSpaceSize(String voname) { + + long result = -1; + if (result == -1) { + log.info("Searching for ANONYMOUS parameter value.."); + result = getAnonymous_TotalSpaceSize(); + } + return result; + } + + /** Retrieve default Total Space Size for anonymous user */ + public static long getAnonymous_GuaranteedSpaceSize() { + + return 10485760; // 10 Mb + } + + /** Retrieve default Total Space Size for named VO */ + public static long getNamedVO_GuaranteedSpaceSize(String voname) { + + long result = -1; + if (result == -1) { + log.info("Searching for ANONYMOUS parameter value.."); + result = getAnonymous_GuaranteedSpaceSize(); + } + return result; + } + + /** Retrieve default Total Space Life Time for anonymous user */ + public static long getAnonymous_SpaceLifeTime() { + + return 86400; // 24h + } + + /** Retrieve default Space Life Time for named VO */ + public static long getNamedVO_SpaceLifeTime(String voname) { + + long result = -1; + if (result == -1) { + log.info("Searching for ANONYMOUS parameter value.."); + result = getAnonymous_SpaceLifeTime(); + } + return result; + } + + /** Retrieve default Checksum Algorithm */ + public static ChecksumAlgorithm getChecksumAlgorithm() { + return ChecksumAlgorithm.ADLER32; + } } diff --git a/src/main/java/it/grid/storm/ea/ExtendedAttributes.java b/src/main/java/it/grid/storm/ea/ExtendedAttributes.java index 193660a7..32cc0014 100644 --- a/src/main/java/it/grid/storm/ea/ExtendedAttributes.java +++ b/src/main/java/it/grid/storm/ea/ExtendedAttributes.java @@ -1,72 +1,54 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; public interface ExtendedAttributes { - /** - * Checks whether an extended attribute is set for a given file. - * - * @param fileName - * complete path filename of the file to retrieve the attribute for. - * @param attributeName - * name of the attribute to retrieve. - * @return true, if the attribute is set, false - * otherwise - * - * @throws ExtendedAttributesException - * in case of errors. - */ - boolean hasXAttr(String fileName, String attributeName); + /** + * Checks whether an extended attribute is set for a given file. + * + * @param fileName complete path filename of the file to retrieve the attribute for. + * @param attributeName name of the attribute to retrieve. + * @return true, if the attribute is set, false otherwise + * @throws ExtendedAttributesException in case of errors. + */ + boolean hasXAttr(String fileName, String attributeName); - /** - * Retrieve the value of a give extended attribute of a given file. - * - * @param fileName - * complete path filename of the file to retrieve the attribute for. - * @param attributeName - * name of the attribute to retrieve. - * @return the value of the requested attribute. - * - * @throws ExtendedAttributesException - * if an unrecognized error occurred (exception message is set). 
- */ - String getXAttr(String fileName, String attributeName); + /** + * Retrieve the value of a give extended attribute of a given file. + * + * @param fileName complete path filename of the file to retrieve the attribute for. + * @param attributeName name of the attribute to retrieve. + * @return the value of the requested attribute. + * @throws ExtendedAttributesException if an unrecognized error occurred (exception message is + * set). + */ + String getXAttr(String fileName, String attributeName); - /** - * Sets the value of the extended attribute identified by - * fileName and associated with the given file in the filesystem. - * - * @param filename - * complete path filename of the file to set the attribute for. - * @param attributeName - * name of the attribute. - * @param attributeValue - * value of the attribute. - * - * @throws ExtendedAttributesException - * if an unrecognized error occurred (exception message is set). - */ - public void setXAttr(String filename, String attributeName, - String attributeValue); + /** + * Sets the value of the extended attribute identified by fileName and associated + * with the given file in the filesystem. + * + * @param filename complete path filename of the file to set the attribute for. + * @param attributeName name of the attribute. + * @param attributeValue value of the attribute. + * @throws ExtendedAttributesException if an unrecognized error occurred (exception message is + * set). + */ + public void setXAttr(String filename, String attributeName, String attributeValue); - /** - * Removes the extended attribute identified by attributeName and - * associated with the given file in the filesystem. - * - * @param filename - * @param attributeName - * - * @throws FileNotFoundException - * the given file does not exists. - * @throws AttributeNotFoundException - * if attributeName does not exists. - * @throws NotSupportedException - * if extended attributes are not supported by the filesystem. - * @throws ExtendedAttributesException - * if an unrecognized error occurred (exception message is set). - */ - public void rmXAttr(String filename, String attributeName); + /** + * Removes the extended attribute identified by attributeName and associated with the + * given file in the filesystem. + * + * @param filename + * @param attributeName + * @throws FileNotFoundException the given file does not exists. + * @throws AttributeNotFoundException if attributeName does not exists. + * @throws NotSupportedException if extended attributes are not supported by the filesystem. + * @throws ExtendedAttributesException if an unrecognized error occurred (exception message is + * set). + */ + public void rmXAttr(String filename, String attributeName); } diff --git a/src/main/java/it/grid/storm/ea/ExtendedAttributesException.java b/src/main/java/it/grid/storm/ea/ExtendedAttributesException.java index 3a1836cc..8a347e94 100644 --- a/src/main/java/it/grid/storm/ea/ExtendedAttributesException.java +++ b/src/main/java/it/grid/storm/ea/ExtendedAttributesException.java @@ -1,30 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; public class ExtendedAttributesException extends RuntimeException { - private static final long serialVersionUID = 1484068785050730930L; + private static final long serialVersionUID = 1484068785050730930L; - public ExtendedAttributesException() { + public ExtendedAttributesException() {} - } + public ExtendedAttributesException(String message) { - public ExtendedAttributesException(String message) { + super(message); + } - super(message); - } + public ExtendedAttributesException(Throwable cause) { - public ExtendedAttributesException(Throwable cause) { + super(cause); + } - super(cause); - } - - public ExtendedAttributesException(String message, Throwable cause) { - - super(message, cause); - } + public ExtendedAttributesException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/ea/ExtendedAttributesFactory.java b/src/main/java/it/grid/storm/ea/ExtendedAttributesFactory.java index b862c84c..b1e9dca0 100644 --- a/src/main/java/it/grid/storm/ea/ExtendedAttributesFactory.java +++ b/src/main/java/it/grid/storm/ea/ExtendedAttributesFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; @@ -11,9 +10,7 @@ public class ExtendedAttributesFactory { public static ExtendedAttributes getExtendedAttributes() { ExtendedAttributes eaImpl = new ExtendedAttributesSwigImpl(); - - return new MetricsEAAdapter(eaImpl, - METRIC_REGISTRY.getRegistry()); - } + return new MetricsEAAdapter(eaImpl, METRIC_REGISTRY.getRegistry()); + } } diff --git a/src/main/java/it/grid/storm/ea/ExtendedAttributesSwigImpl.java b/src/main/java/it/grid/storm/ea/ExtendedAttributesSwigImpl.java index c7d8d79a..f548a7d0 100644 --- a/src/main/java/it/grid/storm/ea/ExtendedAttributesSwigImpl.java +++ b/src/main/java/it/grid/storm/ea/ExtendedAttributesSwigImpl.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; @@ -8,63 +7,55 @@ public class ExtendedAttributesSwigImpl implements ExtendedAttributes { - public ExtendedAttributesSwigImpl() { + public ExtendedAttributesSwigImpl() {} - } + @Override + public String getXAttr(String fileName, String attributeName) { - @Override - public String getXAttr(String fileName, String attributeName) { + try { - try { + return storm_xattrs.get_xattr_value(fileName, attributeName); - return storm_xattrs.get_xattr_value(fileName, attributeName); + } catch (RuntimeException e) { + throw new ExtendedAttributesException(e); + } + } - } catch (RuntimeException e) { - throw new ExtendedAttributesException(e); - } + @Override + public void setXAttr(String filename, String attributeName, String attributeValue) + throws ExtendedAttributesException { - } + try { + if (attributeValue == null) storm_xattrs.set_xattr(filename, attributeName); + else storm_xattrs.set_xattr(filename, attributeName, attributeValue); - @Override - public void setXAttr(String filename, String attributeName, - String attributeValue) throws ExtendedAttributesException { + } catch (RuntimeException e) { + throw new ExtendedAttributesException(e); + } + } - try { - if (attributeValue == null) - storm_xattrs.set_xattr(filename, attributeName); - else - storm_xattrs.set_xattr(filename, attributeName, attributeValue); + @Override + public void rmXAttr(String filename, String attributeName) throws ExtendedAttributesException { - } catch (RuntimeException e) { - throw new ExtendedAttributesException(e); - } + try { - } + storm_xattrs.remove_xattr(filename, attributeName); - @Override - public void rmXAttr(String filename, String attributeName) - throws ExtendedAttributesException { + } catch (RuntimeException e) { + throw new ExtendedAttributesException(e); + } + } - try { + @Override + public boolean hasXAttr(String fileName, String attributeName) + throws ExtendedAttributesException { - storm_xattrs.remove_xattr(filename, attributeName); + try { - } catch (RuntimeException e) { - throw new ExtendedAttributesException(e); - } - } - - @Override - public boolean hasXAttr(String fileName, String attributeName) - throws ExtendedAttributesException { - - try { - - return storm_xattrs.xattr_is_set(fileName, attributeName); - - } catch (RuntimeException e) { - throw new ExtendedAttributesException(e); - } - } + return storm_xattrs.xattr_is_set(fileName, attributeName); + } catch (RuntimeException e) { + throw new ExtendedAttributesException(e); + } + } } diff --git a/src/main/java/it/grid/storm/ea/MetricsEAAdapter.java b/src/main/java/it/grid/storm/ea/MetricsEAAdapter.java index d913765d..bc201e58 100644 --- a/src/main/java/it/grid/storm/ea/MetricsEAAdapter.java +++ b/src/main/java/it/grid/storm/ea/MetricsEAAdapter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; @@ -40,8 +39,7 @@ public String getXAttr(String fileName, String attributeName) { } } - public void setXAttr(String filename, String attributeName, - String attributeValue) { + public void setXAttr(String filename, String attributeName, String attributeValue) { final Timer.Context context = eaTimer.time(); try { @@ -60,5 +58,4 @@ public void rmXAttr(String filename, String attributeName) { context.stop(); } } - } diff --git a/src/main/java/it/grid/storm/ea/StormEA.java b/src/main/java/it/grid/storm/ea/StormEA.java index 72736ccd..4513ede9 100644 --- a/src/main/java/it/grid/storm/ea/StormEA.java +++ b/src/main/java/it/grid/storm/ea/StormEA.java @@ -1,21 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea; +import com.google.common.collect.Maps; +import it.grid.storm.checksum.ChecksumAlgorithm; import java.text.Format; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Maps; - -import it.grid.storm.checksum.ChecksumAlgorithm; - public class StormEA { private static final Logger log = LoggerFactory.getLogger(StormEA.class); @@ -75,7 +71,6 @@ public static String getChecksum(String fileName, ChecksumAlgorithm algorithm) { } return null; - } public static boolean getMigrated(String fileName) { @@ -96,7 +91,6 @@ public static long getPinned(String fileName) { String pinString = ea.getXAttr(fileName, EA_PINNED); return Long.decode(pinString); - } public static String getTSMRecT(String fileName) { @@ -140,7 +134,6 @@ public static void removePinned(String fileName) { } catch (ExtendedAttributesException eae) { log.warn("Cannot remove pinned attribute from file: {}", fileName, eae); } - } public static void setChecksum(String fileName, String checksum, ChecksumAlgorithm algorithm) { @@ -156,7 +149,7 @@ public static void setChecksum(String fileName, String checksum, ChecksumAlgorit /** * Set the Extended Attribute "pinned" ({@value StormEA#EA_PINNED}) to the given value. - * + * * @param fileName * @param expirationDateInSEC expiration time of the pin expressed as "seconds since the epoch". */ @@ -170,7 +163,8 @@ public static void setPinned(String fileName, long expirationDateInSEC) { log.debug( "The file '{}' is already Pinned and the pre-existing PinLifeTime is greater " + "than the new one. Nothing is changed in EA. 
Expiration: {}", - fileName, formatter.format(new Date(existingPinValueInSEC * 1000))); + fileName, + formatter.format(new Date(existingPinValueInSEC * 1000))); return; } @@ -181,11 +175,17 @@ public static void setPinned(String fileName, long expirationDateInSEC) { if (log.isDebugEnabled()) { if (existingPinValueInSEC == -1) { - log.debug("Added the Pinned EA to '" + fileName + "' with expiration: " - + formatter.format(new Date(existingPinValueInSEC * 1000))); + log.debug( + "Added the Pinned EA to '" + + fileName + + "' with expiration: " + + formatter.format(new Date(existingPinValueInSEC * 1000))); } else { - log.debug("Updated the Pinned EA to '" + fileName + "' with expiration: " - + formatter.format(new Date(existingPinValueInSEC * 1000))); + log.debug( + "Updated the Pinned EA to '" + + fileName + + "' with expiration: " + + formatter.format(new Date(existingPinValueInSEC * 1000))); } } diff --git a/src/main/java/it/grid/storm/ea/remote/Constants.java b/src/main/java/it/grid/storm/ea/remote/Constants.java index 0d3b1293..86caefbe 100644 --- a/src/main/java/it/grid/storm/ea/remote/Constants.java +++ b/src/main/java/it/grid/storm/ea/remote/Constants.java @@ -1,22 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea.remote; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class Constants { - public static final String ENCODING_SCHEME = "UTF-8"; + public static final String ENCODING_SCHEME = "UTF-8"; - public static final String RESOURCE = "checksum"; + public static final String RESOURCE = "checksum"; - public static final String ADLER_32 = "Adler32"; + public static final String ADLER_32 = "Adler32"; - public static final String CHECKSUM_VALUE_KEY = "value"; + public static final String CHECKSUM_VALUE_KEY = "value"; - public static final String VERSION = "1.0"; + public static final String VERSION = "1.0"; } diff --git a/src/main/java/it/grid/storm/ea/remote/resource/RequestParameters.java b/src/main/java/it/grid/storm/ea/remote/resource/RequestParameters.java index 1a1bb551..57ab5fad 100644 --- a/src/main/java/it/grid/storm/ea/remote/resource/RequestParameters.java +++ b/src/main/java/it/grid/storm/ea/remote/resource/RequestParameters.java @@ -1,132 +1,128 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea.remote.resource; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import it.grid.storm.authz.remote.Constants; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; - import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.authz.remote.Constants; - class RequestParameters { - private static final Logger log = LoggerFactory - .getLogger(RequestParameters.class); - - private final String filePathDecoded; - - private String checksumDecoded; - - RequestParameters(Builder builder) { - - filePathDecoded = decodeAndCheckFilePath(builder.filePath); - checksumDecoded = decodeAndCheckChecksum(builder.checksum); - } - - private static String decodeAndCheckFilePath(String filePath) - throws WebApplicationException, IllegalArgumentException { - - if (filePath == null) { - throw new IllegalArgumentException( - "Unable to decode file path. Invalid parameters: filePath=" + filePath); - } - String filePathDecoded; - try { - filePathDecoded = URLDecoder.decode(filePath, Constants.ENCODING_SCHEME); - } catch (UnsupportedEncodingException e) { - log.error(e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode filePath paramether, unsupported encoding \'" - + Constants.ENCODING_SCHEME + "\'") - .build()); - } - - log.debug("Decoded filePath = {}", filePathDecoded); - - if (filePathDecoded == null || filePathDecoded.trim().equals("")) { - log - .error("Unable to evaluate permissions. Some parameters are missing : filePath {}", - filePathDecoded); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to manage checksum. Some parameters are missing") - .build()); - } - return filePathDecoded; - } - - private static String decodeAndCheckChecksum(String checksum) - throws WebApplicationException { - - if (checksum == null) - return null; - String checksumDecoded; - try { - checksumDecoded = URLDecoder.decode(checksum, Constants.ENCODING_SCHEME); - } catch (UnsupportedEncodingException e) { - log.error(e.getMessage(), e); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode checksum paramether, unsupported encoding \'" - + Constants.ENCODING_SCHEME + "\'") - .build()); - } - log.debug("Decoded checksum = " + checksumDecoded); - - if (checksum == null || checksum.trim().equals("")) { - log - .error("Unable to evaluate permissions. Some parameters are missing : checksum {}" - , checksumDecoded); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to manage checksum. 
Some parameters are missing") - .build()); - } - return checksumDecoded; - } - - /** - * @return the filePathDecoded - */ - public String getFilePathDecoded() { - - return filePathDecoded; - } - - /** - * @return the filePathDecoded - */ - public String getChecksumDecoded() { - - return checksumDecoded; - } - - public static class Builder { - - private final String filePath; - private String checksum; - - public Builder(String filePath) throws WebApplicationException { - - this.filePath = filePath; - } - - public Builder checksum(String checksum) { - - this.checksum = checksum; - return this; - } - - public RequestParameters build() { - - return new RequestParameters(this); - } - - } + private static final Logger log = LoggerFactory.getLogger(RequestParameters.class); + + private final String filePathDecoded; + + private String checksumDecoded; + + RequestParameters(Builder builder) { + + filePathDecoded = decodeAndCheckFilePath(builder.filePath); + checksumDecoded = decodeAndCheckChecksum(builder.checksum); + } + + private static String decodeAndCheckFilePath(String filePath) + throws WebApplicationException, IllegalArgumentException { + + if (filePath == null) { + throw new IllegalArgumentException( + "Unable to decode file path. Invalid parameters: filePath=" + filePath); + } + String filePathDecoded; + try { + filePathDecoded = URLDecoder.decode(filePath, Constants.ENCODING_SCHEME); + } catch (UnsupportedEncodingException e) { + log.error(e.getMessage(), e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode filePath paramether, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); + } + + log.debug("Decoded filePath = {}", filePathDecoded); + + if (filePathDecoded == null || filePathDecoded.trim().equals("")) { + log.error( + "Unable to evaluate permissions. Some parameters are missing : filePath {}", + filePathDecoded); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to manage checksum. Some parameters are missing") + .build()); + } + return filePathDecoded; + } + + private static String decodeAndCheckChecksum(String checksum) throws WebApplicationException { + + if (checksum == null) return null; + String checksumDecoded; + try { + checksumDecoded = URLDecoder.decode(checksum, Constants.ENCODING_SCHEME); + } catch (UnsupportedEncodingException e) { + log.error(e.getMessage(), e); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode checksum paramether, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); + } + log.debug("Decoded checksum = " + checksumDecoded); + + if (checksum == null || checksum.trim().equals("")) { + log.error( + "Unable to evaluate permissions. Some parameters are missing : checksum {}", + checksumDecoded); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity("Unable to manage checksum. 
Some parameters are missing") + .build()); + } + return checksumDecoded; + } + + /** @return the filePathDecoded */ + public String getFilePathDecoded() { + + return filePathDecoded; + } + + /** @return the filePathDecoded */ + public String getChecksumDecoded() { + + return checksumDecoded; + } + + public static class Builder { + + private final String filePath; + private String checksum; + + public Builder(String filePath) throws WebApplicationException { + + this.filePath = filePath; + } + + public Builder checksum(String checksum) { + + this.checksum = checksum; + return this; + } + + public RequestParameters build() { + + return new RequestParameters(this); + } + } } diff --git a/src/main/java/it/grid/storm/ea/remote/resource/StormEAResource.java b/src/main/java/it/grid/storm/ea/remote/resource/StormEAResource.java index 7e22399d..d87be29b 100644 --- a/src/main/java/it/grid/storm/ea/remote/resource/StormEAResource.java +++ b/src/main/java/it/grid/storm/ea/remote/resource/StormEAResource.java @@ -1,12 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.ea.remote.resource; import static it.grid.storm.checksum.ChecksumAlgorithm.ADLER32; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; +import it.grid.storm.ea.ExtendedAttributesException; +import it.grid.storm.ea.StormEA; +import it.grid.storm.ea.remote.Constants; import javax.ws.rs.GET; import javax.ws.rs.PUT; import javax.ws.rs.Path; @@ -15,85 +17,64 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/* - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010. - * Licensed under the Apache License, Version 2.0 (the "License"); you may not - * use this file except in compliance with the License. You may obtain a copy of - * the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by - * applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS - * OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. 
- */ - -/** -* -*/ - -import it.grid.storm.ea.ExtendedAttributesException; -import it.grid.storm.ea.StormEA; -import it.grid.storm.ea.remote.Constants; - -/** - * @author Michele Dibenedetto - */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION + "/{filePath}") public class StormEAResource { - private static final Logger log = LoggerFactory - .getLogger(StormEAResource.class); + private static final Logger log = LoggerFactory.getLogger(StormEAResource.class); - @GET - @Path("/" + Constants.ADLER_32) - @Produces("text/plain") - public String getAdler32Checksum(@PathParam("filePath") String filePath) - throws WebApplicationException { + @GET + @Path("/" + Constants.ADLER_32) + @Produces("text/plain") + public String getAdler32Checksum(@PathParam("filePath") String filePath) + throws WebApplicationException { - RequestParameters parameters = new RequestParameters.Builder(filePath) - .build(); - log.info("Getting {} checksum for file {}", - Constants.ADLER_32, - parameters.getFilePathDecoded()); + RequestParameters parameters = new RequestParameters.Builder(filePath).build(); + log.info( + "Getting {} checksum for file {}", Constants.ADLER_32, parameters.getFilePathDecoded()); - String checksum; - try { - checksum = StormEA.getChecksum(parameters.getFilePathDecoded(), ADLER32); - } catch (ExtendedAttributesException e) { - log.error(e.getMessage(), e); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to get the checksum, Extended attributes management failure") - .build()); - } - log.info("Checksum for file {} is {}", filePath, checksum); - return checksum; - } + String checksum; + try { + checksum = StormEA.getChecksum(parameters.getFilePathDecoded(), ADLER32); + } catch (ExtendedAttributesException e) { + log.error(e.getMessage(), e); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to get the checksum, Extended attributes management failure") + .build()); + } + log.info("Checksum for file {} is {}", filePath, checksum); + return checksum; + } - @PUT - @Path("/" + Constants.ADLER_32) - @Produces("text/plain") - public void setAdler32Checksum(@PathParam("filePath") String filePath, - @QueryParam(Constants.CHECKSUM_VALUE_KEY) String checksum) - throws WebApplicationException { + @PUT + @Path("/" + Constants.ADLER_32) + @Produces("text/plain") + public void setAdler32Checksum( + @PathParam("filePath") String filePath, + @QueryParam(Constants.CHECKSUM_VALUE_KEY) String checksum) + throws WebApplicationException { - RequestParameters parameters = new RequestParameters.Builder(filePath) - .checksum(checksum).build(); + RequestParameters parameters = + new RequestParameters.Builder(filePath).checksum(checksum).build(); - log.info("Setting {} checksum for file {} with value {}", - Constants.ADLER_32, - parameters.getFilePathDecoded(), - parameters.getChecksumDecoded()); + log.info( + "Setting {} checksum for file {} with value {}", + Constants.ADLER_32, + parameters.getFilePathDecoded(), + parameters.getChecksumDecoded()); - try { - StormEA.setChecksum(parameters.getFilePathDecoded(), parameters.getChecksumDecoded(), ADLER32); - } catch (ExtendedAttributesException e) { - log.error(e.getMessage(), e); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to set the checksum, Extended attributes management failure") - .build()); - } - } -} \ No newline at end of file + try { + StormEA.setChecksum( + parameters.getFilePathDecoded(), parameters.getChecksumDecoded(), 
ADLER32); + } catch (ExtendedAttributesException e) { + log.error(e.getMessage(), e); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to set the checksum, Extended attributes management failure") + .build()); + } + } +} diff --git a/src/main/java/it/grid/storm/filesystem/AclLockPool.java b/src/main/java/it/grid/storm/filesystem/AclLockPool.java index bee313c1..aa57d840 100644 --- a/src/main/java/it/grid/storm/filesystem/AclLockPool.java +++ b/src/main/java/it/grid/storm/filesystem/AclLockPool.java @@ -1,37 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file AclLockPool.java * @author Riccardo Murri - * - * The it.grid.storm.filesystem.AclLockPool class + *
<p>
The it.grid.storm.filesystem.AclLockPool class */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ package it.grid.storm.filesystem; -import it.grid.storm.filesystem.AclLockPoolElement; - import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** - * Maps path names to lock objects. Expect each map value to maintain a usage - * count; on {@link #remove(String)}, the usage count is checked, and the entry - * is effectively removed only if the usage count has dropped at -or below- - * zero. - * - *
<p>
This class' purpose is to provide a shared storage for lock objects used - * by the {@link it.grid.storm.filesystem.File} class. - * + * Maps path names to lock objects. Expect each map value to maintain a usage count; on {@link + * #remove(String)}, the usage count is checked, and the entry is effectively removed only if the + * usage count has dropped at -or below- zero. + * + *
<p>
This class' purpose is to provide a shared storage for lock objects used by the {@link + * it.grid.storm.filesystem.File} class. + * * @author Riccardo Murri @version $Revision: 1.6 $ */ class AclLockPool { @@ -39,40 +34,41 @@ class AclLockPool { // ---- constructors ---- // /** - * Creates a new, empty pool with the specified initial capacity, load factor, - * and concurrencyLevel. - * + * Creates a new, empty pool with the specified initial capacity, load factor, and + * concurrencyLevel. + * * @see java.util.concurrent.ConcurrentHashMap; */ - AclLockPool(final int initialCapacity, final float loadFactor, - final int concurrencyLevel) { + AclLockPool(final int initialCapacity, final float loadFactor, final int concurrencyLevel) { - assert (initialCapacity >= 0) : "Negative initialCapacity passed to AclLockPool(int,float,int) constructor"; - assert (loadFactor >= 0) : "Negative loadFactor passed to AclLockPool(int,float,int) constructor"; - assert (concurrencyLevel >= 0) : "Negative concurrencyLevel passed to AclLockPool(int,float,int) constructor"; + assert (initialCapacity >= 0) + : "Negative initialCapacity passed to AclLockPool(int,float,int) constructor"; + assert (loadFactor >= 0) + : "Negative loadFactor passed to AclLockPool(int,float,int) constructor"; + assert (concurrencyLevel >= 0) + : "Negative concurrencyLevel passed to AclLockPool(int,float,int) constructor"; - __map = new ConcurrentHashMap(initialCapacity, loadFactor, - concurrencyLevel); + __map = new ConcurrentHashMap(initialCapacity, loadFactor, concurrencyLevel); } /** - * Creates a new, empty pool with the specified initial capacity, and the - * default load factor and concurrencyLevel (from {@link - * java.util.concurrent.ConcurrentHashMap}) - * + * Creates a new, empty pool with the specified initial capacity, and the default load factor and + * concurrencyLevel (from {@link java.util.concurrent.ConcurrentHashMap}) + * * @see java.util.concurrent.ConcurrentHashMap; */ AclLockPool(final int initialCapacity) { - assert (initialCapacity >= 0) : "Negative initialCapacity passed to AclLockPool(int,float) constructor"; + assert (initialCapacity >= 0) + : "Negative initialCapacity passed to AclLockPool(int,float) constructor"; __map = new ConcurrentHashMap(initialCapacity); } /** - * Creates a new, empty pool with the default initial capacity, load factor - * and concurrencyLevel (from {@link java.util.concurrent.ConcurrentHashMap}) - * + * Creates a new, empty pool with the default initial capacity, load factor and concurrencyLevel + * (from {@link java.util.concurrent.ConcurrentHashMap}) + * * @see java.util.concurrent.ConcurrentHashMap; */ AclLockPool() { @@ -83,41 +79,35 @@ class AclLockPool { // --- public methods --- // /** - * Return the lock object associated with the given path name; if the map - * contains no lock for the given pathname, a new one is created and returned. - * The usage counter for the associated element is incremented, so {@link - * get()} invocations should match exactly {@link remove(String)} invocations. - * + * Return the lock object associated with the given path name; if the map contains no lock for the + * given pathname, a new one is created and returned. The usage counter for the associated element + * is incremented, so {@link get()} invocations should match exactly {@link remove(String)} + * invocations. 
*/ - synchronized public AclLockPoolElement get(final String pathname) { + public synchronized AclLockPoolElement get(final String pathname) { - if (!__map.containsKey(pathname)) - __map.put(pathname, new AclLockPoolElement()); + if (!__map.containsKey(pathname)) __map.put(pathname, new AclLockPoolElement()); AclLockPoolElement lock = __map.get(pathname); lock.incrementUsageCount(); return lock; } /** - * Remove the element associated with the given path name. The usage counter - * associated with the given pathname is decremented; if it drops at zero, the - * associated element is effectively removed from the map. + * Remove the element associated with the given path name. The usage counter associated with the + * given pathname is decremented; if it drops at zero, the associated element is effectively + * removed from the map. */ - synchronized public void remove(final String pathname) { + public synchronized void remove(final String pathname) { AclLockPoolElement e = __map.get(pathname); if (null != e) { int count = e.decrementUsageCountAndGetIt(); - if (0 >= count) - __map.remove(pathname); + if (0 >= count) __map.remove(pathname); } } - /** - * Return true if an element is assciated with the given path - * name. - */ - synchronized public boolean contains(final String pathname) { + /** Return true if an element is assciated with the given path name. */ + public synchronized boolean contains(final String pathname) { return __map.containsKey(pathname); } diff --git a/src/main/java/it/grid/storm/filesystem/AclLockPoolElement.java b/src/main/java/it/grid/storm/filesystem/AclLockPoolElement.java index d6701991..5ae245c2 100644 --- a/src/main/java/it/grid/storm/filesystem/AclLockPoolElement.java +++ b/src/main/java/it/grid/storm/filesystem/AclLockPoolElement.java @@ -1,81 +1,76 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file AclLockPoolElement.java * @author Riccardo Murri - * - * The it.grid.storm.filesystem.AclLockPoolElement class + *
<p>
The it.grid.storm.filesystem.AclLockPoolElement class */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ package it.grid.storm.filesystem; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicInteger; /** * Usage-counted semaphore object. - * - *
<p>
- * Each {@link #incrementUsageCountAndReturnSelf()} request increments the usage - * counter, and each {@link #decrementUsageCountAndGetIt()} request decrements - * it. - * + * + *
<p>
Each {@link #incrementUsageCountAndReturnSelf()} request increments the usage counter, and + * each {@link #decrementUsageCountAndGetIt()} request decrements it. + * * @author Riccardo Murri * @version $Revision: 1.5 $ */ class AclLockPoolElement extends Semaphore { - // ---- constructors ---- + // ---- constructors ---- - /** - * Default constructor. The semaphore is initialized for allowing only 1 - * permit at a time (thus serializing accesses through the acquire() and - * release() calls), and with the default fairness setting. The usage count is - * initialized to 0. - * - * @see java.util.concurrent.AtomicInteger; - * @see java.util.concurrent.Semaphore; - */ - public AclLockPoolElement() { + /** + * Default constructor. The semaphore is initialized for allowing only 1 permit at a time (thus + * serializing accesses through the acquire() and release() calls), and with the default fairness + * setting. The usage count is initialized to 0. + * + * @see java.util.concurrent.AtomicInteger; + * @see java.util.concurrent.Semaphore; + */ + public AclLockPoolElement() { - super(1); - usageCount = new AtomicInteger(); - } + super(1); + usageCount = new AtomicInteger(); + } - // --- public methods --- + // --- public methods --- - /** - * Return the lock object associated with the given file name, or create a new - * one if no mapping for the given path name is already in this map. - */ - public void incrementUsageCount() { + /** + * Return the lock object associated with the given file name, or create a new one if no mapping + * for the given path name is already in this map. + */ + public void incrementUsageCount() { - usageCount.incrementAndGet(); - } + usageCount.incrementAndGet(); + } - /** Return the stored usage count. */ - public int getUsageCount() { + /** Return the stored usage count. */ + public int getUsageCount() { - return usageCount.intValue(); - } + return usageCount.intValue(); + } - /** Decrement the stored usage count. */ - public int decrementUsageCountAndGetIt() { + /** Decrement the stored usage count. */ + public int decrementUsageCountAndGetIt() { - return usageCount.decrementAndGet(); - } + return usageCount.decrementAndGet(); + } - // --- private instance variables --- // + // --- private instance variables --- // - /** Usage counter. */ - private final AtomicInteger usageCount; + /** Usage counter. */ + private final AtomicInteger usageCount; } diff --git a/src/main/java/it/grid/storm/filesystem/CannotGiveAway.java b/src/main/java/it/grid/storm/filesystem/CannotGiveAway.java index 432d55eb..096cb172 100644 --- a/src/main/java/it/grid/storm/filesystem/CannotGiveAway.java +++ b/src/main/java/it/grid/storm/filesystem/CannotGiveAway.java @@ -1,40 +1,36 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file CannotGiveAway.java * @author Riccardo Murri - * - * Source file for class CannotGiveAway - * + *

Source file for class CannotGiveAway */ /* * Copyright (c) 2006, Riccardo Murri for the * EGRID/INFN joint project StoRM. - * + * * You may copy, distribute and modify this file under the terms of the * LICENSE.txt file at the root of the StoRM backend source tree. - * + * * $Id: CannotGiveAway.java,v 1.1 2006/03/31 13:35:01 rmurri Exp $ */ package it.grid.storm.filesystem; /** - * Thrown when the StoRM process has insufficient privileges to change ownership - * of a file. - * - * Ownership change is a privileged operation on most POSIX systems, which - * usually requires "root" privileges. - * + * Thrown when the StoRM process has insufficient privileges to change ownership of a file. + * + *
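As a purely illustrative aside, the condition this exception signals can be pictured with a small guard like the one below; the helper is hypothetical and not code from this patch (the real native drivers decide when CannotGiveAway is raised, and they would inspect the effective UID rather than a system property), it only assumes the CannotGiveAway(String) constructor shown in this file and is imagined to live next to the classes in it.grid.storm.filesystem.

  // Illustrative guard only: "requireRootOwnershipChange" is hypothetical.
  static void requireRootOwnershipChange(String path) throws CannotGiveAway {
    // Very rough check; a native driver would look at the effective UID instead.
    if (!"root".equals(System.getProperty("user.name"))) {
      throw new CannotGiveAway(
          "Cannot change ownership of " + path + ": StoRM backend is not running as root");
    }
  }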

Ownership change is a privileged operation on most POSIX systems, which usually requires + * "root" privileges. + * * @author Riccardo Murri * @version $Revision: 1.1 $ */ public class CannotGiveAway extends FilesystemError { - public CannotGiveAway(final String msg) { + public CannotGiveAway(final String msg) { - super(msg); - } + super(msg); + } } diff --git a/src/main/java/it/grid/storm/filesystem/FSException.java b/src/main/java/it/grid/storm/filesystem/FSException.java index df91a7c2..7bf87fad 100644 --- a/src/main/java/it/grid/storm/filesystem/FSException.java +++ b/src/main/java/it/grid/storm/filesystem/FSException.java @@ -1,29 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; public class FSException extends Exception { - public FSException() { + public FSException() { - super(); - } + super(); + } - public FSException(String message) { + public FSException(String message) { - super(message); - } + super(message); + } - public FSException(String message, Throwable cause) { + public FSException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public FSException(Throwable cause) { - - super(cause); - } + public FSException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemChecker.java b/src/main/java/it/grid/storm/filesystem/FileSystemChecker.java index 24b60786..ea4ce517 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemChecker.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemChecker.java @@ -1,23 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import java.io.File; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public interface FileSystemChecker { - /** - * @param file - * @return - * @throws IllegalArgumentException - * if File is null - */ - public boolean isGPFS(File file) throws IllegalArgumentException, - FileSystemCheckerException; + /** + * @param file + * @return + * @throws IllegalArgumentException if File is null + */ + public boolean isGPFS(File file) throws IllegalArgumentException, FileSystemCheckerException; } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerException.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerException.java index 7a25675f..3958da29 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerException.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerException.java @@ -1,18 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerException extends Exception { - public FileSystemCheckerException(String message) { - - super(message); + public FileSystemCheckerException(String message) { - } + super(message); + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFactory.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFactory.java index e32ba8be..bba2558d 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFactory.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFactory.java @@ -1,123 +1,117 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerFactory { - private static Logger log = LoggerFactory - .getLogger(FileSystemCheckerFactory.class); - - public enum FileSystemCheckerType { - Mtab, Mounts - } + private static Logger log = LoggerFactory.getLogger(FileSystemCheckerFactory.class); - private final FileSystemCheckerType chosenType; - private static final FileSystemCheckerType defaultType = FileSystemCheckerType.Mtab; + public enum FileSystemCheckerType { + Mtab, + Mounts + } - private static FileSystemCheckerFactory instance = null; + private final FileSystemCheckerType chosenType; + private static final FileSystemCheckerType defaultType = FileSystemCheckerType.Mtab; - /** - * @param type - */ - private FileSystemCheckerFactory(FileSystemCheckerType type) { + private static FileSystemCheckerFactory instance = null; - chosenType = type; - } + /** @param type */ + private FileSystemCheckerFactory(FileSystemCheckerType type) { - /** - * Singleton getter method - * - * @return the class instance already created by a call to the init method, - * creates a new one using the defaultType otherwise - */ - public static FileSystemCheckerFactory getInstance() { + chosenType = type; + } - if (instance == null) { - log - .info("FileSystemCheckerFactory not explicitaly initialized, " - + "using default checker type :{}", defaultType); - init(defaultType); - } - return instance; - } + /** + * Singleton getter method + * + * @return the class instance already created by a call to the init method, creates a new one + * using the defaultType otherwise + */ + public static FileSystemCheckerFactory getInstance() { - /** - * Initializes the class by creating the singleton instance if not already - * done, does nothing if it already exists and has the chosenType is the same - * as the provided FileSystemCheckerType throws an IllegalStateException - * otherwise - * - * @param type - * @throws IllegalArgumentException - * if type is null - * @throws IllegalStateException - * if class already initialized with a different - * FileSystemCheckerType - */ - public static void init(FileSystemCheckerType type) - throws IllegalArgumentException, IllegalStateException { + if (instance == null) { + log.info( + "FileSystemCheckerFactory not explicitaly initialized, " + + "using default checker type :{}", + defaultType); + init(defaultType); + } + return instance; + } - if (type == null) { - log - .error("Unable to init FileSystemCheckerFactory. 
Received null FileSystemCheckerType parameter!"); - throw new IllegalArgumentException( - "Received null FileSystemCheckerType parameter!"); - } - if (instance == null) { - instance = new FileSystemCheckerFactory(type); - } else { - if (!instance.chosenType.equals(type)) { - log - .warn("FileSystemCheckerFactory already initialized for {}. " - + "Cannot initialize it again for {}.", - instance.chosenType, - type); - throw new IllegalStateException( - "Asked to initialize the already initialized FileSystemCheckerFactory " - + "with FileSystemCheckerType " + type - + ". Current FileSystemCheckerType is " + instance.chosenType); - } else { - log - .info("Asked to re-initialize the already initialized FileSystemCheckerFactory, nothing to do"); - } - } - } + /** + * Initializes the class by creating the singleton instance if not already done, does nothing if + * it already exists and has the chosenType is the same as the provided FileSystemCheckerType + * throws an IllegalStateException otherwise + * + * @param type + * @throws IllegalArgumentException if type is null + * @throws IllegalStateException if class already initialized with a different + * FileSystemCheckerType + */ + public static void init(FileSystemCheckerType type) + throws IllegalArgumentException, IllegalStateException { - /** - * Creates the proper FileSystemChecker implementation object using the - * chosenType available attribute - * - * @return - */ - public FileSystemChecker createFileSystemChecker() - throws IllegalStateException, FileSystemCheckerException { + if (type == null) { + log.error( + "Unable to init FileSystemCheckerFactory. Received null FileSystemCheckerType parameter!"); + throw new IllegalArgumentException("Received null FileSystemCheckerType parameter!"); + } + if (instance == null) { + instance = new FileSystemCheckerFactory(type); + } else { + if (!instance.chosenType.equals(type)) { + log.warn( + "FileSystemCheckerFactory already initialized for {}. " + + "Cannot initialize it again for {}.", + instance.chosenType, + type); + throw new IllegalStateException( + "Asked to initialize the already initialized FileSystemCheckerFactory " + + "with FileSystemCheckerType " + + type + + ". Current FileSystemCheckerType is " + + instance.chosenType); + } else { + log.info( + "Asked to re-initialize the already initialized FileSystemCheckerFactory, nothing to do"); + } + } + } - switch (this.chosenType) { - case Mtab: - return FileSystemCheckerMtabMonolithic.getInstance(); - case Mounts: - return FileSystemCheckerMountsMonolithic.getInstance(); - default: - log - .error("No correct FileSystemChecker setted : " - + this.chosenType - + " unable to create the FileSystemChecker. Available FileSystemCheckerType : " - + FileSystemCheckerFactory.FileSystemCheckerType.values() - + " Please contact StoRM developers"); - throw new IllegalStateException( - "No correct FileSystemCheckerType setted : " + this.chosenType - + ". 
Available FileSystemCheckerType : " - + FileSystemCheckerFactory.FileSystemCheckerType.values() - + " Please contact StoRM developers"); - } - } + /** + * Creates the proper FileSystemChecker implementation object using the chosenType available + * attribute + * + * @return + */ + public FileSystemChecker createFileSystemChecker() + throws IllegalStateException, FileSystemCheckerException { + switch (this.chosenType) { + case Mtab: + return FileSystemCheckerMtabMonolithic.getInstance(); + case Mounts: + return FileSystemCheckerMountsMonolithic.getInstance(); + default: + log.error( + "No correct FileSystemChecker setted : " + + this.chosenType + + " unable to create the FileSystemChecker. Available FileSystemCheckerType : " + + FileSystemCheckerFactory.FileSystemCheckerType.values() + + " Please contact StoRM developers"); + throw new IllegalStateException( + "No correct FileSystemCheckerType setted : " + + this.chosenType + + ". Available FileSystemCheckerType : " + + FileSystemCheckerFactory.FileSystemCheckerType.values() + + " Please contact StoRM developers"); + } + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFromFile.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFromFile.java index 52d420d2..1910fac2 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFromFile.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerFromFile.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -14,169 +13,156 @@ import java.util.List; import org.slf4j.Logger; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ abstract class FileSystemCheckerFromFile implements FileSystemChecker { - private final Logger log; - private List GPFSMountPoints = null; - private long initInstant = 0L; - private static final String GPFS_FILESYSTEM_NAME = "gpfs"; - - protected FileSystemCheckerFromFile(Logger log) { - this.log = log; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) - */ - public boolean isGPFS(File file) throws IllegalArgumentException, - FileSystemCheckerException { - - if (file == null) { - log.error("IsGPFS method invoked with null File parameter!"); - throw new IllegalArgumentException("Provided null File argument"); - } - tryInit(); - return this.evaluate(file.getAbsolutePath()); - } - - protected synchronized void tryInit() throws FileSystemCheckerException { - - if (this.refreshNeeded()) { - this.init(); - } - } - - /** - * Checks is /etc/mtab file has been modified since last initialization - * - * @return true if a call of init() method is needed - */ - private boolean refreshNeeded() { - - boolean response = false; - if (initInstant == 0L - || initInstant < new File(getFilePath()).lastModified()) { - response = true; - } - return response; - } - - /** - * Initializes the object setting /etc/mtab parsing instant and the list of - * GPFS mount points - */ - private void init() throws FileSystemCheckerException { - - this.initInstant = Calendar.getInstance().getTimeInMillis(); - this.GPFSMountPoints = listGPFSMountPoints(); - } - - /** - * Checks if file path filePath belongs to one of the stored GPFS mount points - * - * @param filePath - * the file path to be checked - * - * @return true if file path filePath is on a GPFS mount 
points - */ - private synchronized boolean evaluate(String filePath) { - - boolean response = false; - for (String GPFSMountPoint : this.GPFSMountPoints) { - if (filePath.startsWith(GPFSMountPoint)) { - response = true; - break; - } - } - return response; - } - - /** - * Parse /etc/mtab file and retrieves all GPFS mount points - * - * @return a list of GPFS mount points - */ - private List listGPFSMountPoints() throws FileSystemCheckerException { - - LinkedList mountPointList = new LinkedList(); - BufferedReader mtab; - try { - mtab = new BufferedReader(new FileReader(getFilePath())); - } catch (FileNotFoundException e) { - log.error(e.getMessage(),e); - - throw new FileSystemCheckerException( - "Error while trying to create a reader for mtab file at " - + getFilePath() + ". FileNotFoundException : " + e.getMessage()); - } - String line; - try { - while ((line = mtab.readLine()) != null) { - if (this.skipLine(line)) { - continue; - } - LinkedList elementsList = tokenizeLine(line); - if (elementsList.get(getFsNameIndex()).equals(GPFS_FILESYSTEM_NAME)) { - mountPointList.add(elementsList.get(getMountPointIndex())); - } - } - } catch (IOException e) { - log.error(e.getMessage(), e); - throw new FileSystemCheckerException( - "Error while trying to read mtab file at " + getFilePath() - + ". IOException : " + e.getMessage()); - } - return mountPointList; - } - - /** - * Provides the path of file containing GPFS mount points - * - * @return the path of a file containing GPFS mount points - */ - protected abstract String getFilePath(); - - /** - * Provides the index of file system name in the list provided by method - * tokenizeLine - * - * @return the index of file system name in a tokenized list - */ - protected abstract int getFsNameIndex(); - - /** - * Provides the index of file mount point in the list provided by method - * tokenizeLine - * - * @return the index of file mount point in a tokenized list - */ - protected abstract int getMountPointIndex(); - - /** - * Tokenizes a line putting in a list all the strings from the line related to - * mounted partitions - * - * @param line - * a line from mounted partitions file containing informations about - * mounted partition - * @return a list of strings containing space-free informations about mounted - * partitions - */ - protected abstract LinkedList tokenizeLine(String line); - - /** - * Checks if the provided line has to be skipped because contains information - * not concerning to mounted partition - * - * @param a - * string line from mounted partitions file - * @return true if the line has to be skipped, true otherwise - */ - protected abstract boolean skipLine(String line); -} \ No newline at end of file + private final Logger log; + private List GPFSMountPoints = null; + private long initInstant = 0L; + private static final String GPFS_FILESYSTEM_NAME = "gpfs"; + + protected FileSystemCheckerFromFile(Logger log) { + this.log = log; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) + */ + public boolean isGPFS(File file) throws IllegalArgumentException, FileSystemCheckerException { + + if (file == null) { + log.error("IsGPFS method invoked with null File parameter!"); + throw new IllegalArgumentException("Provided null File argument"); + } + tryInit(); + return this.evaluate(file.getAbsolutePath()); + } + + protected synchronized void tryInit() throws FileSystemCheckerException { + + if (this.refreshNeeded()) { + this.init(); + } + } + + /** + * Checks is /etc/mtab file has been 
modified since last initialization + * + * @return true if a call of init() method is needed + */ + private boolean refreshNeeded() { + + boolean response = false; + if (initInstant == 0L || initInstant < new File(getFilePath()).lastModified()) { + response = true; + } + return response; + } + + /** Initializes the object setting /etc/mtab parsing instant and the list of GPFS mount points */ + private void init() throws FileSystemCheckerException { + + this.initInstant = Calendar.getInstance().getTimeInMillis(); + this.GPFSMountPoints = listGPFSMountPoints(); + } + + /** + * Checks if file path filePath belongs to one of the stored GPFS mount points + * + * @param filePath the file path to be checked + * @return true if file path filePath is on a GPFS mount points + */ + private synchronized boolean evaluate(String filePath) { + + boolean response = false; + for (String GPFSMountPoint : this.GPFSMountPoints) { + if (filePath.startsWith(GPFSMountPoint)) { + response = true; + break; + } + } + return response; + } + + /** + * Parse /etc/mtab file and retrieves all GPFS mount points + * + * @return a list of GPFS mount points + */ + private List listGPFSMountPoints() throws FileSystemCheckerException { + + LinkedList mountPointList = new LinkedList(); + BufferedReader mtab; + try { + mtab = new BufferedReader(new FileReader(getFilePath())); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + + throw new FileSystemCheckerException( + "Error while trying to create a reader for mtab file at " + + getFilePath() + + ". FileNotFoundException : " + + e.getMessage()); + } + String line; + try { + while ((line = mtab.readLine()) != null) { + if (this.skipLine(line)) { + continue; + } + LinkedList elementsList = tokenizeLine(line); + if (elementsList.get(getFsNameIndex()).equals(GPFS_FILESYSTEM_NAME)) { + mountPointList.add(elementsList.get(getMountPointIndex())); + } + } + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new FileSystemCheckerException( + "Error while trying to read mtab file at " + + getFilePath() + + ". 
IOException : " + + e.getMessage()); + } + return mountPointList; + } + + /** + * Provides the path of file containing GPFS mount points + * + * @return the path of a file containing GPFS mount points + */ + protected abstract String getFilePath(); + + /** + * Provides the index of file system name in the list provided by method tokenizeLine + * + * @return the index of file system name in a tokenized list + */ + protected abstract int getFsNameIndex(); + + /** + * Provides the index of file mount point in the list provided by method tokenizeLine + * + * @return the index of file mount point in a tokenized list + */ + protected abstract int getMountPointIndex(); + + /** + * Tokenizes a line putting in a list all the strings from the line related to mounted partitions + * + * @param line a line from mounted partitions file containing informations about mounted partition + * @return a list of strings containing space-free informations about mounted partitions + */ + protected abstract LinkedList tokenizeLine(String line); + + /** + * Checks if the provided line has to be skipped because contains information not concerning to + * mounted partition + * + * @param a string line from mounted partitions file + * @return true if the line has to be skipped, true otherwise + */ + protected abstract boolean skipLine(String line); +} diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMounts.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMounts.java index b850f1f2..d15f6bc6 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMounts.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMounts.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -9,103 +8,94 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerMounts extends FileSystemCheckerFromFile { - private static final Logger log = LoggerFactory - .getLogger(FileSystemCheckerMounts.class); - private static final String MOUNTS_FILE_PATH = "/proc/mounts"; - - private static final FileSystemCheckerMounts instance = new FileSystemCheckerMounts(); - - /** - * Singleton private constructor - */ - private FileSystemCheckerMounts() { - - super(log); - } - - /** - * Singleton instance getter. 
initialize the instance if needed - * - * @return singleton instance - */ - public static FileSystemCheckerMounts getInstance() - throws FileSystemCheckerException { - - instance.tryInit(); - return instance; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFilePath() - */ - @Override - protected String getFilePath() { - - return MOUNTS_FILE_PATH; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFsNameIndex() - */ - @Override - protected int getFsNameIndex() { - - return 2; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#getMountPointIndex() - */ - @Override - protected int getMountPointIndex() { - - return 1; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#skipLine(java.lang.String - * ) - */ - @Override - protected boolean skipLine(String line) { - - if (line.startsWith("#") || !line.startsWith("/dev/")) { - return true; - } - return false; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#tokenizeLine(java.lang - * .String) - */ - @Override - protected LinkedList tokenizeLine(String line) { - - String[] elementsArray = line.split(" "); - LinkedList elementsList = new LinkedList( - Arrays.asList(elementsArray)); - while (elementsList.remove("")) { - } - return elementsList; - } + private static final Logger log = LoggerFactory.getLogger(FileSystemCheckerMounts.class); + private static final String MOUNTS_FILE_PATH = "/proc/mounts"; + + private static final FileSystemCheckerMounts instance = new FileSystemCheckerMounts(); + + /** Singleton private constructor */ + private FileSystemCheckerMounts() { + + super(log); + } + + /** + * Singleton instance getter. 
initialize the instance if needed + * + * @return singleton instance + */ + public static FileSystemCheckerMounts getInstance() throws FileSystemCheckerException { + + instance.tryInit(); + return instance; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFilePath() + */ + @Override + protected String getFilePath() { + + return MOUNTS_FILE_PATH; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFsNameIndex() + */ + @Override + protected int getFsNameIndex() { + + return 2; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#getMountPointIndex() + */ + @Override + protected int getMountPointIndex() { + + return 1; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#skipLine(java.lang.String + * ) + */ + @Override + protected boolean skipLine(String line) { + + if (line.startsWith("#") || !line.startsWith("/dev/")) { + return true; + } + return false; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#tokenizeLine(java.lang + * .String) + */ + @Override + protected LinkedList tokenizeLine(String line) { + + String[] elementsArray = line.split(" "); + LinkedList elementsList = new LinkedList(Arrays.asList(elementsArray)); + while (elementsList.remove("")) {} + return elementsList; + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMountsMonolithic.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMountsMonolithic.java index 7b8f5718..c49d60d6 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMountsMonolithic.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMountsMonolithic.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -16,163 +15,151 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerMountsMonolithic implements FileSystemChecker { - private static final Logger log = LoggerFactory - .getLogger(FileSystemCheckerMountsMonolithic.class); - - private static final String GPFS_FILESYSTEM_NAME = "gpfs"; - private static final String MOUNTS_FILE_PATH = "/proc/mounts"; - - private static FileSystemCheckerMountsMonolithic instance = new FileSystemCheckerMountsMonolithic(); - private List GPFSMountPoints = null; - private long initInstant = 0L; - - /** - * Singleton private constructor - */ - private FileSystemCheckerMountsMonolithic() { - - super(); - } - - /** - * Singleton instance getter. 
initialize the instance if needed - * - * @return singleton instance - */ - public static FileSystemCheckerMountsMonolithic getInstance() - throws FileSystemCheckerException { - - synchronized (instance) { - if (instance.refreshNeeded()) { - instance.init(); - } - } - return instance; - } - - /** - * Initializes the object setting /etc/mtab parsing instant and the list of - * GPFS mount points - */ - private synchronized void init() throws FileSystemCheckerException { - - this.initInstant = Calendar.getInstance().getTimeInMillis(); - this.GPFSMountPoints = listGPFSMountPoints(); - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) - */ - public boolean isGPFS(File file) throws IllegalArgumentException, - FileSystemCheckerException { - - if (file == null) { - log.error("IsGPFS method invoked with null File parameter!"); - throw new IllegalArgumentException("Provided null File argument"); - } - synchronized (instance) { - if (refreshNeeded()) { - this.init(); - } - } - return this.evaluate(file.getAbsolutePath()); - } - - /** - * Checks is /etc/mtab file has been modified since last initialization - * - * @return true if a call of init() method is needed - */ - private synchronized boolean refreshNeeded() { - - boolean response = false; - if (initInstant == 0L - || initInstant < new File(MOUNTS_FILE_PATH).lastModified()) { - response = true; - } - return response; - } - - /** - * Checks if file path filePath belongs to one of the stored GPFS mount points - * - * @param filePath - * the file path to be checked - * - * @return true if file path filePath is on a GPFS mount points - */ - private boolean evaluate(String filePath) { - - boolean response = false; - for (String GPFSMountPoint : this.GPFSMountPoints) { - if (filePath.startsWith(GPFSMountPoint)) { - response = true; - break; - } - } - return response; - } - - /** - * Parse /etc/mtab file and retrieves all GPFS mount points - * - * @return a list of GPFS mount points - */ - private static List listGPFSMountPoints() - throws FileSystemCheckerException { - - LinkedList mountPointList = new LinkedList(); - BufferedReader mtab; - try { - mtab = new BufferedReader(new FileReader(MOUNTS_FILE_PATH)); - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - throw new FileSystemCheckerException( - "Error while trying to create a reader for mtab file at " - + MOUNTS_FILE_PATH + ". FileNotFoundException : " + e.getMessage()); - } - String line; - try { - while ((line = mtab.readLine()) != null) { - if (line.startsWith("#") || !line.startsWith("/dev/")) { - continue; - } - LinkedList elementsList = tokenizeLine(line); - if (elementsList.get(2).equals(GPFS_FILESYSTEM_NAME)) { - mountPointList.add(elementsList.get(1)); - } - } - } catch (IOException e) { - log.error(e.getMessage(), e); - throw new FileSystemCheckerException( - "Error while trying to read mtab file at " + MOUNTS_FILE_PATH - + ". 
IOException : " + e.getMessage()); - } - return mountPointList; - } - - /** - * Transform the received string in a list of non spaced strings - * - * @param line - * a string - * @return a list of strings without spaces - */ - private static LinkedList tokenizeLine(String line) { - - String[] elementsArray = line.split(" "); - LinkedList elementsList = new LinkedList( - Arrays.asList(elementsArray)); - while (elementsList.remove("")) { - } - return elementsList; - } + private static final Logger log = + LoggerFactory.getLogger(FileSystemCheckerMountsMonolithic.class); + + private static final String GPFS_FILESYSTEM_NAME = "gpfs"; + private static final String MOUNTS_FILE_PATH = "/proc/mounts"; + + private static FileSystemCheckerMountsMonolithic instance = + new FileSystemCheckerMountsMonolithic(); + private List GPFSMountPoints = null; + private long initInstant = 0L; + + /** Singleton private constructor */ + private FileSystemCheckerMountsMonolithic() { + + super(); + } + + /** + * Singleton instance getter. initialize the instance if needed + * + * @return singleton instance + */ + public static FileSystemCheckerMountsMonolithic getInstance() throws FileSystemCheckerException { + + synchronized (instance) { + if (instance.refreshNeeded()) { + instance.init(); + } + } + return instance; + } + + /** Initializes the object setting /etc/mtab parsing instant and the list of GPFS mount points */ + private synchronized void init() throws FileSystemCheckerException { + + this.initInstant = Calendar.getInstance().getTimeInMillis(); + this.GPFSMountPoints = listGPFSMountPoints(); + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) + */ + public boolean isGPFS(File file) throws IllegalArgumentException, FileSystemCheckerException { + + if (file == null) { + log.error("IsGPFS method invoked with null File parameter!"); + throw new IllegalArgumentException("Provided null File argument"); + } + synchronized (instance) { + if (refreshNeeded()) { + this.init(); + } + } + return this.evaluate(file.getAbsolutePath()); + } + + /** + * Checks is /etc/mtab file has been modified since last initialization + * + * @return true if a call of init() method is needed + */ + private synchronized boolean refreshNeeded() { + + boolean response = false; + if (initInstant == 0L || initInstant < new File(MOUNTS_FILE_PATH).lastModified()) { + response = true; + } + return response; + } + + /** + * Checks if file path filePath belongs to one of the stored GPFS mount points + * + * @param filePath the file path to be checked + * @return true if file path filePath is on a GPFS mount points + */ + private boolean evaluate(String filePath) { + + boolean response = false; + for (String GPFSMountPoint : this.GPFSMountPoints) { + if (filePath.startsWith(GPFSMountPoint)) { + response = true; + break; + } + } + return response; + } + + /** + * Parse /etc/mtab file and retrieves all GPFS mount points + * + * @return a list of GPFS mount points + */ + private static List listGPFSMountPoints() throws FileSystemCheckerException { + + LinkedList mountPointList = new LinkedList(); + BufferedReader mtab; + try { + mtab = new BufferedReader(new FileReader(MOUNTS_FILE_PATH)); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + throw new FileSystemCheckerException( + "Error while trying to create a reader for mtab file at " + + MOUNTS_FILE_PATH + + ". 
FileNotFoundException : " + + e.getMessage()); + } + String line; + try { + while ((line = mtab.readLine()) != null) { + if (line.startsWith("#") || !line.startsWith("/dev/")) { + continue; + } + LinkedList elementsList = tokenizeLine(line); + if (elementsList.get(2).equals(GPFS_FILESYSTEM_NAME)) { + mountPointList.add(elementsList.get(1)); + } + } + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new FileSystemCheckerException( + "Error while trying to read mtab file at " + + MOUNTS_FILE_PATH + + ". IOException : " + + e.getMessage()); + } + return mountPointList; + } + + /** + * Transform the received string in a list of non spaced strings + * + * @param line a string + * @return a list of strings without spaces + */ + private static LinkedList tokenizeLine(String line) { + + String[] elementsArray = line.split(" "); + LinkedList elementsList = new LinkedList(Arrays.asList(elementsArray)); + while (elementsList.remove("")) {} + return elementsList; + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtab.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtab.java index 0de65d57..8ff15d28 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtab.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtab.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -8,94 +7,87 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerMtab extends FileSystemCheckerFromFile { - private static final Logger log = LoggerFactory - .getLogger(FileSystemCheckerMtab.class); - - private static FileSystemCheckerMtab instance = new FileSystemCheckerMtab(); - - /** - * Singleton private constructor - */ - private FileSystemCheckerMtab() { - - super(log); - } - - /** - * Singleton instance getter. 
initialize the instance if needed - * - * @return singleton instance - */ - public static FileSystemCheckerMtab getInstance() - throws FileSystemCheckerException { - - instance.tryInit(); - return instance; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFilePath() - */ - @Override - protected String getFilePath() { - - return MtabUtil.getFilePath(); - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFsNameIndex() - */ - @Override - protected int getFsNameIndex() { - - return MtabUtil.getFsNameIndex(); - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#getMountPointIndex() - */ - @Override - protected int getMountPointIndex() { - - return MtabUtil.getMountPointIndex(); - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#skipLine(java.lang.String - * ) - */ - @Override - protected boolean skipLine(String line) { - - return MtabUtil.skipLineForMountPoints(line); - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.filesystem.FileSystemCheckerFromFile#tokenizeLine(java.lang - * .String) - */ - @Override - protected LinkedList tokenizeLine(String line) { - - return MtabUtil.tokenizeLine(line); - } + private static final Logger log = LoggerFactory.getLogger(FileSystemCheckerMtab.class); + + private static FileSystemCheckerMtab instance = new FileSystemCheckerMtab(); + + /** Singleton private constructor */ + private FileSystemCheckerMtab() { + + super(log); + } + + /** + * Singleton instance getter. initialize the instance if needed + * + * @return singleton instance + */ + public static FileSystemCheckerMtab getInstance() throws FileSystemCheckerException { + + instance.tryInit(); + return instance; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFilePath() + */ + @Override + protected String getFilePath() { + + return MtabUtil.getFilePath(); + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemCheckerFromFile#getFsNameIndex() + */ + @Override + protected int getFsNameIndex() { + + return MtabUtil.getFsNameIndex(); + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#getMountPointIndex() + */ + @Override + protected int getMountPointIndex() { + + return MtabUtil.getMountPointIndex(); + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#skipLine(java.lang.String + * ) + */ + @Override + protected boolean skipLine(String line) { + + return MtabUtil.skipLineForMountPoints(line); + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.filesystem.FileSystemCheckerFromFile#tokenizeLine(java.lang + * .String) + */ + @Override + protected LinkedList tokenizeLine(String line) { + + return MtabUtil.tokenizeLine(line); + } } diff --git a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtabMonolithic.java b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtabMonolithic.java index f26195de..30a4b6de 100644 --- a/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtabMonolithic.java +++ b/src/main/java/it/grid/storm/filesystem/FileSystemCheckerMtabMonolithic.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -16,163 +15,149 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileSystemCheckerMtabMonolithic implements FileSystemChecker { - private static final Logger log = LoggerFactory - .getLogger(FileSystemCheckerMtabMonolithic.class); - - private static final String GPFS_FILESYSTEM_NAME = "gpfs"; - private static final String MTAB_FILE_PATH = "/etc/mtab"; - - private static FileSystemCheckerMtabMonolithic instance = new FileSystemCheckerMtabMonolithic(); - private List GPFSMountPoints = null; - private long initInstant = 0L; - - /** - * Singleton private constructor - */ - private FileSystemCheckerMtabMonolithic() { - - super(); - } - - /** - * Singleton instance getter. initialize the instance if needed - * - * @return singleton instance - */ - public static FileSystemCheckerMtabMonolithic getInstance() - throws FileSystemCheckerException { - - synchronized (instance) { - if (instance.refreshNeeded()) { - instance.init(); - } - } - return instance; - } - - /** - * Initializes the object setting /etc/mtab parsing instant and the list of - * GPFS mount points - */ - private synchronized void init() throws FileSystemCheckerException { - - this.initInstant = Calendar.getInstance().getTimeInMillis(); - this.GPFSMountPoints = listGPFSMountPoints(); - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) - */ - public boolean isGPFS(File file) throws IllegalArgumentException, - FileSystemCheckerException { - - if (file == null) { - log.error("IsGPFS method invoked with null File parameter!"); - throw new IllegalArgumentException("Provided null File argument"); - } - synchronized (instance) { - if (refreshNeeded()) { - this.init(); - } - } - return this.evaluate(file.getAbsolutePath()); - } - - /** - * Checks is /etc/mtab file has been modified since last initialization - * - * @return true if a call of init() method is needed - */ - private synchronized boolean refreshNeeded() { - - boolean response = false; - if (initInstant == 0L - || initInstant < new File(MTAB_FILE_PATH).lastModified()) { - response = true; - } - return response; - } - - /** - * Checks if file path filePath belongs to one of the stored GPFS mount points - * - * @param filePath - * the file path to be checked - * - * @return true if file path filePath is on a GPFS mount points - */ - private boolean evaluate(String filePath) { - - boolean response = false; - for (String GPFSMountPoint : this.GPFSMountPoints) { - if (filePath.startsWith(GPFSMountPoint)) { - response = true; - break; - } - } - return response; - } - - /** - * Parse /etc/mtab file and retrieves all GPFS mount points - * - * @return a list of GPFS mount points - */ - private static List listGPFSMountPoints() - throws FileSystemCheckerException { - - LinkedList mountPointList = new LinkedList(); - BufferedReader mtab; - try { - mtab = new BufferedReader(new FileReader(MTAB_FILE_PATH)); - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - throw new FileSystemCheckerException( - "Error while trying to create a reader for mtab file at " - + MTAB_FILE_PATH + ". 
FileNotFoundException : " + e.getMessage()); - } - String line; - try { - while ((line = mtab.readLine()) != null) { - if (line.startsWith("#") || !line.startsWith("/dev/")) { - continue; - } - LinkedList elementsList = tokenizeLine(line); - if (elementsList.get(2).equals(GPFS_FILESYSTEM_NAME)) { - mountPointList.add(elementsList.get(1)); - } - } - } catch (IOException e) { - log.error(e.getMessage(), e); - throw new FileSystemCheckerException( - "Error while trying to read mtab file at " + MTAB_FILE_PATH - + ". IOException : " + e.getMessage()); - } - return mountPointList; - } - - /** - * Transform the received string in a list of non spaced strings - * - * @param line - * a string - * @return a list of strings without spaces - */ - private static LinkedList tokenizeLine(String line) { - - String[] elementsArray = line.split(" "); - LinkedList elementsList = new LinkedList( - Arrays.asList(elementsArray)); - while (elementsList.remove("")) { - } - return elementsList; - } + private static final Logger log = LoggerFactory.getLogger(FileSystemCheckerMtabMonolithic.class); + + private static final String GPFS_FILESYSTEM_NAME = "gpfs"; + private static final String MTAB_FILE_PATH = "/etc/mtab"; + + private static FileSystemCheckerMtabMonolithic instance = new FileSystemCheckerMtabMonolithic(); + private List GPFSMountPoints = null; + private long initInstant = 0L; + + /** Singleton private constructor */ + private FileSystemCheckerMtabMonolithic() { + + super(); + } + + /** + * Singleton instance getter. initialize the instance if needed + * + * @return singleton instance + */ + public static FileSystemCheckerMtabMonolithic getInstance() throws FileSystemCheckerException { + + synchronized (instance) { + if (instance.refreshNeeded()) { + instance.init(); + } + } + return instance; + } + + /** Initializes the object setting /etc/mtab parsing instant and the list of GPFS mount points */ + private synchronized void init() throws FileSystemCheckerException { + + this.initInstant = Calendar.getInstance().getTimeInMillis(); + this.GPFSMountPoints = listGPFSMountPoints(); + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.filesystem.FileSystemChecker#isGPFS(java.io.File) + */ + public boolean isGPFS(File file) throws IllegalArgumentException, FileSystemCheckerException { + + if (file == null) { + log.error("IsGPFS method invoked with null File parameter!"); + throw new IllegalArgumentException("Provided null File argument"); + } + synchronized (instance) { + if (refreshNeeded()) { + this.init(); + } + } + return this.evaluate(file.getAbsolutePath()); + } + + /** + * Checks is /etc/mtab file has been modified since last initialization + * + * @return true if a call of init() method is needed + */ + private synchronized boolean refreshNeeded() { + + boolean response = false; + if (initInstant == 0L || initInstant < new File(MTAB_FILE_PATH).lastModified()) { + response = true; + } + return response; + } + + /** + * Checks if file path filePath belongs to one of the stored GPFS mount points + * + * @param filePath the file path to be checked + * @return true if file path filePath is on a GPFS mount points + */ + private boolean evaluate(String filePath) { + + boolean response = false; + for (String GPFSMountPoint : this.GPFSMountPoints) { + if (filePath.startsWith(GPFSMountPoint)) { + response = true; + break; + } + } + return response; + } + + /** + * Parse /etc/mtab file and retrieves all GPFS mount points + * + * @return a list of GPFS mount points + */ + private static List 
listGPFSMountPoints() throws FileSystemCheckerException { + + LinkedList mountPointList = new LinkedList(); + BufferedReader mtab; + try { + mtab = new BufferedReader(new FileReader(MTAB_FILE_PATH)); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + throw new FileSystemCheckerException( + "Error while trying to create a reader for mtab file at " + + MTAB_FILE_PATH + + ". FileNotFoundException : " + + e.getMessage()); + } + String line; + try { + while ((line = mtab.readLine()) != null) { + if (line.startsWith("#") || !line.startsWith("/dev/")) { + continue; + } + LinkedList elementsList = tokenizeLine(line); + if (elementsList.get(2).equals(GPFS_FILESYSTEM_NAME)) { + mountPointList.add(elementsList.get(1)); + } + } + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new FileSystemCheckerException( + "Error while trying to read mtab file at " + + MTAB_FILE_PATH + + ". IOException : " + + e.getMessage()); + } + return mountPointList; + } + + /** + * Transform the received string in a list of non spaced strings + * + * @param line a string + * @return a list of strings without spaces + */ + private static LinkedList tokenizeLine(String line) { + + String[] elementsArray = line.split(" "); + LinkedList elementsList = new LinkedList(Arrays.asList(elementsArray)); + while (elementsList.remove("")) {} + return elementsList; + } } diff --git a/src/main/java/it/grid/storm/filesystem/Filesystem.java b/src/main/java/it/grid/storm/filesystem/Filesystem.java index 918f0053..e5015845 100644 --- a/src/main/java/it/grid/storm/filesystem/Filesystem.java +++ b/src/main/java/it/grid/storm/filesystem/Filesystem.java @@ -1,75 +1,67 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file Filesystem.java * @author Riccardo Murri - * - * The it.grid.storm.filesystem.Filesystem class + *

The it.grid.storm.filesystem.Filesystem class */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ package it.grid.storm.filesystem; -import java.io.File; -import java.util.concurrent.Semaphore; - import it.grid.storm.filesystem.swig.fs_acl; import it.grid.storm.filesystem.swig.genericfs; import it.grid.storm.griduser.LocalUser; +import java.io.File; +import java.util.concurrent.Semaphore; /** * Façade and base class for filesystem manipulation. - * - * This class implements methods for manipulation of filesystem entries. You - * should not use this class directly in StoRM code from outside this package, - * rather manipulate the filesystem in StoRM through the {@link File} and {@link - * Space} interfaces: this class is just a thin wrapper around low-level - * filesystem calls. - * - * This class should be instanciated by giving a fs::genericfs subclass instance - * that is shall be used for all filesystem operations. Clearly, only the - * configuration mechanism knows which filesystem-type has been configured on a - * certain path, so this class should only be instanciated at - * stratup/configuration time. - * - * Not only, can an instance of this class be shared among different objects - * operating on the same filesystem (or portion of it), but it actually @em - * should, because of shared ACL locking (see "Implementation Notes"). - * - * - * Implementation notes - * - * This class tries to consistently use: - * - * - String for path names. - * - * - long for sizes; sizes are always expressed in bytes. - * - * - {@link FilesystemPermission} for expressing permissions that a user has on - * a file; - * - * - {@link it.grid.storm.griduser.LocalUser} for representing user credentials. - * - * This class serializes files access in the ACL manipulation methods, because - * the POSIX ACL manipulation API only allows for getting or setting the - * whole list of permissions; no changes of individual permissions can - * be performed. Therefore, no two threads can concurrently manipulate ACLs on - * the same file, or they may overwrite each other's changes. - * - * A per-filename lock is maintained, and no two threads can concurrently modify - * ACL on the same pathname. Modification by other programs (that is, outside - * StoRM) cannot be prevented, so we still have a race condition. - * + * + *

This class implements methods for manipulation of filesystem entries. You should not use this + * class directly in StoRM code from outside this package; rather, manipulate the filesystem in StoRM + * through the {@link File} and {@link Space} interfaces: this class is just a thin wrapper around + * low-level filesystem calls. + * + *

This class should be instantiated by giving a fs::genericfs subclass instance that shall be + * used for all filesystem operations. Clearly, only the configuration mechanism knows which + * filesystem type has been configured on a certain path, so this class should only be instantiated + * at startup/configuration time. + * + *

Not only can an instance of this class be shared among different objects operating on the + * same filesystem (or a portion of it), but it actually @em should be, because of shared ACL locking + * (see "Implementation Notes"). + * + *
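A minimal usage sketch of the sharing pattern just described, before the implementation notes below; the holder class is hypothetical and only for illustration, while the Filesystem(genericfs) constructor and the genericfs import are the ones that appear later in this patch.

  import it.grid.storm.filesystem.Filesystem;
  import it.grid.storm.filesystem.swig.genericfs;

  public final class SharedFilesystem {

    private static volatile Filesystem instance;

    /** Called once at startup/configuration time with the chosen native driver. */
    public static synchronized void configure(genericfs nativeDriver) {
      if (instance == null) {
        instance = new Filesystem(nativeDriver); // constructor shown later in this patch
      }
    }

    /** All callers share the same instance, so they also share its ACL locks. */
    public static Filesystem get() {
      if (instance == null) {
        throw new IllegalStateException("SharedFilesystem.configure(...) not called yet");
      }
      return instance;
    }
  }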

Implementation notes + * + *

This class tries to consistently use: + * + *

- String for path names. + * + *

- long for sizes; sizes are always expressed in bytes. + * + *

- {@link FilesystemPermission} for expressing permissions that a user has on a file; + * + *

- {@link it.grid.storm.griduser.LocalUser} for representing user credentials. + * + *

This class serializes file access in the ACL manipulation methods, because the POSIX ACL + * manipulation API only allows getting or setting the whole list of permissions; no + * changes of individual permissions can be performed. Therefore, no two threads can concurrently + * manipulate ACLs on the same file, or they may overwrite each other's changes. + * + *
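A hedged sketch of the per-pathname serialization this implies; it is a simplified stand-in for the lock-pool classes earlier in this patch (which additionally usage-count their entries so locks for idle pathnames can be discarded), not the actual StoRM code.

  import java.util.concurrent.ConcurrentHashMap;
  import java.util.concurrent.ConcurrentMap;
  import java.util.concurrent.Semaphore;

  final class PerPathAclLock {

    private final ConcurrentMap<String, Semaphore> locks = new ConcurrentHashMap<>();

    /** Runs an ACL read-modify-write while no other thread touches the same pathname. */
    void withLock(String pathname, Runnable readModifyWriteWholeAcl) throws InterruptedException {
      Semaphore lock = locks.computeIfAbsent(pathname, p -> new Semaphore(1));
      lock.acquire();
      try {
        readModifyWriteWholeAcl.run(); // load the whole ACL, change one entry, store it back
      } finally {
        lock.release();
      }
    }
  }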

A per-filename lock is maintained, and no two threads can concurrently modify ACL on the same + * pathname. Modification by other programs (that is, outside StoRM) cannot be prevented, so we + * still have a race condition. + * * @author Riccardo Murri @version $Revision: 1.20 $ - **/ + */ public class Filesystem implements FilesystemIF { // --- private instance variables --- @@ -83,16 +75,20 @@ public class Filesystem implements FilesystemIF { private GetGroupPermissionMethod getGroupPermissionMethod = new GetGroupPermissionMethod(); private GetUserPermissionMethod getUserPermissionMethod = new GetUserPermissionMethod(); - private GetEffectiveGroupPermissionMethod getEffectiveGroupPermissionMethod = new GetEffectiveGroupPermissionMethod(); - private GetEffectiveUserPermissionMethod getEffectiveUserPermissionMethod = new GetEffectiveUserPermissionMethod(); + private GetEffectiveGroupPermissionMethod getEffectiveGroupPermissionMethod = + new GetEffectiveGroupPermissionMethod(); + private GetEffectiveUserPermissionMethod getEffectiveUserPermissionMethod = + new GetEffectiveUserPermissionMethod(); private GrantGroupPermissionMethod grantGroupPermissionMethod = new GrantGroupPermissionMethod(); private GrantUserPermissionMethod grantUserPermissionMethod = new GrantUserPermissionMethod(); - private RemoveGroupPermissionMethod removeGroupPermissionMethod = new RemoveGroupPermissionMethod(); + private RemoveGroupPermissionMethod removeGroupPermissionMethod = + new RemoveGroupPermissionMethod(); private RemoveUserPermissionMethod removeUserPermissionMethod = new RemoveUserPermissionMethod(); - private RevokeGroupPermissionMethod revokeGroupPermissionMethod = new RevokeGroupPermissionMethod(); + private RevokeGroupPermissionMethod revokeGroupPermissionMethod = + new RevokeGroupPermissionMethod(); private RevokeUserPermissionMethod revokeUserPermissionMethod = new RevokeUserPermissionMethod(); private SetGroupPermissionMethod setGroupPermissionMethod = new SetGroupPermissionMethod(); @@ -103,7 +99,8 @@ public class Filesystem implements FilesystemIF { /** Constructor, taking native low-level filesystem interface object. */ public Filesystem(final genericfs nativeFs) { - assert (null != nativeFs) : "Null nativeFs in Filesystem(NativeFilesystemInterface) constructor"; + assert (null != nativeFs) + : "Null nativeFs in Filesystem(NativeFilesystemInterface) constructor"; fs = nativeFs; @@ -121,26 +118,19 @@ public Filesystem(final genericfs nativeFs) { /** * @defgroup fs_fileops Low-level File and Directory Operations - * - * Methods here come in two variants: the "exact" one which tries to return - * up-to-date information, and the "standard" one, which uses common POSIX - * system calls to get the required info. - * - * The "exact" variant may force a cluster-wide metadata update, so it may - * result in performance degradation if used frerquently. - * - * Motivation for this comes from the GPFS filesystem: GPFS nodes perform file - * metadata and attribute caching, so they may return out-of-date results to - * the standard system calls. The GPFS API provides the corresponding "exact" - * calls. - * - * In the generic POSIX filesystem implementation, there's no difference - * between the "exact" and the "standard" call. @{ - **/ - - /** - * Get file size in bytes. Same as calling {@link java.io.File#length()}. + *

Methods here come in two variants: the "exact" one which tries to return up-to-date + * information, and the "standard" one, which uses common POSIX system calls to get the + * required info. + *

The "exact" variant may force a cluster-wide metadata update, so it may result in + * performance degradation if used frerquently. + *

Motivation for this comes from the GPFS filesystem: GPFS nodes perform file metadata and + * attribute caching, so the standard system calls may return out-of-date results. The + * GPFS API provides the corresponding "exact" calls. + *
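For instance, a caller that needs trustworthy accounting might compare the two variants; this is a hedged sketch only ("auditSize" and the sample path are illustrative, and it assumes getSize/getExactSize belong to FilesystemIF, as the @Override annotations below suggest).

  // Illustrative only: not part of this patch.
  static void auditSize(FilesystemIF fs, String path) {
    long cached = fs.getSize(path);     // plain stat(): may be stale on GPFS nodes
    long exact = fs.getExactSize(path); // may force a cluster-wide metadata update: slower
    if (cached != exact) {
      System.out.println("Stale size for " + path + ": cached=" + cached + " exact=" + exact);
    }
  }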

In the generic POSIX filesystem implementation, there's no difference between the + * "exact" and the "standard" call. @{ */ + + /** Get file size in bytes. Same as calling {@link java.io.File#length()}. */ @Override public long getSize(final String file) { @@ -155,25 +145,20 @@ public long getSize(final String file) { public long getLastModifiedTime(final String fileOrDirectory) { /** - * Since the lastModificationTime can be retrieved by Java.io.File we prefer - * to don't use the native driver for get this information. - * - * @todo This should be done in the JAVA side of each driver, since this - * solution will not work in situation (as Amazon s3) for which a native JVM - * support does not exists. - * + * Since the lastModificationTime can be retrieved by Java.io.File we prefer to don't use the + * native driver for get this information. + * + * @todo This should be done in the JAVA side of each driver, since this solution will not work + * in situation (as Amazon s3) for which a native JVM support does not exists. */ - File fileOrDir = new File(fileOrDirectory); return fileOrDir.lastModified(); - } /** - * Get up-to-date file size in bytes. Returned value may differ from the size - * returned by {@link java.io.File#length()} on filesystems that do metadata - * caching (GPFS, for instance). Since it may force a metadata update on all - * cluster nodes, this method may be slow. + * Get up-to-date file size in bytes. Returned value may differ from the size returned by {@link + * java.io.File#length()} on filesystems that do metadata caching (GPFS, for instance). Since it + * may force a metadata update on all cluster nodes, this method may be slow. */ @Override public long getExactSize(final String file) { @@ -183,11 +168,10 @@ public long getExactSize(final String file) { } /** - * Get up-to-date file last modification time, as a UNIX epoch. Returned value - * may differ from the size returned by {@link java.io.File#lastModified()} on - * filesystems that do metadata caching (GPFS, for instance). Since it may - * force a metadata update on all cluster nodes, this method may be - * slow. + * Get up-to-date file last modification time, as a UNIX epoch. Returned value may differ from the + * size returned by {@link java.io.File#lastModified()} on filesystems that do metadata caching + * (GPFS, for instance). Since it may force a metadata update on all cluster nodes, this method + * may be slow. */ @Override public long getExactLastModifiedTime(final String fileOrDirectory) { @@ -197,7 +181,7 @@ public long getExactLastModifiedTime(final String fileOrDirectory) { /** * Truncate the specified file to the desired size - * + * * @param filename @param desired_size @return */ @Override @@ -226,18 +210,15 @@ public void changeFileGroupOwnership(String filename, String groupName) { /** * @defgroup fs_space Low-level Space Reservation Functions - * - * This interface is a draft! Due to the unsettled state of - * the SRM spec regarding to reserved space semantics, and the differences - * between SRM space reservation and GPFS preallocation, it's better not use - * this interface directly; rather, use wrapper objects (like {@link - * it.grid.storm.filesystem.Space}) for operations. @{ - **/ + *

This interface is a draft! Due to the unsettled state of the SRM spec + * regarding to reserved space semantics, and the differences between SRM space reservation + * and GPFS preallocation, it's better not use this interface directly; rather, use wrapper + * objects (like {@link it.grid.storm.filesystem.Space}) for operations. @{ + */ /** - * Return available space (in bytes) on filesystem. Please note that this - * value may be inaccurate on cluster/networked filesystems, due to metadata - * caching. + * Return available space (in bytes) on filesystem. Please note that this value may be inaccurate + * on cluster/networked filesystems, due to metadata caching. */ @Override public long getFreeSpace() { @@ -246,11 +227,12 @@ public long getFreeSpace() { } @Override - public boolean canAccess(final LocalUser u, final String fileOrDirectory, - final FilesystemPermission accessMode) { + public boolean canAccess( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission accessMode) { assert (null != u) : "Null LocalUser parameter passed to Filesystem.canAccess()"; - assert (null != fileOrDirectory) : "Null fileOrDirectory parameter passed to Filesystem.canAccess()"; + assert (null != fileOrDirectory) + : "Null fileOrDirectory parameter passed to Filesystem.canAccess()"; assert (null != accessMode) : "Null accessMode parameter passed to Filesystem.canAccess()"; fs_acl acl = fs.new_acl(); @@ -259,101 +241,89 @@ public boolean canAccess(final LocalUser u, final String fileOrDirectory, } @Override - public FilesystemPermission getEffectiveGroupPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission getEffectiveGroupPermission( + final LocalUser u, final String fileOrDirectory) { - return getPermissionTemplate(u, fileOrDirectory, - getEffectiveGroupPermissionMethod); + return getPermissionTemplate(u, fileOrDirectory, getEffectiveGroupPermissionMethod); } @Override - public FilesystemPermission getEffectiveUserPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission getEffectiveUserPermission( + final LocalUser u, final String fileOrDirectory) { - return getPermissionTemplate(u, fileOrDirectory, - getEffectiveUserPermissionMethod); + return getPermissionTemplate(u, fileOrDirectory, getEffectiveUserPermissionMethod); } @Override - public FilesystemPermission getGroupPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission getGroupPermission(final LocalUser u, final String fileOrDirectory) { return getPermissionTemplate(u, fileOrDirectory, getGroupPermissionMethod); } @Override - public FilesystemPermission getUserPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission getUserPermission(final LocalUser u, final String fileOrDirectory) { return getPermissionTemplate(u, fileOrDirectory, getUserPermissionMethod); } @Override - public FilesystemPermission grantGroupPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) { + public FilesystemPermission grantGroupPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - grantGroupPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, grantGroupPermissionMethod); } @Override - public FilesystemPermission grantUserPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) 
{ + public FilesystemPermission grantUserPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - grantUserPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, grantUserPermissionMethod); } @Override - public FilesystemPermission removeGroupPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission removeGroupPermission( + final LocalUser u, final String fileOrDirectory) { - return removePermissionTemplate(u, fileOrDirectory, - removeGroupPermissionMethod); + return removePermissionTemplate(u, fileOrDirectory, removeGroupPermissionMethod); } @Override - public FilesystemPermission removeUserPermission(final LocalUser u, - final String fileOrDirectory) { + public FilesystemPermission removeUserPermission( + final LocalUser u, final String fileOrDirectory) { - return removePermissionTemplate(u, fileOrDirectory, - removeUserPermissionMethod); + return removePermissionTemplate(u, fileOrDirectory, removeUserPermissionMethod); } @Override - public FilesystemPermission revokeGroupPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) { + public FilesystemPermission revokeGroupPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - revokeGroupPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, revokeGroupPermissionMethod); } @Override - public FilesystemPermission revokeUserPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) { + public FilesystemPermission revokeUserPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - revokeUserPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, revokeUserPermissionMethod); } @Override - public FilesystemPermission setGroupPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) { + public FilesystemPermission setGroupPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - setGroupPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, setGroupPermissionMethod); } @Override - public FilesystemPermission setUserPermission(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission permission) { + public FilesystemPermission setUserPermission( + final LocalUser u, final String fileOrDirectory, final FilesystemPermission permission) { - return setPermissionTemplate(u, fileOrDirectory, permission, - setUserPermissionMethod); + return setPermissionTemplate(u, fileOrDirectory, permission, setUserPermissionMethod); } private interface GetPermissionMethod { - + public FilesystemPermission get(final fs_acl a, final LocalUser u); } @@ -381,22 +351,19 @@ public FilesystemPermission get(final fs_acl a, final LocalUser u) { } } - private class GetEffectiveGroupPermissionMethod - implements GetPermissionMethod { + private class GetEffectiveGroupPermissionMethod implements GetPermissionMethod { public FilesystemPermission get(final fs_acl a, final LocalUser u) { if (a.has_group_perm(u.getPrimaryGid())) { - return new 
FilesystemPermission( - a.get_group_effective_perm(u.getPrimaryGid())); + return new FilesystemPermission(a.get_group_effective_perm(u.getPrimaryGid())); } else { return null; } } } - private class GetEffectiveUserPermissionMethod - implements GetPermissionMethod { + private class GetEffectiveUserPermissionMethod implements GetPermissionMethod { public FilesystemPermission get(final fs_acl a, final LocalUser u) { @@ -408,12 +375,14 @@ public FilesystemPermission get(final fs_acl a, final LocalUser u) { } } - private FilesystemPermission getPermissionTemplate(final LocalUser u, - final String fileOrDirectory, final GetPermissionMethod permissionMethod) { + private FilesystemPermission getPermissionTemplate( + final LocalUser u, final String fileOrDirectory, final GetPermissionMethod permissionMethod) { assert (null != u) : "Null LocalUser passed to Filesystem.getPermissionTemplate()"; - assert (null != fileOrDirectory) : "Null fileOrDirectory passed to Filesystem.getPermissionTemplate()"; - assert (null != permissionMethod) : "Null permissionMethod passed to Filesystem.getPermissionTemplate()"; + assert (null != fileOrDirectory) + : "Null fileOrDirectory passed to Filesystem.getPermissionTemplate()"; + assert (null != permissionMethod) + : "Null permissionMethod passed to Filesystem.getPermissionTemplate()"; fs_acl acl = fs.new_acl(); acl.load(fileOrDirectory, false); @@ -432,9 +401,10 @@ private class RemoveUserPermissionMethod implements RemovePermissionMethod { public FilesystemPermission remove(final fs_acl a, final LocalUser u) { - assert (a.has_user_perm( - u.getUid())) : "Filesystem: removing permission for user " + u.getUid() - + "that has no permission associated!"; + assert (a.has_user_perm(u.getUid())) + : "Filesystem: removing permission for user " + + u.getUid() + + "that has no permission associated!"; return new FilesystemPermission(a.remove_user_perm_not_owner(u.getUid())); } } @@ -443,21 +413,24 @@ private class RemoveGroupPermissionMethod implements RemovePermissionMethod { public FilesystemPermission remove(final fs_acl a, final LocalUser u) { - assert (a.has_user_perm( - u.getUid())) : "Filesystem: removing permission for group " + u.getUid() - + "that has no permission associated!"; - return new FilesystemPermission( - a.remove_group_perm_not_owner(u.getPrimaryGid())); + assert (a.has_user_perm(u.getUid())) + : "Filesystem: removing permission for group " + + u.getUid() + + "that has no permission associated!"; + return new FilesystemPermission(a.remove_group_perm_not_owner(u.getPrimaryGid())); } } - private FilesystemPermission removePermissionTemplate(final LocalUser u, - final String fileOrDirectory, - final RemovePermissionMethod permissionMethod) { + private FilesystemPermission removePermissionTemplate( + final LocalUser u, + final String fileOrDirectory, + final RemovePermissionMethod permissionMethod) { assert (null != u) : "Null LocalUser passed to Filesystem.removePermissionTemplate()"; - assert (null != fileOrDirectory) : "Null fileOrDirectory passed to Filesystem.removePermissionTemplate()"; - assert (null != permissionMethod) : "Null permissionMethod passed to Filesystem.removePermissionTemplate()"; + assert (null != fileOrDirectory) + : "Null fileOrDirectory passed to Filesystem.removePermissionTemplate()"; + assert (null != permissionMethod) + : "Null permissionMethod passed to Filesystem.removePermissionTemplate()"; FilesystemPermission oldPermission; fs_acl acl = fs.new_acl(); @@ -477,18 +450,17 @@ private FilesystemPermission 
removePermissionTemplate(final LocalUser u, private interface SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p); + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p); } private class SetUserPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_user_perm(u.getUid())) { - return new FilesystemPermission( - a.set_user_perm(u.getUid(), p.toFsAclPermission())); + return new FilesystemPermission(a.set_user_perm(u.getUid(), p.toFsAclPermission())); } else { a.set_user_perm(u.getUid(), p.toFsAclPermission()); return null; @@ -498,12 +470,11 @@ public FilesystemPermission apply(final fs_acl a, final LocalUser u, private class GrantUserPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_user_perm(u.getUid())) { - return new FilesystemPermission( - a.grant_user_perm(u.getUid(), p.toFsAclPermission())); + return new FilesystemPermission(a.grant_user_perm(u.getUid(), p.toFsAclPermission())); } else { a.grant_user_perm(u.getUid(), p.toFsAclPermission()); return null; @@ -513,12 +484,11 @@ public FilesystemPermission apply(final fs_acl a, final LocalUser u, private class RevokeUserPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_user_perm(u.getUid())) { - return new FilesystemPermission( - a.revoke_user_perm(u.getUid(), p.toFsAclPermission())); + return new FilesystemPermission(a.revoke_user_perm(u.getUid(), p.toFsAclPermission())); } else { return null; } @@ -527,12 +497,11 @@ public FilesystemPermission apply(final fs_acl a, final LocalUser u, private class SetGroupPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_group_perm(u.getPrimaryGid())) { - return new FilesystemPermission( - a.set_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); + return new FilesystemPermission(a.set_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); } else { a.set_group_perm(u.getPrimaryGid(), p.toFsAclPermission()); return null; @@ -542,12 +511,12 @@ public FilesystemPermission apply(final fs_acl a, final LocalUser u, private class GrantGroupPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_group_perm(u.getPrimaryGid())) { return new FilesystemPermission( - a.grant_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); + a.grant_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); } else { a.grant_group_perm(u.getPrimaryGid(), p.toFsAclPermission()); return null; @@ -557,26 +526,30 @@ public 
FilesystemPermission apply(final fs_acl a, final LocalUser u, private class RevokeGroupPermissionMethod implements SetPermissionMethod { - public FilesystemPermission apply(final fs_acl a, final LocalUser u, - final FilesystemPermission p) { + public FilesystemPermission apply( + final fs_acl a, final LocalUser u, final FilesystemPermission p) { if (a.has_group_perm(u.getPrimaryGid())) { return new FilesystemPermission( - a.revoke_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); + a.revoke_group_perm(u.getPrimaryGid(), p.toFsAclPermission())); } else { return null; } } } - private FilesystemPermission setPermissionTemplate(final LocalUser u, - final String fileOrDirectory, final FilesystemPermission p, - final SetPermissionMethod setPermissionMethod) { + private FilesystemPermission setPermissionTemplate( + final LocalUser u, + final String fileOrDirectory, + final FilesystemPermission p, + final SetPermissionMethod setPermissionMethod) { assert (null != u) : "Null LocalUser passed to Filesystem.setPermissionTemplate()"; - assert (null != fileOrDirectory) : "Null fileOrDirectory passed to Filesystem.setPermissionTemplate()"; + assert (null != fileOrDirectory) + : "Null fileOrDirectory passed to Filesystem.setPermissionTemplate()"; assert (null != p) : "Null FilesystemPermission passed to Filesystem.setPermissionTemplate()"; - assert (null != setPermissionMethod) : "Null permissionMethod passed to Filesystem.setPermissionTemplate()"; + assert (null != setPermissionMethod) + : "Null permissionMethod passed to Filesystem.setPermissionTemplate()"; FilesystemPermission oldPermission; fs_acl acl = fs.new_acl(); @@ -593,5 +566,4 @@ private FilesystemPermission setPermissionTemplate(final LocalUser u, } return oldPermission; } - } diff --git a/src/main/java/it/grid/storm/filesystem/FilesystemIF.java b/src/main/java/it/grid/storm/filesystem/FilesystemIF.java index 9776aa80..b59f1dac 100644 --- a/src/main/java/it/grid/storm/filesystem/FilesystemIF.java +++ b/src/main/java/it/grid/storm/filesystem/FilesystemIF.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -10,9 +9,7 @@ public interface FilesystemIF { long FS_BLOCK_SIZE = 512; - /** - * Get file size in bytes. Same as calling {@link java.io.File#length()}. - */ + /** Get file size in bytes. Same as calling {@link java.io.File#length()}. */ long getSize(String file); /** @@ -22,405 +19,321 @@ public interface FilesystemIF { long getLastModifiedTime(String fileOrDirectory); /** - * Get up-to-date file size in bytes. Returned value may differ from the size - * returned by {@link java.io.File#length()} on filesystems that do metadata - * caching (GPFS, for instance). Since it may force a metadata update on all - * cluster nodes, this method may be slow. + * Get up-to-date file size in bytes. Returned value may differ from the size returned by {@link + * java.io.File#length()} on filesystems that do metadata caching (GPFS, for instance). Since it + * may force a metadata update on all cluster nodes, this method may be slow. */ long getExactSize(String file); /** - * Get up-to-date file last modification time, as a UNIX epoch. Returned value - * may differ from the size returned by {@link java.io.File#lastModified()} on - * filesystems that do metadata caching (GPFS, for instance). 
Since it may - * force a metadata update on all cluster nodes, this method may be - * slow. + * Get up-to-date file last modification time, as a UNIX epoch. Returned value may differ from the + * size returned by {@link java.io.File#lastModified()} on filesystems that do metadata caching + * (GPFS, for instance). Since it may force a metadata update on all cluster nodes, this method + * may be slow. */ long getExactLastModifiedTime(String fileOrDirectory); /** * Truncate the specified file to the desired size - * + * * @param filename @param desired_size @return */ int truncateFile(String filename, long desired_size); /** * Returns true if file is on disk, false otherwise - * + * * @param filename @return */ boolean isFileOnDisk(String filename); /** * Returns a file block size - * + * * @param filename @return */ long getFileBlockSize(String filename); /** * Changes the group ownership for a file - * + * * @param filename @param groupName */ void changeFileGroupOwnership(String filename, String groupName); /** - * Return available space (in bytes) on filesystem. Please note that this - * value may be inaccurate on cluster/networked filesystems, due to metadata - * caching. + * Return available space (in bytes) on filesystem. Please note that this value may be inaccurate + * on cluster/networked filesystems, due to metadata caching. */ long getFreeSpace(); /** - * Return true if the local user u can operate on the - * specified fileOrDirectory in the mode given by accessMode, - * according to the permissions set on the filesystem. - * - * Suppose a local UNIX user (identified by UID and the list of primary and - * supplementary GIDs) requests access to a certain file or directory. - * Roughly, the access control algorithm is: - * - * - if the requestor's UID matches the UID in the @em owner entry, then - * the @em owner permissions are used; - * - * - else if the requestor's UID matches a UID in a specific user - * entry, then the bitwise-AND of that entry's permissions and the @em mask - * entry permissions are used; - * - * - else if any of the requestor's GIDs (primary or supplementary) matches - * the group owner entry, the bitwise-AND of that entry's permissions and - * the @em mask entry permissions are used; - * - * - else if any of the requestor's GIDs (primary or supplementary) matches - * the group owner entry, the bitwise-AND of that entry's permissions and - * the @em mask entry permissions are used; - * - * - else, (if no group entry was found to match) the @em other entry - * permissions are used. - * + * Return true if the local user u can operate on the specified + * fileOrDirectory in the mode given by accessMode, according to the permissions set + * on the filesystem. + * + *

Suppose a local UNIX user (identified by UID and the list of primary and supplementary GIDs) + * requests access to a certain file or directory. Roughly, the access control algorithm is: + * + *

- if the requestor's UID matches the UID in the @em owner entry, then the @em owner + * permissions are used; + * + *

- else if the requestor's UID matches a UID in a specific user entry, then the + * bitwise-AND of that entry's permissions and the @em mask entry permissions are used; + * + *

- else if any of the requestor's GIDs (primary or supplementary) matches the group owner + * entry, the bitwise-AND of that entry's permissions and the @em mask entry permissions are used; + * + *

- else if any of the requestor's GIDs (primary or supplementary) matches the group owner + * entry, the bitwise-AND of that entry's permissions and the @em mask entry permissions are used; + * + *

- else, (if no group entry was found to match) the @em other entry permissions are used. + * * @see fs_acl#access() @sa Linux man page acl(5) */ - boolean canAccess(LocalUser u, String fileOrDirectory, - FilesystemPermission accessMode); + boolean canAccess(LocalUser u, String fileOrDirectory, FilesystemPermission accessMode); /** - * Return the effective permission a group has on the given file or - * directory. - * - * Loads the ACL for the given file or directory, and return the permission - * associated with the primary group of the given {@link - * it.grid.storm.griduser.LocalUser} instance u. If no entry for that - * group is found, return null. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's permissions are to be retrieved. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named group permissions. - * + * Return the effective permission a group has on the given file or directory. + * + *
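A hedged sketch of how a caller might use the canAccess() check whose resolution rules are listed above; the helper name, the path and the way fs and user are obtained are assumptions for illustration only:

  // Sketch: decide whether the mapped local user may read a file, relying on the
  // owner -> named user -> group -> other ACL resolution described above.
  static boolean mayRead(FilesystemIF fs, LocalUser user, String path) {
    return fs.canAccess(user, path, FilesystemPermission.Read);
  }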

Loads the ACL for the given file or directory, and return the permission associated with the + * primary group of the given {@link it.grid.storm.griduser.LocalUser} instance u. If no + * entry for that group is found, return null. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * permissions are to be retrieved. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named group permissions. * @return permission associated to the primary GID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that group was found. + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. */ - - FilesystemPermission getEffectiveGroupPermission(LocalUser u, - String fileOrDirectory); + FilesystemPermission getEffectiveGroupPermission(LocalUser u, String fileOrDirectory); /** - * Return the effective permission a user has on the given file or - * directory. - * - * Loads the ACL for the given file or directory, and return the permission - * associated with the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u. If no entry for that - * user is found, return null. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose permissions are to be retrieved. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the given user permissions. - * - * @return permission associated to the UID of the given {@link LocalUser} - * instance u in the given file ACL, or null if no entry - * for that user was found. + * Return the effective permission a user has on the given file or directory. + * + *

Loads the ACL for the given file or directory, and return the permission associated with the + * UID of the given {@link it.grid.storm.griduser.LocalUser} instance u. If no entry for + * that user is found, return null. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose permissions + * are to be retrieved. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * given user permissions. + * @return permission associated to the UID of the given {@link LocalUser} instance u in + * the given file ACL, or null if no entry for that user was found. */ - - FilesystemPermission getEffectiveUserPermission(LocalUser u, - String fileOrDirectory); + FilesystemPermission getEffectiveUserPermission(LocalUser u, String fileOrDirectory); /** * Return the permission a group has on the given file or directory. - * - * Loads the ACL for the given file or directory, and return the permission - * associated with the primary group of the given {@link - * it.grid.storm.griduser.LocalUser} instance u. If no entry for that - * group is found, return null. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's permissions are to be retrieved. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named group permissions. - * + * + *

Loads the ACL for the given file or directory, and return the permission associated with the + * primary group of the given {@link it.grid.storm.griduser.LocalUser} instance u. If no + * entry for that group is found, return null. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * permissions are to be retrieved. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named group permissions. * @return permission associated to the primary GID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that group was found. + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. */ - FilesystemPermission getGroupPermission(LocalUser u, String fileOrDirectory); /** * Return the permission a user has on the given file or directory. - * - * Loads the ACL for the given file or directory, and return the permission - * associated with the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u. If no entry for that - * user is found, return null. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose permissions are to be retrieved. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the given user permissions. - * - * @return permission associated to the UID of the given {@link LocalUser} - * instance u in the given file ACL, or null if no entry - * for that user was found. + * + *

Loads the ACL for the given file or directory, and return the permission associated with the + * UID of the given {@link it.grid.storm.griduser.LocalUser} instance u. If no entry for + * that user is found, return null. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose permissions + * are to be retrieved. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * given user permissions. + * @return permission associated to the UID of the given {@link LocalUser} instance u in + * the given file ACL, or null if no entry for that user was found. */ - FilesystemPermission getUserPermission(LocalUser u, String fileOrDirectory); /** - * Grant specified permission to a group on a file or directory, and return - * the former permission. - * - *
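A short sketch of how the null return value of these permission getters is typically handled; the helper below is illustrative and not part of the codebase:

  // Sketch: a null result means the ACL carries no entry for the user's primary group.
  static boolean groupMayWrite(FilesystemIF fs, LocalUser user, String path) {
    FilesystemPermission p = fs.getEffectiveGroupPermission(user, path);
    return p != null && p.canWriteFile();
  }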

Adds the specified permission to the ones that the primary group of the - * given {@link it.grid.storm.griduser.LocalUser} instance u already - * holds on the given file or directory: all permission bits that are set in - * permission will be set in the appropriate group entry in the file - * ACL. - * - *

If no entry is present for the specified group, then one is created and - * its permission value is set to permission. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the given group permissions - * + * Grant specified permission to a group on a file or directory, and return the former permission. + * + *

Adds the specified permission to the ones that the primary group of the given {@link + * it.grid.storm.griduser.LocalUser} instance u already holds on the given file or + * directory: all permission bits that are set in permission will be set in the appropriate + * group entry in the file ACL. + * + *

If no entry is present for the specified group, then one is created and its permission value + * is set to permission. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * entry is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * given group permissions * @param permission Capabilities to grant. - * - * @return permission formerly associated to the primary GID of the given - * {@link it.grid.storm.griduser.LocalUser} instance u in the given - * file ACL, or null if no entry for that group was found. - * + * @return permission formerly associated to the primary GID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. * @see fs_acl::grant_group_perm() */ - - FilesystemPermission grantGroupPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission); + FilesystemPermission grantGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); /** - * Grant specified permission to a user on a file or directory, and return the - * former permission. - * - *

Adds the specified permissions to the ones that the UID of the given - * {@link it.grid.storm.griduser.LocalUser} instance u already holds on - * the given file or directory: all permission bits that are set in - * permission will be set in the appropriate user entry in the file - * ACL. - * - *

If no entry is present for the specified user, then one is created and - * its permission value is set to permission. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose UID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the given user permissions - * + * Grant specified permission to a user on a file or directory, and return the former permission. + * + *

Adds the specified permissions to the ones that the UID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u already holds on the given file or + * directory: all permission bits that are set in permission will be set in the appropriate + * user entry in the file ACL. + * + *

If no entry is present for the specified user, then one is created and its permission value + * is set to permission. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose UID's entry + * is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * given user permissions * @param permission Capabilities to grant. - * * @return permission formerly associated to the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that user was found. - * + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that user was found. * @see fs_acl::grant_user_perm() */ - - FilesystemPermission grantUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission); + FilesystemPermission grantUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); /** - * Remove a group's entry from a file or directory ACL, and return the (now - * deleted) permission. - * - *
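A possible usage pattern for the grant/revoke pair, assuming an initialized FilesystemIF instance fs, a resolved LocalUser user and an illustrative directory path:

  // Sketch: temporarily grant LIST+TRAVERSE to the user's primary group, then revoke
  // the same bits. Both calls return the entry's previous permission (null if no entry existed).
  FilesystemPermission previous =
      fs.grantGroupPermission(user, "/storage/example/dir", FilesystemPermission.ListTraverse);
  // ... perform the work that needed directory access ...
  fs.revokeGroupPermission(user, "/storage/example/dir", FilesystemPermission.ListTraverse);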

Removes the entry (if any) of the primary group of the given {@link - * it.grid.storm.griduser.LocalUser} instance u from the given file or - * directory ACL. Returns the permission formerly associated with that group. - * - * @todo would this be the correct behaviuor: if the given group is the file - * owning group, then its entry is set to {@link #NONE}, rather than removed. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the given group permissions - * - * @return permission formerly associated to the primary GID of the given - * {@link it.grid.storm.griduser.LocalUser} instance u in the given - * file ACL, or null if no entry for that group was found. - * + * Remove a group's entry from a file or directory ACL, and return the (now deleted) permission. + * + *

Removes the entry (if any) of the primary group of the given {@link + * it.grid.storm.griduser.LocalUser} instance u from the given file or directory ACL. + * Returns the permission formerly associated with that group. + * + * @todo would this be the correct behaviour: if the given group is the file owning group, then + * its entry is set to {@link #NONE}, rather than removed. + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * entry is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * given group permissions + * @return permission formerly associated to the primary GID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. + * @see fs_acl::remove_group_perm_not_owner() */ - - FilesystemPermission removeGroupPermission(LocalUser u, - String fileOrDirectory); + FilesystemPermission removeGroupPermission(LocalUser u, String fileOrDirectory); /** - * Remove a user's entry from a file or directory ACL, and return the (now - * deleted) permission. - *

Removes the entry (if any) associated with the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u from the given file or - * directory ACL. Returns the permission formerly associated with that user. - * - * @todo would this be the correct behaviour: if the given user is the file - * owner, then its entry is set to {@link #NONE}, rather than removed. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose UID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named user permissions - * + * Remove a user's entry from a file or directory ACL, and return the (now deleted) permission. + * + *

Removes the entry (if any) associated with the UID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u from the given file or directory ACL. + * Returns the permission formerly associated with that user. + * + * @todo would this be the correct behaviour: if the given user is the file owner, then its entry + * is set to {@link #NONE}, rather than removed. + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose UID's entry + * is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named user permissions * @return permission formerly associated to the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that user was found. - * + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that user was found. * @see fs_acl::remove_user_perm_not_owner() */ - - FilesystemPermission removeUserPermission(LocalUser u, - String fileOrDirectory); + FilesystemPermission removeUserPermission(LocalUser u, String fileOrDirectory); /** - * Revoke specified permission from a group's entry on a file or directory, - * and return the former permission. - * - *

Removes the specified permission from the ones that the primary group - * of the given {@link it.grid.storm.griduser.LocalUser} instance u - * already holds on the given file or directory: all permission bits that are - * set in permission will be cleared in the - * appropriate group entry in the file ACL. - * - *

If no entry is present for the specified group, then one is created and - * its permission value is set to {@link #NONE}. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named group permissions - * + * Revoke specified permission from a group's entry on a file or directory, and return the former + * permission. + * + *

Removes the specified permission from the ones that the primary group of the given {@link + * it.grid.storm.griduser.LocalUser} instance u already holds on the given file or + * directory: all permission bits that are set in permission will be + * cleared in the appropriate group entry in the file ACL. + * + *

If no entry is present for the specified group, then one is created and its permission value + * is set to {@link #NONE}. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * entry is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named group permissions * @param permission Capabilities to revoke. - * - * @return permission formerly associated with the primary GID of the given - * {@link it.grid.storm.griduser.LocalUser} instance u in the given - * file ACL, or null if no entry for that group was found. - * + * @return permission formerly associated with the primary GID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. * @see fs_acl::revoke_group_perm() */ - - FilesystemPermission revokeGroupPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission); + FilesystemPermission revokeGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); /** - * Revoke specified permission from a user's entry on a file or directory, and - * return the former permission. - * - *

Removes the specified permission from the ones that the primary user of - * the given {@link it.grid.storm.griduser.LocalUser} instance u - * already holds on the given file or directory: all permission bits that are - * set in permission will be cleared in the - * appropriate user entry in the file ACL. - * - *

If no entry is present for the specified user, then one is created and - * its permission value is set to {@link #NONE}. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose UID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named user permissions - * + * Revoke specified permission from a user's entry on a file or directory, and return the former + * permission. + * + *

Removes the specified permission from the ones that the primary user of the given {@link + * it.grid.storm.griduser.LocalUser} instance u already holds on the given file or + * directory: all permission bits that are set in permission will be + * cleared in the appropriate user entry in the file ACL. + * + *

If no entry is present for the specified user, then one is created and its permission value + * is set to {@link #NONE}. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose UID's entry + * is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named user permissions * @param permission Capabilities to revoke. - * * @return permission formerly associated with the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that user was found. - * + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that user was found. * @see fs_acl::revoke_user_perm() */ - - FilesystemPermission revokeUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission); + FilesystemPermission revokeUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); /** - * Set the specified permission in a group's entry on a file or directory, and - * return the former permission. - * - *

Sets the entry of the primary group of the given {@link LocalUser} - * instance u to the given permission. Returns the permission - * formerly associated with that group. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose primary GID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named group permissions - * + * Set the specified permission in a group's entry on a file or directory, and return the former + * permission. + * + *

Sets the entry of the primary group of the given {@link LocalUser} instance u to the + * given permission. Returns the permission formerly associated with that group. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose primary GID's + * entry is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named group permissions * @param permission Permission to set in the group entry. - * - * @return permission formerly associated with the primary GID of the given - * {@link it.grid.storm.griduser.LocalUser} instance u in the given - * file ACL, or null if no entry for that group was found. - * + * @return permission formerly associated with the primary GID of the given {@link + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that group was found. * @see fs_acl::set_group_perm() */ - - FilesystemPermission setGroupPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission); + FilesystemPermission setGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); /** - * Set the specified permission in a user's entry on a file or directory, and - * return the former permission. - * - *
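For contrast with grant (which only adds bits), a sketch of set, which replaces the whole group entry; the fs instance, user and path are again assumptions for illustration:

  // Sketch: setGroupPermission overwrites the group entry with exactly these bits,
  // whereas grantGroupPermission would add them to whatever the entry already held.
  FilesystemPermission former =
      fs.setGroupPermission(user, "/storage/example/dir", FilesystemPermission.ListTraverseWrite);
  // "former" is the entry's previous permission, or null if the entry was just created.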

Sets the entry of the primary user of the given {@link LocalUser} - * instance u to the given permission. Returns the permission - * formerly associated with that user. - * - * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) - * whose UID's entry is to be altered. - * - * @param fileOrDirectory pathname to the file or directory whose ACL is to be - * searched for the named user permissions - * + * Set the specified permission in a user's entry on a file or directory, and return the former + * permission. + * + *

Sets the entry of the primary user of the given {@link LocalUser} instance u to the + * given permission. Returns the permission formerly associated with that user. + * + * @param u the local user ({@link it.grid.storm.griduser.LocalUser} instance) whose UID's entry + * is to be altered. + * @param fileOrDirectory pathname to the file or directory whose ACL is to be searched for the + * named user permissions * @param permission Permission to set in the user entry. - * * @return permission formerly associated with the UID of the given {@link - * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, - * or null if no entry for that user was found. - * + * it.grid.storm.griduser.LocalUser} instance u in the given file ACL, or null + * if no entry for that user was found. * @see fs_acl::set_user_perm() */ - - FilesystemPermission setUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission); - -} \ No newline at end of file + FilesystemPermission setUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission); +} diff --git a/src/main/java/it/grid/storm/filesystem/FilesystemPermission.java b/src/main/java/it/grid/storm/filesystem/FilesystemPermission.java index f4463e86..3c2e5ff3 100644 --- a/src/main/java/it/grid/storm/filesystem/FilesystemPermission.java +++ b/src/main/java/it/grid/storm/filesystem/FilesystemPermission.java @@ -1,17 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file FilesystemPermission.java * @author Riccardo Murri - * - * The it.grid.storm.filesystem.FilesystemPermission class. + *

The it.grid.storm.filesystem.FilesystemPermission class. */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ @@ -21,472 +19,423 @@ import it.grid.storm.filesystem.swig.fs_acl; /** - * Provides an abstraction of all operations that can be performed on a - * filesystem entry (file or directory). - * - *

- * Note: this class is an interface to the fs_acl::permission_t type; if the - * low-level fs_acl::permission_t type ever gets modified, then the {@link - * toFsAclPermission()} method should be modified also. - * - *

- * To all effects, instances of this class are immutable. Permissions - * are read off or enforced onto disk files; they should not be altered by StoRM - * code. Still, if there is need for permission manipulation in StoRM, a - * MutableFilesystemPermission derived class can be provided, that promotes to - * public visibility the deny*() and permit*() methods. - * - * + * Provides an abstraction of all operations that can be performed on a filesystem entry (file or + * directory). + * + *

Note: this class is an interface to the fs_acl::permission_t type; if the low-level + * fs_acl::permission_t type ever gets modified, then the {@link toFsAclPermission()} method should + * be modified also. + * + *

To all effects, instances of this class are immutable. Permissions are read off or + * enforced onto disk files; they should not be altered by StoRM code. Still, if there is need for + * permission manipulation in StoRM, a MutableFilesystemPermission derived class can be provided, + * that promotes to public visibility the deny*() and permit*() methods. + * * @see it.grid.storm.authorization.AuthorizationQueryInterface * @see fs_acl::permission_t - * * @author Riccardo Murri * @author EGRID - ICTP Trieste * @version $Revision: 1.19 $ */ public class FilesystemPermission implements java.io.Serializable { - // --- constants used in the bitfield constructor --- // + // --- constants used in the bitfield constructor --- // - /** Permission to execute the file. */ - final static int EXECUTE = fs_acl.permission_flags.PERM_EXECUTE; + /** Permission to execute the file. */ + static final int EXECUTE = fs_acl.permission_flags.PERM_EXECUTE; - /** Permission to write file contents. */ - final static int WRITE_DATA = fs_acl.permission_flags.PERM_WRITE_DATA; + /** Permission to write file contents. */ + static final int WRITE_DATA = fs_acl.permission_flags.PERM_WRITE_DATA; - /** Permission to read file contents. */ - final static int READ_DATA = fs_acl.permission_flags.PERM_READ_DATA; + /** Permission to read file contents. */ + static final int READ_DATA = fs_acl.permission_flags.PERM_READ_DATA; - /** - * Permission to change file extended ACL (that is, beyond normal UNIX - * permission bits). - */ - final static int WRITE_ACL = fs_acl.permission_flags.PERM_WRITE_ACL; + /** Permission to change file extended ACL (that is, beyond normal UNIX permission bits). */ + static final int WRITE_ACL = fs_acl.permission_flags.PERM_WRITE_ACL; - /** - * Permission to read file extended ACL (that is, beyond normal UNIX - * permission bits). - */ - final static int READ_ACL = fs_acl.permission_flags.PERM_READ_ACL; + /** Permission to read file extended ACL (that is, beyond normal UNIX permission bits). */ + static final int READ_ACL = fs_acl.permission_flags.PERM_READ_ACL; - /** Permission to delete a filesystem entry (file or directory). */ - final static int DELETE = fs_acl.permission_flags.PERM_DELETE; + /** Permission to delete a filesystem entry (file or directory). */ + static final int DELETE = fs_acl.permission_flags.PERM_DELETE; - /** Permission to descend to children directories of a directory. */ - final static int TRAVERSE_DIRECTORY = fs_acl.permission_flags.PERM_TRAVERSE_DIRECTORY; - - /** Permission to list directory contents. */ - final static int LIST_DIRECTORY = fs_acl.permission_flags.PERM_LIST_DIRECTORY; - - /** Permission to create a child subdirectory. */ - final static int CREATE_SUBDIRECTORY = fs_acl.permission_flags.PERM_CREATE_SUBDIRECTORY; - - /** Permission to create a new file. */ - final static int CREATE_FILE = fs_acl.permission_flags.PERM_CREATE_FILE; - - /** Permission to delete a file or directory within a directory. */ - final static int DELETE_CHILD = fs_acl.permission_flags.PERM_DELETE_CHILD; - - /** No permission at all. */ - final static int NONE = fs_acl.permission_flags.PERM_NONE; - - /** All permission bits set. */ - final static int ALL = fs_acl.permission_flags.PERM_ALL; - - // --- public constant instances --- // - - /** Permission to read a file. */ - public final static FilesystemPermission Read = new FilesystemPermission( - READ_DATA); - - /** Permission to read and write to a file. 
*/ - public final static FilesystemPermission ReadWrite = new FilesystemPermission( - READ_DATA | WRITE_DATA); - - /** Permission to list directory contents. */ - public final static FilesystemPermission List = new FilesystemPermission( - LIST_DIRECTORY); - - /** - * Permission to traverse directory (descend path where directory is an - * intermediate step). - */ - public final static FilesystemPermission Traverse = new FilesystemPermission( - TRAVERSE_DIRECTORY); - - /** Permission to list and traverse directory. */ - public final static FilesystemPermission ListTraverse = new FilesystemPermission( - LIST_DIRECTORY | TRAVERSE_DIRECTORY); - - /** Permission to list, traverse and write directory. */ - public final static FilesystemPermission ListTraverseWrite = new FilesystemPermission( - LIST_DIRECTORY | TRAVERSE_DIRECTORY | WRITE_DATA); - - /** No permission at all. */ - public final static FilesystemPermission None = new FilesystemPermission( - FilesystemPermission.NONE); - - /** Permission to write. */ - public final static FilesystemPermission Write = new FilesystemPermission( - FilesystemPermission.WRITE_DATA); - - /** - * Permission to create file - */ - public final static FilesystemPermission Create = new FilesystemPermission( - FilesystemPermission.CREATE_FILE); - - /** - * Permission to create subdirectory - */ - public final static FilesystemPermission CreateSubdirectory = new FilesystemPermission( - FilesystemPermission.CREATE_SUBDIRECTORY); - - /** - * Permission to delete file or directory - */ - public final static FilesystemPermission Delete = new FilesystemPermission( - FilesystemPermission.DELETE); - - /** - * - */ - public final static FilesystemPermission ListDirectory = new FilesystemPermission( - FilesystemPermission.LIST_DIRECTORY); - - // --- constructors --- // - - /** - * Copy constructor. Takes another instance of the - * {@link it.grid.storm.filesyste.FilesystemPermission} interface and creates - * an instance of this class granting exactly the same permissions. - */ - public FilesystemPermission(final FilesystemPermission p) { - - this.permission = (p.permission & ALL); - }; - - /** - * Constructor that takes a bitfield of permissions and creates an instance of - * this class granting exactly those permissions. The bitfield - * argument has the same format of the fs_acl::permission_t type, or could be - * constructed by bitwise-OR'ing the READ_DATA, - * WRITE_DATA, ... constants defined elsewhere in this class. For - * any bit that is set in the bitfield argument, the - * corresponding permission will be granted from this object. - * - *

- * Example usage: - * - *

-	 * p = new FilesystemPermission(READ_DATA | WRITE_DATA);
-	 * // p.canReadFile() == true
-	 * // p.canWriteFile() == true
-	 * // p.canCreateNewFile() == false
-	 * 
- * - * @see fs_acl::permission_t - */ - public FilesystemPermission(final int bitfield) { - - this.permission = (bitfield & ALL); - }; - - /** - * Default constructor: creates an instance that denies permission on each and - * every operation. - * - *

- * This constructor's intended usage is in conjunction with the permission - * manipulation functions (in derived classes): - * - *

-	 * p = new FilesystemPermission().permitReadFile().permitWriteFile();
-	 * // p.canReadFile() == true
-	 * // p.canWriteFile() == true
-	 * // p.canCreateNewFile() == false
-	 * 
- */ - protected FilesystemPermission() { - - denyAll(); - } - - // --- permission conversion functions --- // - - /** - * Return an fs_acl::permission_t bitfield representing the same permissions - * that this object encodes. - * - * @see fs_acl::permission_t - */ - public int toFsAclPermission() { - - return permission; - } - - // --- permission test methods --- // - - /** - * Return true if permission is granted to read file contents. - */ - public boolean canReadFile() { - - return 0 != (permission & READ_DATA); - } - - /** - * Return true if permission is granted to write file contents. - * No distinction can be enforced between overwriting contents and appending - * to the file, so no distinction is made here. - */ - public boolean canWriteFile() { - - return 0 != (permission & WRITE_DATA); - } - - /** - * Return true if permission is granted to list directory - * contents. - */ - public boolean canListDirectory() { - - return 0 != (permission & LIST_DIRECTORY); - } - - /** - * Return true if permission is granted to descend to a - * subdirectory. - */ - public boolean canTraverseDirectory() { - - return 0 != (permission & TRAVERSE_DIRECTORY); - } - - /** - * Return true if permission is granted to create a new - * subdirectory. - */ - public boolean canMakeDirectory() { - - return 0 != (permission & CREATE_SUBDIRECTORY); - } - - /** - * Return true if permission is granted to create a new file. - */ - public boolean canCreateNewFile() { - - return 0 != (permission & CREATE_FILE); - } - - /** - * Return true if permission is granted to change filesystem - * entry (file or directory) ACL. - */ - public boolean canChangeAcl() { - - return 0 != (permission & WRITE_ACL); - } - - /** - * Return true if permission is granted to delete entry (file or - * directory). - */ - public boolean canDelete() { - - return 0 != (permission & DELETE); - } - - /** - * Return true if all permissions that are granted by - * other FilesystemPermission instance are also granted by this - * instance. That is, test if other is more restrictive than the this - * instance. - */ - public boolean allows(final FilesystemPermission other) { - - return (other.permission == (this.permission & other.permission)); - } - - /** - * Return true if all permission bits that are set in - * bitfield are also set in this instance. That is, test if - * bitfield represents a more restrictive than the this instance. - */ - public boolean allows(final int bitfield) { - - return (bitfield == (this.permission & bitfield)); - } - - // --- permission manipulation methods --- // - - /** - * Change instance status so that all subsequent can... calls - * will return false. - * - *

- * Returns the instance itself, so that calls to the permission manipulation - * functions can be chained: - * - *

-	 * p = new FilesystemPermission();
-	 * p.denyAll().permitReadFile().permitWriteFile();
-	 * // p.canReadFile() == true
-	 * // p.canWriteFile() == true
-	 * // p.canCreateNewFile() == false
-	 * 
- */ - protected FilesystemPermission denyAll() { - - this.permission = NONE; - return this; - } - - protected FilesystemPermission denyReadFile() { - - permission &= ~READ_DATA; - return this; - } - - protected FilesystemPermission denyWriteFile() { + /** Permission to descend to children directories of a directory. */ + static final int TRAVERSE_DIRECTORY = fs_acl.permission_flags.PERM_TRAVERSE_DIRECTORY; - permission &= ~WRITE_DATA; - return this; - } - - protected FilesystemPermission denyChangeAcl() { + /** Permission to list directory contents. */ + static final int LIST_DIRECTORY = fs_acl.permission_flags.PERM_LIST_DIRECTORY; - permission &= ~WRITE_ACL; - return this; - } + /** Permission to create a child subdirectory. */ + static final int CREATE_SUBDIRECTORY = fs_acl.permission_flags.PERM_CREATE_SUBDIRECTORY; - protected FilesystemPermission denyCreateNewFile() { + /** Permission to create a new file. */ + static final int CREATE_FILE = fs_acl.permission_flags.PERM_CREATE_FILE; - permission &= ~CREATE_FILE; - return this; - } + /** Permission to delete a file or directory within a directory. */ + static final int DELETE_CHILD = fs_acl.permission_flags.PERM_DELETE_CHILD; + + /** No permission at all. */ + static final int NONE = fs_acl.permission_flags.PERM_NONE; + + /** All permission bits set. */ + static final int ALL = fs_acl.permission_flags.PERM_ALL; + + // --- public constant instances --- // + + /** Permission to read a file. */ + public static final FilesystemPermission Read = new FilesystemPermission(READ_DATA); - protected FilesystemPermission denyListDirectory() { + /** Permission to read and write to a file. */ + public static final FilesystemPermission ReadWrite = + new FilesystemPermission(READ_DATA | WRITE_DATA); + + /** Permission to list directory contents. */ + public static final FilesystemPermission List = new FilesystemPermission(LIST_DIRECTORY); + + /** Permission to traverse directory (descend path where directory is an intermediate step). */ + public static final FilesystemPermission Traverse = new FilesystemPermission(TRAVERSE_DIRECTORY); + + /** Permission to list and traverse directory. */ + public static final FilesystemPermission ListTraverse = + new FilesystemPermission(LIST_DIRECTORY | TRAVERSE_DIRECTORY); - permission &= ~LIST_DIRECTORY; - return this; - } + /** Permission to list, traverse and write directory. */ + public static final FilesystemPermission ListTraverseWrite = + new FilesystemPermission(LIST_DIRECTORY | TRAVERSE_DIRECTORY | WRITE_DATA); - protected FilesystemPermission denyTraverseDirectory() { + /** No permission at all. */ + public static final FilesystemPermission None = + new FilesystemPermission(FilesystemPermission.NONE); + + /** Permission to write. 
*/ + public static final FilesystemPermission Write = + new FilesystemPermission(FilesystemPermission.WRITE_DATA); + + /** Permission to create file */ + public static final FilesystemPermission Create = + new FilesystemPermission(FilesystemPermission.CREATE_FILE); - permission &= ~TRAVERSE_DIRECTORY; - return this; - } + /** Permission to create subdirectory */ + public static final FilesystemPermission CreateSubdirectory = + new FilesystemPermission(FilesystemPermission.CREATE_SUBDIRECTORY); - protected FilesystemPermission denyMakeDirectory() { + /** Permission to delete file or directory */ + public static final FilesystemPermission Delete = + new FilesystemPermission(FilesystemPermission.DELETE); - permission &= ~CREATE_SUBDIRECTORY; - return this; - } + /** */ + public static final FilesystemPermission ListDirectory = + new FilesystemPermission(FilesystemPermission.LIST_DIRECTORY); - protected FilesystemPermission denyDelete() { + // --- constructors --- // - permission &= ~DELETE; - return this; - } + /** + * Copy constructor. Takes another instance of the {@link + * it.grid.storm.filesyste.FilesystemPermission} interface and creates an instance of this class + * granting exactly the same permissions. + */ + public FilesystemPermission(final FilesystemPermission p) { - public FilesystemPermission deny(FilesystemPermission other) { + this.permission = (p.permission & ALL); + }; - return new FilesystemPermission(this.permission & ~other.permission); - } + /** + * Constructor that takes a bitfield of permissions and creates an instance of this class granting + * exactly those permissions. The bitfield argument has the same format of the + * fs_acl::permission_t type, or could be constructed by bitwise-OR'ing the READ_DATA + * , WRITE_DATA, ... constants defined elsewhere in this class. For any bit that is + * set in the bitfield argument, the corresponding permission will be granted from + * this object. + * + *

Example usage: + * + *

+   * p = new FilesystemPermission(READ_DATA | WRITE_DATA);
+   * // p.canReadFile() == true
+   * // p.canWriteFile() == true
+   * // p.canCreateNewFile() == false
+   * 
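A minimal sketch of how the bitfield constructor documented above combines with the test methods and allows(); this is illustrative only and assumes the it.grid.storm.filesystem package (and the native fs_acl bindings its constants are initialised from) is available at runtime:

import it.grid.storm.filesystem.FilesystemPermission;

public class PermissionCheckSketch {
  public static void main(String[] args) {
    // READ_DATA/WRITE_DATA are package-private, so outside the package the same
    // bitfield is obtained from the public constant instances via getInt().
    FilesystemPermission p =
        new FilesystemPermission(
            FilesystemPermission.Read.getInt() | FilesystemPermission.Write.getInt());

    System.out.println(p.canReadFile());       // true
    System.out.println(p.canWriteFile());      // true
    System.out.println(p.canCreateNewFile());  // false

    // allows() is a bitwise subset test: true only if every bit granted by
    // the argument is also granted by this instance.
    System.out.println(p.allows(FilesystemPermission.Read));          // true
    System.out.println(p.allows(FilesystemPermission.ListTraverse));  // false
  }
}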
+ * + * @see fs_acl::permission_t + */ + public FilesystemPermission(final int bitfield) { + + this.permission = (bitfield & ALL); + }; + + /** + * Default constructor: creates an instance that denies permission on each and every operation. + * + *

This constructor's intended usage is in conjunction with the permission manipulation + * functions (in derived classes): + * + *

+   * p = new FilesystemPermission().permitReadFile().permitWriteFile();
+   * // p.canReadFile() == true
+   * // p.canWriteFile() == true
+   * // p.canCreateNewFile() == false
+   * 
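As a complement to the constructors above, the bitfield round-trip documented for getInt() further down in this class (and the behaviour of the copy constructor) can be sketched as follows; method names are taken from this file, the example class name is made up:

import it.grid.storm.filesystem.FilesystemPermission;

public class PermissionRoundTripSketch {
  public static void main(String[] args) {
    FilesystemPermission original = FilesystemPermission.ListTraverseWrite;

    // getInt() exposes the raw bitfield; feeding it back into the int
    // constructor yields an equivalent permission object.
    FilesystemPermission rebuilt = new FilesystemPermission(original.getInt());
    System.out.println(rebuilt.allows(original) && original.allows(rebuilt)); // true

    // The copy constructor grants exactly the same permissions.
    FilesystemPermission copy = new FilesystemPermission(original);
    System.out.println(copy.getInt() == original.getInt()); // true
  }
}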
+ */ + protected FilesystemPermission() { + + denyAll(); + } + + // --- permission conversion functions --- // + + /** + * Return an fs_acl::permission_t bitfield representing the same permissions that this object + * encodes. + * + * @see fs_acl::permission_t + */ + public int toFsAclPermission() { + + return permission; + } + + // --- permission test methods --- // + + /** Return true if permission is granted to read file contents. */ + public boolean canReadFile() { + + return 0 != (permission & READ_DATA); + } + + /** + * Return true if permission is granted to write file contents. No distinction can be + * enforced between overwriting contents and appending to the file, so no distinction is made + * here. + */ + public boolean canWriteFile() { + + return 0 != (permission & WRITE_DATA); + } + + /** Return true if permission is granted to list directory contents. */ + public boolean canListDirectory() { + + return 0 != (permission & LIST_DIRECTORY); + } + + /** Return true if permission is granted to descend to a subdirectory. */ + public boolean canTraverseDirectory() { + + return 0 != (permission & TRAVERSE_DIRECTORY); + } + + /** Return true if permission is granted to create a new subdirectory. */ + public boolean canMakeDirectory() { + + return 0 != (permission & CREATE_SUBDIRECTORY); + } + + /** Return true if permission is granted to create a new file. */ + public boolean canCreateNewFile() { + + return 0 != (permission & CREATE_FILE); + } + + /** + * Return true if permission is granted to change filesystem entry (file or + * directory) ACL. + */ + public boolean canChangeAcl() { + + return 0 != (permission & WRITE_ACL); + } + + /** Return true if permission is granted to delete entry (file or directory). */ + public boolean canDelete() { + + return 0 != (permission & DELETE); + } + + /** + * Return true if all permissions that are granted by other + * FilesystemPermission instance are also granted by this instance. That is, test if other + * is more restrictive than the this instance. + */ + public boolean allows(final FilesystemPermission other) { + + return (other.permission == (this.permission & other.permission)); + } + + /** + * Return true if all permission bits that are set in bitfield are also set in + * this instance. That is, test if bitfield represents a more restrictive than the this + * instance. + */ + public boolean allows(final int bitfield) { + + return (bitfield == (this.permission & bitfield)); + } - /** - * Change instance status so that all subsequent can... calls - * will return true. Dangerous, use with caution. - * - *

- * Returns the instance itself, so that calls to the permission manipulation - * functions can be chained: - * - *

-	 * p = new FilesystemPermission();
-	 * p.permitAll().denyDelete().denyRename();
-	 * // p.canReadFile() == true
-	 * // p.canWriteFile() == true
-	 * // p.canDelete() == false
-	 * 
- */ - protected FilesystemPermission permitAll() { + // --- permission manipulation methods --- // - permission = ALL; - return this; - } + /** + * Change instance status so that all subsequent can... calls will return false + * . + * + *

Returns the instance itself, so that calls to the permission manipulation functions can be + * chained: + * + *

+   * p = new FilesystemPermission();
+   * p.denyAll().permitReadFile().permitWriteFile();
+   * // p.canReadFile() == true
+   * // p.canWriteFile() == true
+   * // p.canCreateNewFile() == false
+   * 
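The protected deny*/permit* manipulators that follow are intended for derived classes, as this javadoc notes; a minimal sketch of such a subclass (ReadOnlyPermission is a hypothetical name, not part of StoRM) could look like:

import it.grid.storm.filesystem.FilesystemPermission;

// Hypothetical subclass: builds a fixed read-only-style permission by chaining
// the protected manipulators that only derived classes can reach.
public class ReadOnlyPermission extends FilesystemPermission {

  public ReadOnlyPermission() {
    super(); // the protected no-arg constructor starts from denyAll(), i.e. NONE
    permitReadFile().permitListDirectory().permitTraverseDirectory();
  }
}

An instance of this sketch would answer canReadFile() and canListDirectory() with true, and canWriteFile() with false.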
+ */ + protected FilesystemPermission denyAll() { - protected FilesystemPermission permitReadFile() { + this.permission = NONE; + return this; + } - permission |= READ_DATA; - return this; - } + protected FilesystemPermission denyReadFile() { - protected FilesystemPermission permitWriteFile() { + permission &= ~READ_DATA; + return this; + } - permission |= WRITE_DATA; - return this; - } + protected FilesystemPermission denyWriteFile() { - protected FilesystemPermission permitChangeAcl() { + permission &= ~WRITE_DATA; + return this; + } - permission |= WRITE_ACL; - return this; - } + protected FilesystemPermission denyChangeAcl() { - protected FilesystemPermission permitCreateNewFile() { + permission &= ~WRITE_ACL; + return this; + } - permission |= CREATE_FILE; - return this; - } + protected FilesystemPermission denyCreateNewFile() { - protected FilesystemPermission permitListDirectory() { + permission &= ~CREATE_FILE; + return this; + } - permission |= LIST_DIRECTORY; - return this; - } + protected FilesystemPermission denyListDirectory() { - protected FilesystemPermission permitTraverseDirectory() { + permission &= ~LIST_DIRECTORY; + return this; + } - permission |= TRAVERSE_DIRECTORY; - return this; - } + protected FilesystemPermission denyTraverseDirectory() { - protected FilesystemPermission permitMakeDirectory() { + permission &= ~TRAVERSE_DIRECTORY; + return this; + } - permission |= CREATE_SUBDIRECTORY; - return this; - } + protected FilesystemPermission denyMakeDirectory() { - protected FilesystemPermission permitDelete() { + permission &= ~CREATE_SUBDIRECTORY; + return this; + } - permission |= DELETE; - return this; - } + protected FilesystemPermission denyDelete() { - // --- internal status flags --- // + permission &= ~DELETE; + return this; + } - /** - * Method that returns an int representing This FilesystemPermission. It can - * be used as argument to FilesystemPermission constructor to get back an - * equivalent filesystemPermission Object. - */ - public int getInt() { + public FilesystemPermission deny(FilesystemPermission other) { - return permission; - } + return new FilesystemPermission(this.permission & ~other.permission); + } - /** - * The permission set bitfield. Must match the type and representation used in - * fs_acl::permission_t: no conversion is done, the code just assumes that it - * can pass the value back and forth from Java to C++. - */ - protected int permission; + /** + * Change instance status so that all subsequent can... calls will return true + * . Dangerous, use with caution. + * + *

Returns the instance itself, so that calls to the permission manipulation functions can be + * chained: + * + *

+   * p = new FilesystemPermission();
+   * p.permitAll().denyDelete();
+   * // p.canReadFile() == true
+   * // p.canWriteFile() == true
+   * // p.canDelete() == false
+   * 
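Alongside permitAll(), the public deny(FilesystemPermission) method shown a little earlier in this class returns a new instance with the other permission's bits cleared; a small illustration under the same assumptions as the sketches above:

import it.grid.storm.filesystem.FilesystemPermission;

public class PermissionSubtractionSketch {
  public static void main(String[] args) {
    // Start from list + traverse + write and subtract the write bit.
    FilesystemPermission listTraverse =
        FilesystemPermission.ListTraverseWrite.deny(FilesystemPermission.Write);

    System.out.println(listTraverse.canListDirectory());      // true
    System.out.println(listTraverse.canTraverseDirectory());  // true
    System.out.println(listTraverse.canWriteFile());          // false
  }
}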
+ */ + protected FilesystemPermission permitAll() { - public String toString() { + permission = ALL; + return this; + } - return Integer.valueOf(permission).toString(); - } + protected FilesystemPermission permitReadFile() { + + permission |= READ_DATA; + return this; + } + + protected FilesystemPermission permitWriteFile() { + + permission |= WRITE_DATA; + return this; + } + + protected FilesystemPermission permitChangeAcl() { + + permission |= WRITE_ACL; + return this; + } + + protected FilesystemPermission permitCreateNewFile() { + + permission |= CREATE_FILE; + return this; + } + + protected FilesystemPermission permitListDirectory() { + + permission |= LIST_DIRECTORY; + return this; + } + + protected FilesystemPermission permitTraverseDirectory() { + + permission |= TRAVERSE_DIRECTORY; + return this; + } + + protected FilesystemPermission permitMakeDirectory() { + + permission |= CREATE_SUBDIRECTORY; + return this; + } + + protected FilesystemPermission permitDelete() { + + permission |= DELETE; + return this; + } + + // --- internal status flags --- // + + /** + * Method that returns an int representing This FilesystemPermission. It can be used as argument + * to FilesystemPermission constructor to get back an equivalent filesystemPermission Object. + */ + public int getInt() { + + return permission; + } + + /** + * The permission set bitfield. Must match the type and representation used in + * fs_acl::permission_t: no conversion is done, the code just assumes that it can pass the value + * back and forth from Java to C++. + */ + protected int permission; + + public String toString() { + + return Integer.valueOf(permission).toString(); + } } diff --git a/src/main/java/it/grid/storm/filesystem/GPFSSpaceSystem.java b/src/main/java/it/grid/storm/filesystem/GPFSSpaceSystem.java index 440083cd..4d8cbdba 100644 --- a/src/main/java/it/grid/storm/filesystem/GPFSSpaceSystem.java +++ b/src/main/java/it/grid/storm/filesystem/GPFSSpaceSystem.java @@ -1,98 +1,93 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import it.grid.storm.filesystem.swig.gpfs; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Class that represents a SpaceSystem that is able to use native GPFS support - * to carry out space reservation operations. - * + * Class that represents a SpaceSystem that is able to use native GPFS support to carry out space + * reservation operations. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date May 2006 */ public class GPFSSpaceSystem implements SpaceSystem { - private gpfs fs = null; // instance of filesystem that will be used to invoke - // native operation! - private static Logger log = LoggerFactory.getLogger(GPFSSpaceSystem.class); - - public GPFSSpaceSystem(String mountpoint) throws SpaceSystemException { - - if (mountpoint == null) { - throw new SpaceSystemException("Supplied mountpoint is null!"); - } - try { - this.fs = new gpfs(mountpoint); - } catch (Exception e) { - throw new SpaceSystemException( - "Unable to instantiate GPFS filesystem on " + mountpoint - + "; exception: " + e); - } - } - - /** - * Method that follows the contract specified in SpaceSystem Interface: please - * refer there for further info. 
- */ - public long reserveSpace(String pathToFile, long size) - throws ReservationException { - - try { - log.debug("GPFSSpaceSystem : pathToFile: {}" , pathToFile); - fs.prealloc(pathToFile, size); - return size; - } catch (Exception e) { - String explanation = "An exception was thrown by the native underlying filesystem: " - + e.toString(); - log.debug(explanation); - throw new ReservationException(explanation); - } - } - - /** - * Method that follows the contract specified in SpaceSystem Interface: please - * refer there for further info. - */ - public long compactSpace(String pathToFile) throws ReservationException { - - String explanation = "Compact Space operation currently not supported!"; - log.debug(explanation); - throw new ReservationException(explanation); - } - - /** - * Method that follows the contract specified in SpaceSystem Interface: please - * refer there for further info. - */ - public void removeSpace(String pathToFile) throws ReservationException { - - String explanation = "Remove Space operation currently not supported!"; - log.debug(explanation); - throw new ReservationException(explanation); - } - - /** - * Method that follows the contract specified in SpaceSystem Interface: please - * refer there for further info. - */ - public long changeSize(String pathToFile, long newSize) - throws ReservationException { - - String explanation = "Change Size operation currently not supported!"; - log.debug(explanation); - throw new ReservationException(explanation); - } - - @Override - public String toString() { - - return "GPFSSpaceSystem"; - } + private gpfs fs = null; // instance of filesystem that will be used to invoke + // native operation! + private static Logger log = LoggerFactory.getLogger(GPFSSpaceSystem.class); + + public GPFSSpaceSystem(String mountpoint) throws SpaceSystemException { + + if (mountpoint == null) { + throw new SpaceSystemException("Supplied mountpoint is null!"); + } + try { + this.fs = new gpfs(mountpoint); + } catch (Exception e) { + throw new SpaceSystemException( + "Unable to instantiate GPFS filesystem on " + mountpoint + "; exception: " + e); + } + } + + /** + * Method that follows the contract specified in SpaceSystem Interface: please refer there for + * further info. + */ + public long reserveSpace(String pathToFile, long size) throws ReservationException { + + try { + log.debug("GPFSSpaceSystem : pathToFile: {}", pathToFile); + fs.prealloc(pathToFile, size); + return size; + } catch (Exception e) { + String explanation = + "An exception was thrown by the native underlying filesystem: " + e.toString(); + log.debug(explanation); + throw new ReservationException(explanation); + } + } + + /** + * Method that follows the contract specified in SpaceSystem Interface: please refer there for + * further info. + */ + public long compactSpace(String pathToFile) throws ReservationException { + + String explanation = "Compact Space operation currently not supported!"; + log.debug(explanation); + throw new ReservationException(explanation); + } + + /** + * Method that follows the contract specified in SpaceSystem Interface: please refer there for + * further info. + */ + public void removeSpace(String pathToFile) throws ReservationException { + + String explanation = "Remove Space operation currently not supported!"; + log.debug(explanation); + throw new ReservationException(explanation); + } + + /** + * Method that follows the contract specified in SpaceSystem Interface: please refer there for + * further info. 
+ */ + public long changeSize(String pathToFile, long newSize) throws ReservationException { + + String explanation = "Change Size operation currently not supported!"; + log.debug(explanation); + throw new ReservationException(explanation); + } + + @Override + public String toString() { + + return "GPFSSpaceSystem"; + } } diff --git a/src/main/java/it/grid/storm/filesystem/InvalidSpaceAttributesException.java b/src/main/java/it/grid/storm/filesystem/InvalidSpaceAttributesException.java index 2fa7c673..9e05b477 100644 --- a/src/main/java/it/grid/storm/filesystem/InvalidSpaceAttributesException.java +++ b/src/main/java/it/grid/storm/filesystem/InvalidSpaceAttributesException.java @@ -1,81 +1,78 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; -import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.common.types.SizeUnit; +import it.grid.storm.srm.types.TSizeInBytes; /** - * Class that represents an Exception thrown by the Space constructor if any of - * the supplied parameters are null, or totalSize is Empty, or guaranteedSize is - * greater than totalSize. - * + * Class that represents an Exception thrown by the Space constructor if any of the supplied + * parameters are null, or totalSize is Empty, or guaranteedSize is greater than totalSize. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date May 2006 */ public class InvalidSpaceAttributesException extends Exception { - private boolean nullGuarSize = false; // boolean true if garanteedSize is null - private boolean nullTotSize = false; // boolean true if totalSize is null - private boolean nullSpaFil = false; // boolean true if spaceFile is null - private boolean nullSS = false; // boolean true if SpaceSystem is null - private boolean emptyTotSize = false; // boolean true if totalSize is Empty - private boolean greater = false; // boolean true if guaranteedSize and - // totalSize are not null, not empty, and - // when interpreted as double of size BYTE - // it is _guaranteedSize_ that is GREATER - // than _totalSize_ - private double guaranteed = -1.0; // double that gets set only if (greater) is - // true, and represents _guaranteedSize_ - // expressed in bytes. - private double total = -1.0; // double that gets set only if (greater) is - // true, and represents _totalSize_ expressed in - // bytes. + private boolean nullGuarSize = false; // boolean true if garanteedSize is null + private boolean nullTotSize = false; // boolean true if totalSize is null + private boolean nullSpaFil = false; // boolean true if spaceFile is null + private boolean nullSS = false; // boolean true if SpaceSystem is null + private boolean emptyTotSize = false; // boolean true if totalSize is Empty + private boolean greater = false; // boolean true if guaranteedSize and + // totalSize are not null, not empty, and + // when interpreted as double of size BYTE + // it is _guaranteedSize_ that is GREATER + // than _totalSize_ + private double guaranteed = -1.0; // double that gets set only if (greater) is + // true, and represents _guaranteedSize_ + // expressed in bytes. + private double total = -1.0; // double that gets set only if (greater) is + // true, and represents _totalSize_ expressed in + // bytes. 
- public InvalidSpaceAttributesException(TSizeInBytes guaranteedSize, - TSizeInBytes totalSize, LocalFile spaceFile, SpaceSystem ss) { + public InvalidSpaceAttributesException( + TSizeInBytes guaranteedSize, TSizeInBytes totalSize, LocalFile spaceFile, SpaceSystem ss) { - nullGuarSize = guaranteedSize == null; - nullTotSize = totalSize == null; - nullSpaFil = spaceFile == null; - nullSS = ss == null; - emptyTotSize = (!nullTotSize) && totalSize.isEmpty(); - greater = (!nullGuarSize) - && (!nullTotSize) - && (!guaranteedSize.isEmpty()) - && (!totalSize.isEmpty()) - && (guaranteedSize.getSizeIn(SizeUnit.BYTES) > totalSize - .getSizeIn(SizeUnit.BYTES)); - if (greater) { - guaranteed = guaranteedSize.getSizeIn(SizeUnit.BYTES); - total = totalSize.getSizeIn(SizeUnit.BYTES); - } - } + nullGuarSize = guaranteedSize == null; + nullTotSize = totalSize == null; + nullSpaFil = spaceFile == null; + nullSS = ss == null; + emptyTotSize = (!nullTotSize) && totalSize.isEmpty(); + greater = + (!nullGuarSize) + && (!nullTotSize) + && (!guaranteedSize.isEmpty()) + && (!totalSize.isEmpty()) + && (guaranteedSize.getSizeIn(SizeUnit.BYTES) > totalSize.getSizeIn(SizeUnit.BYTES)); + if (greater) { + guaranteed = guaranteedSize.getSizeIn(SizeUnit.BYTES); + total = totalSize.getSizeIn(SizeUnit.BYTES); + } + } - @Override - public String toString() { + @Override + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("InvalidSpaceAttributesException: nullGuaranteedSize="); - sb.append(nullGuarSize); - sb.append("; nullTotalSize="); - sb.append(nullTotSize); - sb.append("; nullSpaceFile="); - sb.append(nullSpaFil); - sb.append("; nullSpaceSystem="); - sb.append(nullSS); - sb.append("; emptyTotalSize="); - sb.append(emptyTotSize); - sb.append("; guaranteedSize greater than totalSize is "); - sb.append(greater); - if (greater) - sb.append(" with guaranteed="); - sb.append(guaranteed); - sb.append(" and total="); - sb.append(total); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("InvalidSpaceAttributesException: nullGuaranteedSize="); + sb.append(nullGuarSize); + sb.append("; nullTotalSize="); + sb.append(nullTotSize); + sb.append("; nullSpaceFile="); + sb.append(nullSpaFil); + sb.append("; nullSpaceSystem="); + sb.append(nullSS); + sb.append("; emptyTotalSize="); + sb.append(emptyTotSize); + sb.append("; guaranteedSize greater than totalSize is "); + sb.append(greater); + if (greater) sb.append(" with guaranteed="); + sb.append(guaranteed); + sb.append(" and total="); + sb.append(total); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/filesystem/LocalFile.java b/src/main/java/it/grid/storm/filesystem/LocalFile.java index 00efcdee..306f0df3 100644 --- a/src/main/java/it/grid/storm/filesystem/LocalFile.java +++ b/src/main/java/it/grid/storm/filesystem/LocalFile.java @@ -1,764 +1,661 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file File.java * @author Riccardo Murri * @author EGRID - ICTP Trieste, for subsequent Modifications. - * - * The it.grid.storm.filesystem.File class + *

The it.grid.storm.filesystem.File class */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ package it.grid.storm.filesystem; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Maps; - import it.grid.storm.checksum.ChecksumAlgorithm; import it.grid.storm.checksum.ChecksumManager; import it.grid.storm.ea.StormEA; import it.grid.storm.griduser.CannotMapUserException; import it.grid.storm.griduser.LocalUser; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * Façade for operations on a filesystem entry (file or directory). All - * operations on the filesystem should be performed by creating an instance of - * the {@link it.grid.storm.filesystem.File} class, and using its methods to - * create or modify a filesystem entry. From {@link java.io.File} the following - * methods are inherited: {@link java.io.File#delete() delete}, - * {@link java.io.File#exists() exists}, {@link java.io.File#getAbsolutePath() - * getAbsolutePath}, {@link java.io.File#getName() getName}, - * {@link java.io.File#isDirectory() isDirectory}, {@link java.io.File#isFile() - * isFile}, {@link java.io.File#list() list}, {@link java.io.File#mkdir() mkdir} - * , {@link java.io.File#mkdirs() mkdirs}; the methods - * {@link java.io.File#getParentFile() getParentFile} and - * {@link java.io.File#listFiles() listFiles} have been overridden to return - * {@link it.grid.storm.filesystem.File} objects instead of {@link java.io.File} - * ones. Additional methods are provided to manipulate this file's ACL in a - * filesystem-indepenent way; all ACL operations take a LocalUser identity (see - * {@link it.grid.storm.griduser.LocalUser}) and a {@link FilesystemPermission} - * permission representation. A pathname need not exist for an associated - * it.grid.storm.filesystem.File object to be created; indeed, you should create - * an instance associated to a non existing pathname and then invoke - * {@link #create()} or {@link #makeDirectory()} to create a file or a - * directory, respectively. The constructor for this class requires a pathname - * and a {@link Filesystem} object. The passed {@link Filesystem} object need to - * match the filesystem-type of the filesystem the passed pathname resides on. - * Therefore, {@link it.grid.storm.filesystem.File} objects can only be created - * from a factory method that has all the required pieces of information; - * {@link it.grid.storm.namespace.StoRI} looks like the right class for that. - * + * Façade for operations on a filesystem entry (file or directory). All operations on the filesystem + * should be performed by creating an instance of the {@link it.grid.storm.filesystem.File} class, + * and using its methods to create or modify a filesystem entry. 
From {@link java.io.File} the + * following methods are inherited: {@link java.io.File#delete() delete}, {@link + * java.io.File#exists() exists}, {@link java.io.File#getAbsolutePath() getAbsolutePath}, {@link + * java.io.File#getName() getName}, {@link java.io.File#isDirectory() isDirectory}, {@link + * java.io.File#isFile() isFile}, {@link java.io.File#list() list}, {@link java.io.File#mkdir() + * mkdir} , {@link java.io.File#mkdirs() mkdirs}; the methods {@link java.io.File#getParentFile() + * getParentFile} and {@link java.io.File#listFiles() listFiles} have been overridden to return + * {@link it.grid.storm.filesystem.File} objects instead of {@link java.io.File} ones. Additional + * methods are provided to manipulate this file's ACL in a filesystem-indepenent way; all ACL + * operations take a LocalUser identity (see {@link it.grid.storm.griduser.LocalUser}) and a {@link + * FilesystemPermission} permission representation. A pathname need not exist for an associated + * it.grid.storm.filesystem.File object to be created; indeed, you should create an instance + * associated to a non existing pathname and then invoke {@link #create()} or {@link + * #makeDirectory()} to create a file or a directory, respectively. The constructor for this class + * requires a pathname and a {@link Filesystem} object. The passed {@link Filesystem} object need to + * match the filesystem-type of the filesystem the passed pathname resides on. Therefore, {@link + * it.grid.storm.filesystem.File} objects can only be created from a factory method that has all the + * required pieces of information; {@link it.grid.storm.namespace.StoRI} looks like the right class + * for that. + * * @see java.io.File * @see it.grid.storm.namespace.StoRI * @author Riccardo Murri * @author EGRID - ICTP Trieste, for minor changes. * @version $Revision: 1.8 $ - **/ + */ public class LocalFile { - - private static final Logger log = LoggerFactory.getLogger(LocalFile.class); - - /** The Filesystem interface to operate on the wrapped pathname. */ - private final FilesystemIF fs; - - private final java.io.File localFile; - - - /** - * Constructor, taking parent pathname (as a {@link File}), child name (as a - * String ) and the hosting filesystem object. From parent - * and child an absolute pathname must result, following the rules in - * {@link java.io.File#File(java.io.File,String)}. - * - * @param parent - * {@link File} instance of the parent directory - * @param child - * pathname of the child - * @param fs - * The {@link it.grid.storm.filesystem.Filesystem} object to use for - * operations on this file. - * @see it.grid.storm.filesystem.Filesystem - * @see java.io.File#File(java.io.File,String) - */ - public LocalFile(final LocalFile parent, final String name, - final FilesystemIF fs) throws NullPointerException { - - localFile = new java.io.File(parent.localFile, name); - - assert (localFile.isAbsolute()) : "Non-absolute path in constructor File(File,String,Filesystem)"; - assert (null != fs) : "Null filesystem in constructor File(File,String,Filesystem)"; - - this.fs = fs; - } - - /** - * Constructor, taking string pathname and the hosting filesystem object. The - * path parameter must be a non-empty absolute pathname. - * - * @param pathname - * The pathname wrapped by this PFN; must be absolute, or - * code will fail in an assertion. - * @param fs - * The {@link it.grid.storm.filesystem.Filesystem} object to use for - * operations on this file. 
- * @see it.grid.storm.filesystem.Filesystem - */ - public LocalFile(final String pathname, final FilesystemIF fs) - throws NullPointerException { - - localFile = new java.io.File(pathname); - - assert (localFile.isAbsolute()) : "Non-absolute path in constructor File(String,Filesystem)"; - assert (null != fs) : "Null Filesystem in constructor File(String,Filesystem)"; - - this.fs = fs; - } - - /** - * Constructor, taking parent pathname (as a String), child name - * (as a String) and the hosting filesystem object. From - * parent and child an absolute pathname must result, following - * the rules in {@link java.io.File#File(String,String)}. - * - * @param parent - * pathname of the parent directory - * @param child - * pathname of the child - * @param fs - * The {@link it.grid.storm.filesystem.Filesystem} object to use for - * operations on this file. - * @see it.grid.storm.filesystem.Filesystem - * @see java.io.File#File(String,String) - */ - public LocalFile(final String parent, final String name, final FilesystemIF fs) - throws NullPointerException { - - localFile = new java.io.File(parent, name); - - assert (localFile.isAbsolute()) : "Non-absolute path in constructor File(String,String,Filesystem)"; - assert (null != fs) : "Null Filesystem in constructor File(String,String,Filesystem)"; - - this.fs = fs; - } - - // ---- public accessor methods ---- - - /** - * Return true if the local user (to which the specified grid - * user is mapped to) can operate on the specified - * fileOrDirectory in the mode given by accessMode, - * according to the permissions set on the filesystem. - */ - public boolean canAccess(final LocalUser u, - final FilesystemPermission accessMode) throws CannotMapUserException { - - return fs.canAccess(u, localFile.getAbsolutePath(), accessMode); - } - - /** - * Method that creates a new empty file, as per contract of java.io.File: - * refer there for further info. - */ - public boolean createNewFile() throws IOException, SecurityException { - - return localFile.createNewFile(); - } - - /** - * Method that deletes This file, as per contract of java.io.File: refer there - * for further info. - */ - public boolean delete() throws SecurityException { - - return localFile.delete(); - } - - /** - * Method that checks for the existence of This file, as per contract of - * java.io.File: refer there for further info. - */ - public boolean exists() throws SecurityException { - - return localFile.exists(); - } - - /** - * Returns the absolute pathname string, as per contract of - * {@link java.io.File}. - */ - public String getAbsolutePath() { - - return localFile.getAbsolutePath(); - } - - /** - * Retrieves the checksum of the file from the corresponding extended - * attribute. If no checksum is found it is computed (scheduled and computed - * by a separate thread) and stored in an extended attribute. - * - * @return the checksum of the file. - */ - public String getDefaultChecksum() { - - try { - return ChecksumManager.getInstance().getDefaultChecksum( - localFile.getAbsolutePath()); - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - return null; - } - - } - - public Map getChecksums() { - - try { - return ChecksumManager.getInstance().getChecksums( - localFile.getAbsolutePath()); - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - return Maps.newHashMap(); - } - - } - - /** - * Returns the algorithm used to compute checksums (as defined in the - * configuration file). 
- * - * @return - */ - public ChecksumAlgorithm getChecksumAlgorithm() { - - return ChecksumManager.getInstance().getDefaultAlgorithm(); - } - - /** - * Return the effective permission a group has on this file. Loads - * the ACL for this file or directory, and return the permission associated - * with the local account primary group of the given {@link LocalUser} - * instance u. If no ACE for that group is found, return - * {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose local account primary GID's permissions are to - * be retrieved. - * @return effective permission associated to the local account - * primary GID of the given LocalUser u in the given file ACL, - * or null if no ACL entry for that group was found. - */ - public FilesystemPermission getEffectiveGroupPermission(final LocalUser u) { - - return fs.getEffectiveGroupPermission(u, localFile.getAbsolutePath()); - } - - /** - * Return the effective permission a user has on this file. Loads the - * ACL for this file or directory, and return the permission associated with - * the local account UID of the given LocalUser u. If no ACE for that - * user is found, return {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose permissions are to be retrieved. - * @return effective permission associated to the local account UID - * of the given LocalUser u in this file ACL, or - * null if no ACL entry for that user was found. - */ - public FilesystemPermission getEffectiveUserPermission(final LocalUser u) { - - return fs.getEffectiveUserPermission(u, localFile.getAbsolutePath()); - } - - /** - * Return up-to-date file last modification time, as a UNIX epoch. Returned - * value may differ from the size returned by - * {@link java.io.File#lastModified()} on filesystems that do metadata caching - * (GPFS, for instance). Since it may force a metadata update on all cluster - * nodes, this method may be slow. - * - * @return time (seconds since the epoch) this file was last modified. - * @see #lastModified() - * @see #getLastModifiedTime() - */ - public long getExactLastModifiedTime() { - - return fs.getExactLastModifiedTime(localFile.getAbsolutePath()); - } - - /** - * Return up-to-date file size in bytes. Returned value may differ from the - * size returned by {@link java.io.File#length()} on filesystems that do - * metadata caching (GPFS, for instance). Since it may force a metadata update - * on all cluster nodes, this method may be slow. - * - * @return size (in bytes) of this file - * @see #length() - * @see #getExactSize() - */ - public long getExactSize() { - - return fs.getExactSize(localFile.getAbsolutePath()); - } - - /** - * Return the permission a group has on this file. Loads the ACL for this file - * or directory, and return the permission associated with the local account - * primary group of the given {@link LocalUser} instance u. If no ACE - * for that group is found, return {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose local account primary GID's permissions are to - * be retrieved. - * @return permission associated to the local account primary GID of the given - * LocalUser u in the given file ACL. or - * {@link Filesystem#NONE} if no ACE for that group was found. - */ - public FilesystemPermission getGroupPermission(final LocalUser u) { - - return fs.getGroupPermission(u, localFile.getAbsolutePath()); - } - - /** - * Return (possibly cached) file last modification time, as a UNIX epoch. 
Same - * as {@link #lastModified()}, although this uses the host Filesystem native - * stat() -like calls. May return inaccurate results, as some - * filesystems (notably GPFS) provide the choice between filesystem-specific - * calls for accurate reporting (slower, as it may imply synchronizing the - * metadata cache) or standard libc calls that report possibly outdated - * information. - * - * @return time (seconds since the epoch) this file was last modified. - * @see #lastModified() - * @see #getExactLastModifiedTime() - */ - public long getLastModifiedTime() { - - return fs.getLastModifiedTime(localFile.getAbsolutePath()); - } - - // overridden from java.io.File to change return value - public LocalFile getParentFile() { - - File parent = localFile.getParentFile(); - if (parent == null) { - return null; - } - return new LocalFile(parent.getAbsolutePath(), this.fs); - } - - /** - * Method that returns a String representing the path of This file, as per - * contract of java.io.File: refer there for further info. - */ - public String getPath() { - - return localFile.getPath(); - } - - /** - * Return the (possibly outdated) size in bytes of this file. Same as - * {@link #length()}, although this uses the host Filesystem native - * stat()-like calls. May return inaccurate results, as some - * filesystems (notably GPFS) provide the choice between filesystem-specific - * calls for accurate reporting (slower, as it may imply synchronizing the - * metadata cache) or standard libc calls that report possibly outdated - * information. - * - * @return size (in bytes) of this file - * @see #length() - * @see #getExactSize() - */ - public long getSize() { - - File file = new File(localFile.getAbsolutePath()); - return file.exists() ? file.length() : 0; - } - - /** - * Return the permission a user has on this file. Loads the ACL for this file - * or directory, and return the permission associated with the local account - * UID of the given LocalUser u. If no ACE for that user is found, - * return {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose permissions are to be retrieved. - * @return permission associated to the local account UID of the given - * LocalUser u in this file ACL, or {@link Filesystem#NONE} if - * no ACE for that user was found. - */ - public FilesystemPermission getUserPermission(final LocalUser u) { - - return fs.getUserPermission(u, localFile.getAbsolutePath()); - } - - /** - * Grant specified permission to a group, and return the former permission. - *

- * Adds the specified permission to the ones that the primary group of the - * given LocalUser u already holds on this file or directory: all - * permission bits that are set in permission will be set in the - * appropriate group ACE in the file ACL. - *

- * If no ACE is present for the specified group, then one is created and its - * permission value is set to permission. - * - * @param u - * the LocalUser whose local account primary GID's ACE is to be - * altered. - * @param permission - * Capabilities to grant. - * @return permission formerly associated to the local account primary GID of - * the given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that group was found. - */ - public FilesystemPermission grantGroupPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.grantGroupPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Grant specified permission to a user, and return the former permission. - *

- * Adds the specified permissions to the ones that the local account UID of - * the given LocalUser u already holds on this file or directory: all - * permission bits that are set in permission will be set in the - * appropriate user ACE in the file ACL. - *

- * If no ACE is present for the specified user, then one is created and its - * permission value is set to permission. - * - * @param u - * the LocalUser whose local account UID's ACE is to be altered. - * @param permission - * Capabilities to grant. - * @return permission formerly associated to the local account UID of the - * given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that user was found. - */ - public FilesystemPermission grantUserPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.grantUserPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Check if the file checksum is already set. - * - * @return true if the checksum attribute is set, - * false otherwise. - */ - public boolean hasDefaultChecksum() { - try { - return ChecksumManager.getInstance().hasDefaultChecksum( - localFile.getAbsolutePath()); - } catch (FileNotFoundException e) { - log.warn("File not found when checking checksum: {}", e.getMessage(), e); - return false; - } - } - - public boolean isDirectory() throws SecurityException { - return localFile.isDirectory(); - } - - /** - * Returns true is the file is present on the disk, - * false otherwise. - * - * @return true is the file is present on the disk, - * false otherwise. - */ - public boolean isOnDisk() throws FSException { - - final boolean isOnDisk = fs.isFileOnDisk(localFile.getAbsolutePath()); - - if (log.isDebugEnabled()){ - log.debug("File {} is {} on disk.", - localFile.getAbsolutePath(), - (isOnDisk ? "": "NOT")); - } - - return isOnDisk; - } - - /** - * Returns true is the file is stored on the tape, - * false otherwise. - * - * @return true is the file is stored on the tape, - * false otherwise. - */ - public boolean isOnTape() { - - return StormEA.getMigrated(localFile.getAbsolutePath()); - } - - /** - * Method that returns the size in bytes of This file, as per contract of - * java.io.File: refer there for further info. - */ - public long length() throws SecurityException { - - return localFile.length(); - } - - // overridden from java.io.File to change return value - public LocalFile[] listFiles() { - - java.io.File[] _children = localFile.listFiles(); - LocalFile[] children = new LocalFile[_children.length]; - for (int i = 0; i < _children.length; ++i) { - children[i] = new LocalFile(_children[i].getAbsolutePath(), this.fs); - } - return children; - } - - /** - * Method that creates a new directory, as per contract of java.io.File: refer - * there for further info. - */ - public boolean mkdir() throws SecurityException { - - return localFile.mkdir(); - } - - /** - * Method that creates a new directory, as per contract of java.io.File: refer - * there for further info. - */ - public boolean mkdirs() throws SecurityException { - - return localFile.mkdirs(); - } - - /** - * Return true if the parent directory of this pathname exists. - */ - public boolean parentExists() { - - java.io.File parent = localFile.getParentFile(); - assert (null != parent) : "Null parent in " + this.toString(); - return parent.exists(); - } - - /** - * Remove a group's ACE, and return the (now deleted) permission. - *

- * Removes the ACE (if any) of the primary group of the given LocalUser - * u from this file or directory ACL. Returns the permission formerly - * associated with that group. - *

- * If the given group is the file owning group, then its ACE is set to - * {@link Filesystem#NONE}, rather than removed. - * - * @param u - * the LocalUser whose local account primary GID's ACE is to be - * altered. - * @return permission formerly associated to the local account primary GID of - * the given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that group was found. - */ - public FilesystemPermission removeGroupPermission(final LocalUser u) { - - return fs.removeGroupPermission(u, localFile.getAbsolutePath()); - } - - /** - * Remove a user's ACE, and return the (now deleted) permission. - *

- * Removes the ACE (if any) of the primary user of the given LocalUser - * u from this file or directory ACL. Returns the permission formerly - * associated with that user. - *

- * If the given user is the file owner, then its ACE is set to - * {@link Filesystem#NONE}, rather than removed. - * - * @param u - * the LocalUser whose local account UID's ACE is to be altered. - * @return permission formerly associated to the local account UID of the - * given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that user was found. - */ - public FilesystemPermission removeUserPermission(final LocalUser u) { - - return fs.removeUserPermission(u, localFile.getAbsolutePath()); - } - - /** - * Method that renames This file, as per contract of java.io.File: refer there - * for further info. The only notable difference is that this method requires - * a String rather than java.io.File parameter. - */ - public boolean renameTo(String newName) throws SecurityException, - NullPointerException { - - return localFile.renameTo(new File(newName)); - } - - /** - * Revoke specified permission from a group's ACE, and return the former - * permission. - *

- * Removes the specified permission from the ones that the primary group of - * the given LocalUser u local account already holds on this file or - * directory: all permission bits that are set in permission - * will be cleared in the appropriate group ACE in the file ACL. - *

- * If no ACE is present for the specified group, then one is created and its - * permission value is set to {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose local account primary GID's ACE is to be - * altered. - * @param permission - * Capabilities to revoke. - * @return permission formerly associated to the local account primary GID of - * the given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that group was found. - * @see fs_acl::revoke_group_perm() - */ - public FilesystemPermission revokeGroupPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.revokeGroupPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Revoke specified permission from a user's ACE, and return the former - * permission. - *

- * Removes the specified permission from the ones that the primary user of the - * given LocalUser u local account already holds on this file or - * directory: all permission bits that are set in permission - * will be cleared in the appropriate user ACE in the file ACL. - *

- * If no ACE is present for the specified user, then one is created and its - * permission value is set to {@link Filesystem#NONE}. - * - * @param u - * the LocalUser whose local account UID's ACE is to be altered. - * @param permission - * Capabilities to revoke. - * @return permission formerly associated to the local account UID of the - * given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that user was found. - */ - public FilesystemPermission revokeUserPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.revokeUserPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Change file group. - * - * @param groupName - * name of the group - * @return true if the group was correctly set, - * false otherwise - */ - public void setGroupOwnership(String groupName) throws FSException { - - try { - - fs.changeFileGroupOwnership(localFile.getAbsolutePath(), groupName); - - } catch(FilesystemError filesystemError) { - - throw new FSException(filesystemError); - - } - } - - /** - * Set the specified permission in a group's ACE, and return the former - * permission. - *

- * Sets the ACE of the primary group of the given LocalUser u to the - * given permission. Returns the permission formerly associated with - * that group. - * - * @param u - * the localUser whose local account primary GID's ACE is to be - * altered. - * @param permission - * Permission to set in the group ACE. - * @return permission formerly associated to the local account primary GID of - * the given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that group was found. - */ - public FilesystemPermission setGroupPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.setGroupPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Set the specified permission in a user's ACE on a file or directory, and - * return the former permission. - *

- * Sets the ACE of the primary user of the given LocalUser u to the - * given permission. Returns the permission formerly associated with - * that user. - * - * @param u - * the Grid user whose local account UID's ACE is to be altered. - * @param permission - * Permission to set in the user ACE. - * @return permission formerly associated to the local account UID of the - * given LocalUser u in this file ACL, or - * {@link Filesystem#NONE} if no ACE for that user was found. - */ - public FilesystemPermission setUserPermission(final LocalUser u, - final FilesystemPermission permission) { - - return fs.setUserPermission(u, localFile.getAbsolutePath(), permission); - } - - /** - * Return a string representation of this object. - */ - @Override - public String toString() { - - return File.class.toString() + ":" + localFile.toString(); - } - - /** - * Truncate the file to the desired size - * - * @param desired_size - * @return - */ - - public int truncateFile(long desired_size) { - - return fs.truncateFile(localFile.getAbsolutePath(), desired_size); - } - - /** - * Return the unique absolute canonical path of the file - */ - public String getCanonicalPath() throws IOException { - - return localFile.getCanonicalPath(); - } - + private static final Logger log = LoggerFactory.getLogger(LocalFile.class); + + /** The Filesystem interface to operate on the wrapped pathname. */ + private final FilesystemIF fs; + + private final java.io.File localFile; + + /** + * Constructor, taking parent pathname (as a {@link File}), child name (as a String ) + * and the hosting filesystem object. From parent and child an absolute pathname + * must result, following the rules in {@link java.io.File#File(java.io.File,String)}. + * + * @param parent {@link File} instance of the parent directory + * @param child pathname of the child + * @param fs The {@link it.grid.storm.filesystem.Filesystem} object to use for operations on this + * file. + * @see it.grid.storm.filesystem.Filesystem + * @see java.io.File#File(java.io.File,String) + */ + public LocalFile(final LocalFile parent, final String name, final FilesystemIF fs) + throws NullPointerException { + + localFile = new java.io.File(parent.localFile, name); + + assert (localFile.isAbsolute()) + : "Non-absolute path in constructor File(File,String,Filesystem)"; + assert (null != fs) : "Null filesystem in constructor File(File,String,Filesystem)"; + + this.fs = fs; + } + + /** + * Constructor, taking string pathname and the hosting filesystem object. The path + * parameter must be a non-empty absolute pathname. + * + * @param pathname The pathname wrapped by this PFN; must be absolute, or code will fail + * in an assertion. + * @param fs The {@link it.grid.storm.filesystem.Filesystem} object to use for operations on this + * file. + * @see it.grid.storm.filesystem.Filesystem + */ + public LocalFile(final String pathname, final FilesystemIF fs) throws NullPointerException { + + localFile = new java.io.File(pathname); + + assert (localFile.isAbsolute()) : "Non-absolute path in constructor File(String,Filesystem)"; + assert (null != fs) : "Null Filesystem in constructor File(String,Filesystem)"; + + this.fs = fs; + } + + /** + * Constructor, taking parent pathname (as a String), child name (as a String + * ) and the hosting filesystem object. From parent and child an absolute + * pathname must result, following the rules in {@link java.io.File#File(String,String)}. 
+ * + * @param parent pathname of the parent directory + * @param child pathname of the child + * @param fs The {@link it.grid.storm.filesystem.Filesystem} object to use for operations on this + * file. + * @see it.grid.storm.filesystem.Filesystem + * @see java.io.File#File(String,String) + */ + public LocalFile(final String parent, final String name, final FilesystemIF fs) + throws NullPointerException { + + localFile = new java.io.File(parent, name); + + assert (localFile.isAbsolute()) + : "Non-absolute path in constructor File(String,String,Filesystem)"; + assert (null != fs) : "Null Filesystem in constructor File(String,String,Filesystem)"; + + this.fs = fs; + } + + // ---- public accessor methods ---- + + /** + * Return true if the local user (to which the specified grid user is mapped to) can + * operate on the specified fileOrDirectory in the mode given by accessMode + * , according to the permissions set on the filesystem. + */ + public boolean canAccess(final LocalUser u, final FilesystemPermission accessMode) + throws CannotMapUserException { + + return fs.canAccess(u, localFile.getAbsolutePath(), accessMode); + } + + /** + * Method that creates a new empty file, as per contract of java.io.File: refer there for further + * info. + */ + public boolean createNewFile() throws IOException, SecurityException { + + return localFile.createNewFile(); + } + + /** + * Method that deletes This file, as per contract of java.io.File: refer there for further info. + */ + public boolean delete() throws SecurityException { + + return localFile.delete(); + } + + /** + * Method that checks for the existence of This file, as per contract of java.io.File: refer there + * for further info. + */ + public boolean exists() throws SecurityException { + + return localFile.exists(); + } + + /** Returns the absolute pathname string, as per contract of {@link java.io.File}. */ + public String getAbsolutePath() { + + return localFile.getAbsolutePath(); + } + + /** + * Retrieves the checksum of the file from the corresponding extended attribute. If no checksum is + * found it is computed (scheduled and computed by a separate thread) and stored in an extended + * attribute. + * + * @return the checksum of the file. + */ + public String getDefaultChecksum() { + + try { + return ChecksumManager.getInstance().getDefaultChecksum(localFile.getAbsolutePath()); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + return null; + } + } + + public Map getChecksums() { + + try { + return ChecksumManager.getInstance().getChecksums(localFile.getAbsolutePath()); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + return Maps.newHashMap(); + } + } + + /** + * Returns the algorithm used to compute checksums (as defined in the configuration file). + * + * @return + */ + public ChecksumAlgorithm getChecksumAlgorithm() { + + return ChecksumManager.getInstance().getDefaultAlgorithm(); + } + + /** + * Return the effective permission a group has on this file. Loads the ACL for this file + * or directory, and return the permission associated with the local account primary group of the + * given {@link LocalUser} instance u. If no ACE for that group is found, return {@link + * Filesystem#NONE}. + * + * @param u the LocalUser whose local account primary GID's permissions are to be retrieved. + * @return effective permission associated to the local account primary GID of the given + * LocalUser u in the given file ACL, or null if no ACL entry for that + * group was found. 
+ */ + public FilesystemPermission getEffectiveGroupPermission(final LocalUser u) { + + return fs.getEffectiveGroupPermission(u, localFile.getAbsolutePath()); + } + + /** + * Return the effective permission a user has on this file. Loads the ACL for this file + * or directory, and return the permission associated with the local account UID of the given + * LocalUser u. If no ACE for that user is found, return {@link Filesystem#NONE}. + * + * @param u the LocalUser whose permissions are to be retrieved. + * @return effective permission associated to the local account UID of the given + * LocalUser u in this file ACL, or null if no ACL entry for that user was + * found. + */ + public FilesystemPermission getEffectiveUserPermission(final LocalUser u) { + + return fs.getEffectiveUserPermission(u, localFile.getAbsolutePath()); + } + + /** + * Return up-to-date file last modification time, as a UNIX epoch. Returned value may differ from + * the size returned by {@link java.io.File#lastModified()} on filesystems that do metadata + * caching (GPFS, for instance). Since it may force a metadata update on all cluster nodes, this + * method may be slow. + * + * @return time (seconds since the epoch) this file was last modified. + * @see #lastModified() + * @see #getLastModifiedTime() + */ + public long getExactLastModifiedTime() { + + return fs.getExactLastModifiedTime(localFile.getAbsolutePath()); + } + + /** + * Return up-to-date file size in bytes. Returned value may differ from the size returned by + * {@link java.io.File#length()} on filesystems that do metadata caching (GPFS, for instance). + * Since it may force a metadata update on all cluster nodes, this method may be slow. + * + * @return size (in bytes) of this file + * @see #length() + * @see #getExactSize() + */ + public long getExactSize() { + + return fs.getExactSize(localFile.getAbsolutePath()); + } + + /** + * Return the permission a group has on this file. Loads the ACL for this file or directory, and + * return the permission associated with the local account primary group of the given {@link + * LocalUser} instance u. If no ACE for that group is found, return {@link + * Filesystem#NONE}. + * + * @param u the LocalUser whose local account primary GID's permissions are to be retrieved. + * @return permission associated to the local account primary GID of the given LocalUser u + * in the given file ACL. or {@link Filesystem#NONE} if no ACE for that group was found. + */ + public FilesystemPermission getGroupPermission(final LocalUser u) { + + return fs.getGroupPermission(u, localFile.getAbsolutePath()); + } + + /** + * Return (possibly cached) file last modification time, as a UNIX epoch. Same as {@link + * #lastModified()}, although this uses the host Filesystem native stat() -like + * calls. May return inaccurate results, as some filesystems (notably GPFS) provide the choice + * between filesystem-specific calls for accurate reporting (slower, as it may imply synchronizing + * the metadata cache) or standard libc calls that report possibly outdated information. + * + * @return time (seconds since the epoch) this file was last modified. 
+ * @see #lastModified() + * @see #getExactLastModifiedTime() + */ + public long getLastModifiedTime() { + + return fs.getLastModifiedTime(localFile.getAbsolutePath()); + } + + // overridden from java.io.File to change return value + public LocalFile getParentFile() { + + File parent = localFile.getParentFile(); + if (parent == null) { + return null; + } + return new LocalFile(parent.getAbsolutePath(), this.fs); + } + + /** + * Method that returns a String representing the path of This file, as per contract of + * java.io.File: refer there for further info. + */ + public String getPath() { + + return localFile.getPath(); + } + + /** + * Return the (possibly outdated) size in bytes of this file. Same as {@link #length()}, although + * this uses the host Filesystem native stat()-like calls. May return inaccurate + * results, as some filesystems (notably GPFS) provide the choice between filesystem-specific + * calls for accurate reporting (slower, as it may imply synchronizing the metadata cache) or + * standard libc calls that report possibly outdated information. + * + * @return size (in bytes) of this file + * @see #length() + * @see #getExactSize() + */ + public long getSize() { + + File file = new File(localFile.getAbsolutePath()); + return file.exists() ? file.length() : 0; + } + + /** + * Return the permission a user has on this file. Loads the ACL for this file or directory, and + * return the permission associated with the local account UID of the given LocalUser u. If + * no ACE for that user is found, return {@link Filesystem#NONE}. + * + * @param u the LocalUser whose permissions are to be retrieved. + * @return permission associated to the local account UID of the given LocalUser u in this + * file ACL, or {@link Filesystem#NONE} if no ACE for that user was found. + */ + public FilesystemPermission getUserPermission(final LocalUser u) { + + return fs.getUserPermission(u, localFile.getAbsolutePath()); + } + + /** + * Grant specified permission to a group, and return the former permission. + * + *

Adds the specified permission to the ones that the primary group of the given LocalUser + * u already holds on this file or directory: all permission bits that are set in + * permission will be set in the appropriate group ACE in the file ACL. + * + *

If no ACE is present for the specified group, then one is created and its permission value + * is set to permission. + * + * @param u the LocalUser whose local account primary GID's ACE is to be altered. + * @param permission Capabilities to grant. + * @return permission formerly associated to the local account primary GID of the given LocalUser + * u in this file ACL, or {@link Filesystem#NONE} if no ACE for that group was found. + */ + public FilesystemPermission grantGroupPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.grantGroupPermission(u, localFile.getAbsolutePath(), permission); + } + + /** + * Grant specified permission to a user, and return the former permission. + * + *

Adds the specified permissions to the ones that the local account UID of the given LocalUser + * u already holds on this file or directory: all permission bits that are set in + * permission will be set in the appropriate user ACE in the file ACL. + * + *

If no ACE is present for the specified user, then one is created and its permission value is + * set to permission. + * + * @param u the LocalUser whose local account UID's ACE is to be altered. + * @param permission Capabilities to grant. + * @return permission formerly associated to the local account UID of the given LocalUser u + * in this file ACL, or {@link Filesystem#NONE} if no ACE for that user was found. + */ + public FilesystemPermission grantUserPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.grantUserPermission(u, localFile.getAbsolutePath(), permission); + } + + /** + * Check if the file checksum is already set. + * + * @return true if the checksum attribute is set, false otherwise. + */ + public boolean hasDefaultChecksum() { + try { + return ChecksumManager.getInstance().hasDefaultChecksum(localFile.getAbsolutePath()); + } catch (FileNotFoundException e) { + log.warn("File not found when checking checksum: {}", e.getMessage(), e); + return false; + } + } + + public boolean isDirectory() throws SecurityException { + return localFile.isDirectory(); + } + + /** + * Returns true is the file is present on the disk, false otherwise. + * + * @return true is the file is present on the disk, false otherwise. + */ + public boolean isOnDisk() throws FSException { + + final boolean isOnDisk = fs.isFileOnDisk(localFile.getAbsolutePath()); + + if (log.isDebugEnabled()) { + log.debug("File {} is {} on disk.", localFile.getAbsolutePath(), (isOnDisk ? "" : "NOT")); + } + + return isOnDisk; + } + + /** + * Returns true is the file is stored on the tape, false otherwise. + * + * @return true is the file is stored on the tape, false otherwise. + */ + public boolean isOnTape() { + + return StormEA.getMigrated(localFile.getAbsolutePath()); + } + + /** + * Method that returns the size in bytes of This file, as per contract of java.io.File: refer + * there for further info. + */ + public long length() throws SecurityException { + + return localFile.length(); + } + + // overridden from java.io.File to change return value + public LocalFile[] listFiles() { + + java.io.File[] _children = localFile.listFiles(); + LocalFile[] children = new LocalFile[_children.length]; + for (int i = 0; i < _children.length; ++i) { + children[i] = new LocalFile(_children[i].getAbsolutePath(), this.fs); + } + return children; + } + + /** + * Method that creates a new directory, as per contract of java.io.File: refer there for further + * info. + */ + public boolean mkdir() throws SecurityException { + + return localFile.mkdir(); + } + + /** + * Method that creates a new directory, as per contract of java.io.File: refer there for further + * info. + */ + public boolean mkdirs() throws SecurityException { + + return localFile.mkdirs(); + } + + /** Return true if the parent directory of this pathname exists. */ + public boolean parentExists() { + + java.io.File parent = localFile.getParentFile(); + assert (null != parent) : "Null parent in " + this.toString(); + return parent.exists(); + } + + /** + * Remove a group's ACE, and return the (now deleted) permission. + * + *
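Because the grant/revoke/set accessors above all share the same delegation pattern, a single hedged sketch covers them; the LocalUser, the target permission and the LocalFile instance are assumed to be available already (placeholders only):

    // Sketch only: grant a permission to a user's ACE and read back what is in force.
    void grantAndVerify(LocalFile file, LocalUser user, FilesystemPermission wanted)
        throws CannotMapUserException {
      FilesystemPermission before = file.grantUserPermission(user, wanted);     // former ACE value
      boolean allowed = file.canAccess(user, wanted);                           // checks the mapped local account
      FilesystemPermission effective = file.getEffectiveUserPermission(user);   // ACE actually in force
      if (!allowed) {
        System.err.println("expected " + wanted + ", effective " + effective + " (was " + before + ")");
      }
    }

The group-oriented variants (grantGroupPermission, getEffectiveGroupPermission, and so on) are used the same way, acting on the primary GID of the mapped local account.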

Removes the ACE (if any) of the primary group of the given LocalUser u from this file + * or directory ACL. Returns the permission formerly associated with that group. + * + *

If the given group is the file owning group, then its ACE is set to {@link Filesystem#NONE}, + * rather than removed. + * + * @param u the LocalUser whose local account primary GID's ACE is to be altered. + * @return permission formerly associated to the local account primary GID of the given LocalUser + * u in this file ACL, or {@link Filesystem#NONE} if no ACE for that group was found. + */ + public FilesystemPermission removeGroupPermission(final LocalUser u) { + + return fs.removeGroupPermission(u, localFile.getAbsolutePath()); + } + + /** + * Remove a user's ACE, and return the (now deleted) permission. + * + *

Removes the ACE (if any) of the primary user of the given LocalUser u from this file + * or directory ACL. Returns the permission formerly associated with that user. + * + *

If the given user is the file owner, then its ACE is set to {@link Filesystem#NONE}, rather + * than removed. + * + * @param u the LocalUser whose local account UID's ACE is to be altered. + * @return permission formerly associated to the local account UID of the given LocalUser u + * in this file ACL, or {@link Filesystem#NONE} if no ACE for that user was found. + */ + public FilesystemPermission removeUserPermission(final LocalUser u) { + + return fs.removeUserPermission(u, localFile.getAbsolutePath()); + } + + /** + * Method that renames This file, as per contract of java.io.File: refer there for further info. + * The only notable difference is that this method requires a String rather than java.io.File + * parameter. + */ + public boolean renameTo(String newName) throws SecurityException, NullPointerException { + + return localFile.renameTo(new File(newName)); + } + + /** + * Revoke specified permission from a group's ACE, and return the former permission. + * + *

Removes the specified permission from the ones that the primary group of the given LocalUser + * u local account already holds on this file or directory: all permission bits that are + * set in permission will be cleared in the appropriate group ACE in the + * file ACL. + * + *

If no ACE is present for the specified group, then one is created and its permission value + * is set to {@link Filesystem#NONE}. + * + * @param u the LocalUser whose local account primary GID's ACE is to be altered. + * @param permission Capabilities to revoke. + * @return permission formerly associated to the local account primary GID of the given LocalUser + * u in this file ACL, or {@link Filesystem#NONE} if no ACE for that group was found. + * @see fs_acl::revoke_group_perm() + */ + public FilesystemPermission revokeGroupPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.revokeGroupPermission(u, localFile.getAbsolutePath(), permission); + } + + /** + * Revoke specified permission from a user's ACE, and return the former permission. + * + *

Removes the specified permission from the ones that the primary user of the given LocalUser + * u local account already holds on this file or directory: all permission bits that are + * set in permission will be cleared in the appropriate user ACE in the + * file ACL. + * + *

If no ACE is present for the specified user, then one is created and its permission value is + * set to {@link Filesystem#NONE}. + * + * @param u the LocalUser whose local account UID's ACE is to be altered. + * @param permission Capabilities to revoke. + * @return permission formerly associated to the local account UID of the given LocalUser u + * in this file ACL, or {@link Filesystem#NONE} if no ACE for that user was found. + */ + public FilesystemPermission revokeUserPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.revokeUserPermission(u, localFile.getAbsolutePath(), permission); + } + + /** + * Change file group. + * + * @param groupName name of the group + * @throws FSException if the group ownership cannot be changed + */ + public void setGroupOwnership(String groupName) throws FSException { + + try { + + fs.changeFileGroupOwnership(localFile.getAbsolutePath(), groupName); + + } catch (FilesystemError filesystemError) { + + throw new FSException(filesystemError); + } + } + + /** + * Set the specified permission in a group's ACE, and return the former permission. + * + *

Sets the ACE of the primary group of the given LocalUser u to the given + * permission. Returns the permission formerly associated with that group. + * + * @param u the localUser whose local account primary GID's ACE is to be altered. + * @param permission Permission to set in the group ACE. + * @return permission formerly associated to the local account primary GID of the given LocalUser + * u in this file ACL, or {@link Filesystem#NONE} if no ACE for that group was found. + */ + public FilesystemPermission setGroupPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.setGroupPermission(u, localFile.getAbsolutePath(), permission); + } + + /** + * Set the specified permission in a user's ACE on a file or directory, and return the former + * permission. + * + *

Sets the ACE of the primary user of the given LocalUser u to the given + * permission. Returns the permission formerly associated with that user. + * + * @param u the Grid user whose local account UID's ACE is to be altered. + * @param permission Permission to set in the user ACE. + * @return permission formerly associated to the local account UID of the given LocalUser u + * in this file ACL, or {@link Filesystem#NONE} if no ACE for that user was found. + */ + public FilesystemPermission setUserPermission( + final LocalUser u, final FilesystemPermission permission) { + + return fs.setUserPermission(u, localFile.getAbsolutePath(), permission); + } + + /** Return a string representation of this object. */ + @Override + public String toString() { + + return File.class.toString() + ":" + localFile.toString(); + } + + /** + * Truncate the file to the desired size + * + * @param desired_size + * @return + */ + public int truncateFile(long desired_size) { + + return fs.truncateFile(localFile.getAbsolutePath(), desired_size); + } + + /** Return the unique absolute canonical path of the file */ + public String getCanonicalPath() throws IOException { + + return localFile.getCanonicalPath(); + } } diff --git a/src/main/java/it/grid/storm/filesystem/MetricsFilesystemAdapter.java b/src/main/java/it/grid/storm/filesystem/MetricsFilesystemAdapter.java index 7e6e0fc7..cb6f9263 100644 --- a/src/main/java/it/grid/storm/filesystem/MetricsFilesystemAdapter.java +++ b/src/main/java/it/grid/storm/filesystem/MetricsFilesystemAdapter.java @@ -1,12 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; - import it.grid.storm.griduser.LocalUser; public class MetricsFilesystemAdapter implements FilesystemIF { @@ -29,7 +27,6 @@ public String getOpName() { return opName; } - } final FilesystemIF delegate; @@ -46,18 +43,12 @@ public MetricsFilesystemAdapter(FilesystemIF fs, MetricRegistry r) { delegate = fs; registry = r; - fileAttributeAccessTimer = registry - .timer(FilesystemMetric.FILE_ATTRIBUTE_OP.getOpName()); - fileOnDiskTimer = registry - .timer(FilesystemMetric.FILE_ONDISK_OP.getOpName()); - aclOperationTimer = registry - .timer(FilesystemMetric.FILE_ACL_OP.getOpName()); - fileTruncateTimer = registry - .timer(FilesystemMetric.FILE_TRUNCATE_OP.getOpName()); - fileOwnershipTimer = registry - .timer(FilesystemMetric.FILE_CHOWN_OP.getOpName()); - getFreeSpaceTimer = registry - .timer(FilesystemMetric.GET_FREE_SPACE_OP.getOpName()); + fileAttributeAccessTimer = registry.timer(FilesystemMetric.FILE_ATTRIBUTE_OP.getOpName()); + fileOnDiskTimer = registry.timer(FilesystemMetric.FILE_ONDISK_OP.getOpName()); + aclOperationTimer = registry.timer(FilesystemMetric.FILE_ACL_OP.getOpName()); + fileTruncateTimer = registry.timer(FilesystemMetric.FILE_TRUNCATE_OP.getOpName()); + fileOwnershipTimer = registry.timer(FilesystemMetric.FILE_CHOWN_OP.getOpName()); + getFreeSpaceTimer = registry.timer(FilesystemMetric.GET_FREE_SPACE_OP.getOpName()); } public long getSize(String file) { @@ -69,7 +60,6 @@ public long getSize(String file) { } finally { context.stop(); } - } public long getLastModifiedTime(String fileOrDirectory) { @@ -92,7 +82,6 @@ public long getExactSize(String file) { } finally { context.stop(); } - } public long 
getExactLastModifiedTime(String fileOrDirectory) { @@ -123,7 +112,6 @@ public boolean isFileOnDisk(String filename) { } finally { context.stop(); } - } public long getFileBlockSize(String filename) { @@ -135,7 +123,6 @@ public long getFileBlockSize(String filename) { } finally { context.stop(); } - } public void changeFileGroupOwnership(String filename, String groupName) { @@ -156,11 +143,9 @@ public long getFreeSpace() { } finally { context.stop(); } - } - public boolean canAccess(LocalUser u, String fileOrDirectory, - FilesystemPermission accessMode) { + public boolean canAccess(LocalUser u, String fileOrDirectory, FilesystemPermission accessMode) { final Timer.Context context = aclOperationTimer.time(); @@ -169,11 +154,9 @@ public boolean canAccess(LocalUser u, String fileOrDirectory, } finally { context.stop(); } - } - public FilesystemPermission getEffectiveGroupPermission(LocalUser u, - String fileOrDirectory) { + public FilesystemPermission getEffectiveGroupPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); @@ -182,11 +165,9 @@ public FilesystemPermission getEffectiveGroupPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission getEffectiveUserPermission(LocalUser u, - String fileOrDirectory) { + public FilesystemPermission getEffectiveUserPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); @@ -195,11 +176,9 @@ public FilesystemPermission getEffectiveUserPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission getGroupPermission(LocalUser u, - String fileOrDirectory) { + public FilesystemPermission getGroupPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); @@ -208,11 +187,9 @@ public FilesystemPermission getGroupPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission getUserPermission(LocalUser u, - String fileOrDirectory) { + public FilesystemPermission getUserPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); @@ -221,11 +198,10 @@ public FilesystemPermission getUserPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission grantGroupPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission grantGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); @@ -234,11 +210,10 @@ public FilesystemPermission grantGroupPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission grantUserPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission grantUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); try { @@ -248,8 +223,7 @@ public FilesystemPermission grantUserPermission(LocalUser u, } } - public FilesystemPermission removeGroupPermission(LocalUser u, - String fileOrDirectory) { + public FilesystemPermission removeGroupPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); try { @@ -257,11 +231,9 @@ public FilesystemPermission removeGroupPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission removeUserPermission(LocalUser u, - String fileOrDirectory) { 
+ public FilesystemPermission removeUserPermission(LocalUser u, String fileOrDirectory) { final Timer.Context context = aclOperationTimer.time(); try { @@ -269,11 +241,10 @@ public FilesystemPermission removeUserPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission revokeGroupPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission revokeGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); try { @@ -283,8 +254,8 @@ public FilesystemPermission revokeGroupPermission(LocalUser u, } } - public FilesystemPermission revokeUserPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission revokeUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); try { @@ -294,8 +265,8 @@ public FilesystemPermission revokeUserPermission(LocalUser u, } } - public FilesystemPermission setGroupPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission setGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); try { @@ -303,11 +274,10 @@ public FilesystemPermission setGroupPermission(LocalUser u, } finally { context.stop(); } - } - public FilesystemPermission setUserPermission(LocalUser u, - String fileOrDirectory, FilesystemPermission permission) { + public FilesystemPermission setUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { final Timer.Context context = aclOperationTimer.time(); try { @@ -316,5 +286,4 @@ public FilesystemPermission setUserPermission(LocalUser u, context.stop(); } } - } diff --git a/src/main/java/it/grid/storm/filesystem/MockSpaceSystem.java b/src/main/java/it/grid/storm/filesystem/MockSpaceSystem.java index da30e135..b230df42 100644 --- a/src/main/java/it/grid/storm/filesystem/MockSpaceSystem.java +++ b/src/main/java/it/grid/storm/filesystem/MockSpaceSystem.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import java.io.IOException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,7 +11,7 @@ * Class that represents a SpaceSystem that always acknowledges a successful space reservation; it * is meant to be used with filesystems that do not support natively space reservation operations. * It acts as a mock object! - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date June 2006 @@ -21,8 +19,8 @@ public class MockSpaceSystem implements SpaceSystem { private String mountpoint = ""; // String representing the local mount point, - // that is the root from which This - // SpaceSystem operates! + // that is the root from which This + // SpaceSystem operates! 
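The MetricsFilesystemAdapter hunks above all reduce to one Dropwizard Metrics idiom: obtain a Timer.Context before delegating and stop it in a finally block so the sample is recorded even when the delegate throws. A condensed sketch of that idiom, with a placeholder registry and metric name:

    // Sketch only: the timing-decorator idiom used throughout MetricsFilesystemAdapter.
    import com.codahale.metrics.MetricRegistry;
    import com.codahale.metrics.Timer;

    class TimedOperation {

      private final Timer timer;

      TimedOperation(MetricRegistry registry) {
        timer = registry.timer("filesystem.example-op");   // placeholder metric name
      }

      long run(java.util.function.LongSupplier delegateCall) {
        final Timer.Context context = timer.time();   // start measuring
        try {
          return delegateCall.getAsLong();            // forward to the wrapped FilesystemIF
        } finally {
          context.stop();                             // always record the elapsed time
        }
      }
    }

A call then looks like new TimedOperation(registry).run(() -> delegate.getSize(path)), which mirrors the getSize wrapper above.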
private static Logger log = LoggerFactory.getLogger(MockSpaceSystem.class); public MockSpaceSystem(String mountpoint) throws SpaceSystemException { @@ -44,8 +42,8 @@ public long reserveSpace(String pathToFile, long size) throws ReservationExcepti localFile.createNewFile(); } catch (IOException e) { log.error(e.getMessage(), e); - throw new ReservationException("IO exception while creating local File named : " + pathToFile, - e); + throw new ReservationException( + "IO exception while creating local File named : " + pathToFile, e); } catch (SecurityException e) { log.error(e.getMessage(), e); throw new ReservationException( diff --git a/src/main/java/it/grid/storm/filesystem/MtabRow.java b/src/main/java/it/grid/storm/filesystem/MtabRow.java index 62e1b789..2f6c08b3 100644 --- a/src/main/java/it/grid/storm/filesystem/MtabRow.java +++ b/src/main/java/it/grid/storm/filesystem/MtabRow.java @@ -1,167 +1,176 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; import java.util.ArrayList; import java.util.List; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class MtabRow { - private final String device; - private final String mountPoint; - private final String fileSystem; - private final List mountOptions = new ArrayList(); - private final boolean dump; - private final int fsckOrderPosition; - - public MtabRow(String device, String mountPoint, String fileSystem, - List mountOptions, boolean dump, int fsckOrderPosition) { - - if (device == null || device.trim().equals("") || mountPoint == null - || mountPoint.trim().equals("") || fileSystem == null - || fileSystem.trim().equals("")) { - throw new IllegalArgumentException( - "Received invalid arguments : device = " + device + " mountPoint = " - + mountPoint + " fileSystem = " + fileSystem); - } - this.device = device; - this.mountPoint = mountPoint; - this.fileSystem = fileSystem; - this.mountOptions.addAll(mountOptions); - this.dump = dump; - this.fsckOrderPosition = fsckOrderPosition; - } - - /** - * Build from ordered parameter list - * - * @param elementsList - * [device , mountPoint, fileSystem, mountOptions, dump, - * fsckOrderPosition] - * @throws IllegalArgumentException - */ - public MtabRow(List elementsList) throws IllegalArgumentException { - - if (elementsList.size() < 5) { - throw new IllegalArgumentException( - "Received an argument list of few than 5 elements (mandatory) : " - + elementsList.toString()); - } - for (int i = 0; i < 5; i++) { - /* - * all the arguments must be strings not empty apart from the one at index - * 4 (mount options) - */ - if ((elementsList.get(i) == null || elementsList.get(i).trim().length() == 0) - && i != 3) { - throw new IllegalArgumentException( - "Received an argument list where element at index " + i - + " is invalid : \'" + elementsList.get(i) + "\'"); - } - } - this.device = elementsList.get(0); - this.mountPoint = elementsList.get(1); - this.fileSystem = elementsList.get(2); - if (elementsList.get(3) != null && elementsList.get(3).trim().length() > 0) { - String[] mountOptionsArray = elementsList.get(3).trim().split(","); - for (String mountOption : mountOptionsArray) { - if (mountOption.trim().length() > 0) { - mountOptions.add(mountOption.trim()); - } - } - } - Integer dumpValue; - try { - dumpValue = Integer.parseInt(elementsList.get(4)); - } catch 
(NumberFormatException e) { - throw new IllegalArgumentException( - "Received an illegal argument at index 4. " - + "It as to be an integer (the dump value), received value : " - + elementsList.get(4) + " . NumberFormatException : " - + e.getMessage()); - } - - if (dumpValue != 1 && dumpValue != 0) { - throw new IllegalArgumentException( - "Received an illegal argument at index 4. " - + "It can be only 0 or 1, received value : " + elementsList.get(4)); - } - this.dump = (dumpValue == 0 ? false : true); - try { - this.fsckOrderPosition = Integer.parseInt(elementsList.get(5)); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "Received an illegal argument at index 5. " - + "It as to be an integer (the fsck order position value), received value : " - + elementsList.get(4) + " . NumberFormatException : " - + e.getMessage()); - } - } - - /** - * @return the device - */ - public final String getDevice() { - - return device; - } - - /** - * @return the mountPoint - */ - public final String getMountPoint() { - - return mountPoint; - } - - /** - * @return the fileSystem - */ - public final String getFileSystem() { - - return fileSystem; - } - - /** - * @return the mountOptions - */ - public final List getMountOptions() { - - return mountOptions; - } - - /** - * @return the dump - */ - public final boolean isDump() { - - return dump; - } - - /** - * @return the fsckOrderPosition - */ - public final int getFsckOrderPosition() { - - return fsckOrderPosition; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - return "MtabRow [device=" + device + ", mountPoint=" + mountPoint - + ", fileSystem=" + fileSystem + ", mountOptions=" + mountOptions - + ", dump=" + dump + ", fsckOrderPosition=" + fsckOrderPosition + "]"; - } + private final String device; + private final String mountPoint; + private final String fileSystem; + private final List mountOptions = new ArrayList(); + private final boolean dump; + private final int fsckOrderPosition; + + public MtabRow( + String device, + String mountPoint, + String fileSystem, + List mountOptions, + boolean dump, + int fsckOrderPosition) { + + if (device == null + || device.trim().equals("") + || mountPoint == null + || mountPoint.trim().equals("") + || fileSystem == null + || fileSystem.trim().equals("")) { + throw new IllegalArgumentException( + "Received invalid arguments : device = " + + device + + " mountPoint = " + + mountPoint + + " fileSystem = " + + fileSystem); + } + this.device = device; + this.mountPoint = mountPoint; + this.fileSystem = fileSystem; + this.mountOptions.addAll(mountOptions); + this.dump = dump; + this.fsckOrderPosition = fsckOrderPosition; + } + + /** + * Build from ordered parameter list + * + * @param elementsList [device , mountPoint, fileSystem, mountOptions, dump, fsckOrderPosition] + * @throws IllegalArgumentException + */ + public MtabRow(List elementsList) throws IllegalArgumentException { + + if (elementsList.size() < 5) { + throw new IllegalArgumentException( + "Received an argument list of few than 5 elements (mandatory) : " + + elementsList.toString()); + } + for (int i = 0; i < 5; i++) { + /* + * all the arguments must be strings not empty apart from the one at index + * 4 (mount options) + */ + if ((elementsList.get(i) == null || elementsList.get(i).trim().length() == 0) && i != 3) { + throw new IllegalArgumentException( + "Received an argument list where element at index " + + i + + " is invalid : \'" + + elementsList.get(i) + + 
"\'"); + } + } + this.device = elementsList.get(0); + this.mountPoint = elementsList.get(1); + this.fileSystem = elementsList.get(2); + if (elementsList.get(3) != null && elementsList.get(3).trim().length() > 0) { + String[] mountOptionsArray = elementsList.get(3).trim().split(","); + for (String mountOption : mountOptionsArray) { + if (mountOption.trim().length() > 0) { + mountOptions.add(mountOption.trim()); + } + } + } + Integer dumpValue; + try { + dumpValue = Integer.parseInt(elementsList.get(4)); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "Received an illegal argument at index 4. " + + "It as to be an integer (the dump value), received value : " + + elementsList.get(4) + + " . NumberFormatException : " + + e.getMessage()); + } + + if (dumpValue != 1 && dumpValue != 0) { + throw new IllegalArgumentException( + "Received an illegal argument at index 4. " + + "It can be only 0 or 1, received value : " + + elementsList.get(4)); + } + this.dump = (dumpValue == 0 ? false : true); + try { + this.fsckOrderPosition = Integer.parseInt(elementsList.get(5)); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "Received an illegal argument at index 5. " + + "It as to be an integer (the fsck order position value), received value : " + + elementsList.get(4) + + " . NumberFormatException : " + + e.getMessage()); + } + } + + /** @return the device */ + public final String getDevice() { + + return device; + } + + /** @return the mountPoint */ + public final String getMountPoint() { + + return mountPoint; + } + + /** @return the fileSystem */ + public final String getFileSystem() { + + return fileSystem; + } + + /** @return the mountOptions */ + public final List getMountOptions() { + + return mountOptions; + } + + /** @return the dump */ + public final boolean isDump() { + + return dump; + } + + /** @return the fsckOrderPosition */ + public final int getFsckOrderPosition() { + + return fsckOrderPosition; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + return "MtabRow [device=" + + device + + ", mountPoint=" + + mountPoint + + ", fileSystem=" + + fileSystem + + ", mountOptions=" + + mountOptions + + ", dump=" + + dump + + ", fsckOrderPosition=" + + fsckOrderPosition + + "]"; + } } diff --git a/src/main/java/it/grid/storm/filesystem/MtabUtil.java b/src/main/java/it/grid/storm/filesystem/MtabUtil.java index 1c80f4a5..13c1b266 100644 --- a/src/main/java/it/grid/storm/filesystem/MtabUtil.java +++ b/src/main/java/it/grid/storm/filesystem/MtabUtil.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -17,162 +16,148 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class MtabUtil { - private static final Logger log = LoggerFactory.getLogger(MtabUtil.class); + private static final Logger log = LoggerFactory.getLogger(MtabUtil.class); + + private static final String MTAB_FILE_PATH = "/etc/mtab"; + + private static final int MTAB_DEVICE_INDEX = 0; + + private static final int MTAB_MOUNT_POINT_INDEX = 1; - private static final String MTAB_FILE_PATH = "/etc/mtab"; + private static final int MTAB_FS_NAME_INDEX = 2; - private static final int MTAB_DEVICE_INDEX = 0; + private static final int MTAB_MOUNT_OPTIONS_INDEX = 3; - private static final int MTAB_MOUNT_POINT_INDEX = 1; - - private static final int MTAB_FS_NAME_INDEX = 2; - - private static final int MTAB_MOUNT_OPTIONS_INDEX = 3; - - private static final int MTAB_DUMP_INDEX = 4; + private static final int MTAB_DUMP_INDEX = 4; - private static final int MTAB_FSC_ORDER_POSITION_INDEX = 5; - - public static String getFilePath() { - - return MTAB_FILE_PATH; - } - - /** - * @return the mtabDeviceIndex - */ - public static final int getMtabDeviceIndex() { - - return MTAB_DEVICE_INDEX; - } - - public static int getMountPointIndex() { - - return MTAB_MOUNT_POINT_INDEX; - } - - public static int getFsNameIndex() { - - return MTAB_FS_NAME_INDEX; - } - - /** - * @return the mtabMountOptionsIndex - */ - public static final int getMtabMountOptionsIndex() { - - return MTAB_MOUNT_OPTIONS_INDEX; - } - - /** - * @return the mtabDumpIndex - */ - public static final int getMtabDumpIndex() { - - return MTAB_DUMP_INDEX; - } - - /** - * @return the mtabFscOrderPositionIndex - */ - public static final int getMtabFscOrderPositionIndex() { - - return MTAB_FSC_ORDER_POSITION_INDEX; - } - - protected static boolean skipLineForMountPoints(String line) { - - return line.startsWith("#") || line.isEmpty(); - } - - public static Map getFSMountPoints() throws Exception { - - HashMap mountPointToFSMap = new HashMap(); - BufferedReader mtab = null; - try { - try { - mtab = new BufferedReader(new FileReader(getFilePath())); - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - throw new Exception("Unable to get mount points. mtab file not found",e); - } - String line; - try { - while ((line = mtab.readLine()) != null) { - if (skipLineForMountPoints(line)) { - continue; - } - LinkedList elementsList = tokenizeLine(line); - if ((elementsList.size() - 1) < getMountPointIndex() - || (elementsList.size() - 1) < getFsNameIndex()) { - log.warn("FS mount point parsing error. " - + "Not enough elements found: {}. Skipping current line...", - elementsList); - } else { - mountPointToFSMap.put(elementsList.get(getMountPointIndex()), - elementsList.get(getFsNameIndex())); - } - } - } catch (IOException e) { - log.error(e.getMessage(), e); - throw new Exception( - "Unable to get mount points. 
Erro reading from mtab"); - } - } finally { - if (mtab != null) { - try { - mtab.close(); - } catch (IOException e) { - } - } - } - return mountPointToFSMap; - } - - public static List getRows() throws IOException { - - List rows = new ArrayList(); - BufferedReader mtab = new BufferedReader(new FileReader(getFilePath())); - String line; - while ((line = mtab.readLine()) != null) { - if (skipLineForMountPoints(line)) { - continue; - } - log.debug("mtab row from string {}", line); - MtabRow row = null; - try { - row = produceRow(line); - } catch (IllegalArgumentException e) { - log.warn("Skipping line {}. {}", line, e.getMessage(), e); - } - if (row != null) { - rows.add(row); - } - } - log.debug("Parsed {} mtab rows from file {}", - rows.size(), MTAB_FILE_PATH); - return rows; - } - - private static MtabRow produceRow(String line) - throws IllegalArgumentException { - - LinkedList elementsList = tokenizeLine(line); - return new MtabRow(elementsList); - } - - public static LinkedList tokenizeLine(String line) { - - String[] elementsArray = line.split(" "); - LinkedList elementsList = new LinkedList( - Arrays.asList(elementsArray)); - while (elementsList.remove("")) { - } - return elementsList; - } -} \ No newline at end of file + private static final int MTAB_FSC_ORDER_POSITION_INDEX = 5; + + public static String getFilePath() { + + return MTAB_FILE_PATH; + } + + /** @return the mtabDeviceIndex */ + public static final int getMtabDeviceIndex() { + + return MTAB_DEVICE_INDEX; + } + + public static int getMountPointIndex() { + + return MTAB_MOUNT_POINT_INDEX; + } + + public static int getFsNameIndex() { + + return MTAB_FS_NAME_INDEX; + } + + /** @return the mtabMountOptionsIndex */ + public static final int getMtabMountOptionsIndex() { + + return MTAB_MOUNT_OPTIONS_INDEX; + } + + /** @return the mtabDumpIndex */ + public static final int getMtabDumpIndex() { + + return MTAB_DUMP_INDEX; + } + + /** @return the mtabFscOrderPositionIndex */ + public static final int getMtabFscOrderPositionIndex() { + + return MTAB_FSC_ORDER_POSITION_INDEX; + } + + protected static boolean skipLineForMountPoints(String line) { + + return line.startsWith("#") || line.isEmpty(); + } + + public static Map getFSMountPoints() throws Exception { + + HashMap mountPointToFSMap = new HashMap(); + BufferedReader mtab = null; + try { + try { + mtab = new BufferedReader(new FileReader(getFilePath())); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + throw new Exception("Unable to get mount points. mtab file not found", e); + } + String line; + try { + while ((line = mtab.readLine()) != null) { + if (skipLineForMountPoints(line)) { + continue; + } + LinkedList elementsList = tokenizeLine(line); + if ((elementsList.size() - 1) < getMountPointIndex() + || (elementsList.size() - 1) < getFsNameIndex()) { + log.warn( + "FS mount point parsing error. " + + "Not enough elements found: {}. Skipping current line...", + elementsList); + } else { + mountPointToFSMap.put( + elementsList.get(getMountPointIndex()), elementsList.get(getFsNameIndex())); + } + } + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new Exception("Unable to get mount points. 
Erro reading from mtab"); + } + } finally { + if (mtab != null) { + try { + mtab.close(); + } catch (IOException e) { + } + } + } + return mountPointToFSMap; + } + + public static List getRows() throws IOException { + + List rows = new ArrayList(); + BufferedReader mtab = new BufferedReader(new FileReader(getFilePath())); + String line; + while ((line = mtab.readLine()) != null) { + if (skipLineForMountPoints(line)) { + continue; + } + log.debug("mtab row from string {}", line); + MtabRow row = null; + try { + row = produceRow(line); + } catch (IllegalArgumentException e) { + log.warn("Skipping line {}. {}", line, e.getMessage(), e); + } + if (row != null) { + rows.add(row); + } + } + log.debug("Parsed {} mtab rows from file {}", rows.size(), MTAB_FILE_PATH); + return rows; + } + + private static MtabRow produceRow(String line) throws IllegalArgumentException { + + LinkedList elementsList = tokenizeLine(line); + return new MtabRow(elementsList); + } + + public static LinkedList tokenizeLine(String line) { + + String[] elementsArray = line.split(" "); + LinkedList elementsList = new LinkedList(Arrays.asList(elementsArray)); + while (elementsList.remove("")) {} + return elementsList; + } +} diff --git a/src/main/java/it/grid/storm/filesystem/NullGPFSFilesystemException.java b/src/main/java/it/grid/storm/filesystem/NullGPFSFilesystemException.java index 5e97005e..adfdfc0b 100644 --- a/src/main/java/it/grid/storm/filesystem/NullGPFSFilesystemException.java +++ b/src/main/java/it/grid/storm/filesystem/NullGPFSFilesystemException.java @@ -1,21 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; /** - * Class that represents an Exception thrown by the GPFSSpaceSystem if it is - * instantited with a null GPFS filesystem - * + * Class that represents an Exception thrown by the GPFSSpaceSystem if it is instantited with a null + * GPFS filesystem + * * @author EGRID - ICTP Trieste * @version 1.0 * @date May 2006 */ public class NullGPFSFilesystemException extends Exception { - public String toString() { + public String toString() { - return "Supplied GPFS filesystem was null!"; - } + return "Supplied GPFS filesystem was null!"; + } } diff --git a/src/main/java/it/grid/storm/filesystem/RandomWaitFilesystemAdapter.java b/src/main/java/it/grid/storm/filesystem/RandomWaitFilesystemAdapter.java index 6f1320e9..b2f1c75f 100644 --- a/src/main/java/it/grid/storm/filesystem/RandomWaitFilesystemAdapter.java +++ b/src/main/java/it/grid/storm/filesystem/RandomWaitFilesystemAdapter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
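A short, hedged usage sketch for the MtabRow and MtabUtil classes reformatted above: the sample mtab line is invented, /etc/mtab parsing is host-dependent, the java.util imports are omitted, and the generic types (List<MtabRow>, Map<String, String>) are assumed because the signatures above appear without type parameters:

    // Sketch only: parse one hypothetical mtab line, then enumerate the real mount table.
    void mtabExamples() throws Exception {
      // Field order expected by the list-based MtabRow constructor:
      // [device, mountPoint, fileSystem, mountOptions, dump, fsckOrderPosition]
      MtabRow sample =
          new MtabRow(Arrays.asList("/dev/sda1", "/storage", "ext4", "rw,relatime", "0", "2"));
      System.out.println(sample);   // mount options are split on ',' and trimmed by the constructor

      for (MtabRow row : MtabUtil.getRows()) {              // reads /etc/mtab
        System.out.println(row.getMountPoint() + " [" + row.getFileSystem() + "]");
      }
      Map<String, String> fsByMountPoint = MtabUtil.getFSMountPoints();
      String rootFs = fsByMountPoint.get("/");              // filesystem mounted at "/", if listed
    }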
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; @@ -8,13 +7,11 @@ import static java.lang.System.getProperty; import static java.util.Objects.isNull; +import it.grid.storm.griduser.LocalUser; import java.util.Random; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.griduser.LocalUser; - public class RandomWaitFilesystemAdapter implements FilesystemIF { public static final Logger LOG = LoggerFactory.getLogger(RandomWaitFilesystemAdapter.class); @@ -40,18 +37,18 @@ private long randomTimeinMsec() { private RandomWaitFilesystemAdapter(FilesystemIF delegate) { - LOG.warn("RandomWaitFilesystemAdapter is ENABLED ({} property is defined)", - RANDOM_SLEEP_ENABLED); + LOG.warn( + "RandomWaitFilesystemAdapter is ENABLED ({} property is defined)", RANDOM_SLEEP_ENABLED); - LOG.warn("This adapter introduces synthentic sleep time on each fs call!! Not advisable " - + "in production"); + LOG.warn( + "This adapter introduces synthentic sleep time on each fs call!! Not advisable " + + "in production"); this.delegate = delegate; maxWaitTime = parseInt(getProperty(MAX_WAIT_TIME_MSEC, "1000")); minWaitTime = parseInt(getProperty(MIN_WAIT_TIME_MSEC, "10")); } - public static FilesystemIF maybeWrapFilesystem(FilesystemIF delegate) { if (!isNull(getProperty(RANDOM_SLEEP_ENABLED))) { return new RandomWaitFilesystemAdapter(delegate); @@ -71,7 +68,6 @@ protected void sleepSomeTime() { } } - @Override public long getSize(String file) { sleepSomeTime(); @@ -143,14 +139,14 @@ public FilesystemPermission getUserPermission(LocalUser u, String fileOrDirector return delegate.getUserPermission(u, fileOrDirectory); } - public FilesystemPermission grantGroupPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission grantGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.grantGroupPermission(u, fileOrDirectory, permission); } - public FilesystemPermission grantUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission grantUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.grantUserPermission(u, fileOrDirectory, permission); } @@ -165,28 +161,27 @@ public FilesystemPermission removeUserPermission(LocalUser u, String fileOrDirec return delegate.removeUserPermission(u, fileOrDirectory); } - public FilesystemPermission revokeGroupPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission revokeGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.revokeGroupPermission(u, fileOrDirectory, permission); } - public FilesystemPermission revokeUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission revokeUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.revokeUserPermission(u, fileOrDirectory, permission); } - public FilesystemPermission setGroupPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission setGroupPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.setGroupPermission(u, fileOrDirectory, permission); } - public FilesystemPermission 
setUserPermission(LocalUser u, String fileOrDirectory, - FilesystemPermission permission) { + public FilesystemPermission setUserPermission( + LocalUser u, String fileOrDirectory, FilesystemPermission permission) { sleepSomeTime(); return delegate.setUserPermission(u, fileOrDirectory, permission); } - } diff --git a/src/main/java/it/grid/storm/filesystem/ReservationException.java b/src/main/java/it/grid/storm/filesystem/ReservationException.java index 702470ad..c8f44a70 100644 --- a/src/main/java/it/grid/storm/filesystem/ReservationException.java +++ b/src/main/java/it/grid/storm/filesystem/ReservationException.java @@ -1,38 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; /** * Super class that represents a generic reservation exception. - * + * * @author EGRID - ICTP Trieste * @version 1.0 * @date May 2006 */ public class ReservationException extends Exception { - private String error = ""; + private String error = ""; - /** - * Public constructor requiring a String explaining the nature of the error. - * If the String is null, then an empty one is used instead. - */ - public ReservationException(String error) { + /** + * Public constructor requiring a String explaining the nature of the error. If the String is + * null, then an empty one is used instead. + */ + public ReservationException(String error) { + + if (error != null) this.error = error; + } - if (error != null) - this.error = error; - } - public ReservationException(String error, Throwable cause) { super(error, cause); } - - public String toString() { - return error; - } + return error; + } } diff --git a/src/main/java/it/grid/storm/filesystem/Space.java b/src/main/java/it/grid/storm/filesystem/Space.java index 93a4decb..55a5e7ba 100644 --- a/src/main/java/it/grid/storm/filesystem/Space.java +++ b/src/main/java/it/grid/storm/filesystem/Space.java @@ -1,300 +1,270 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file Space.java * @author Riccardo Murri - * - * Definition of the Space interface + *
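The RandomWaitFilesystemAdapter reformatted above is opt-in: maybeWrapFilesystem() wraps the delegate only when the enabling system property is defined (the property names and wait bounds are the constants declared in that class), and presumably hands back the delegate unchanged otherwise. A one-method sketch of how a caller would apply it, with the concrete FilesystemIF as a placeholder:

    // Sketch only: wrap an existing FilesystemIF for latency-injection testing.
    FilesystemIF maybeSlow(FilesystemIF concreteFs) {
      // Wrapped only if the enabling property is set; the class itself warns this is
      // not advisable in production.
      return RandomWaitFilesystemAdapter.maybeWrapFilesystem(concreteFs);
    }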

Definition of the Space interface */ /* * Copyright (c) 2006 Riccardo Murri for the EGRID/INFN * joint project StoRM. - * + * * You may copy, modify and distribute this file under the same terms as StoRM * itself. */ package it.grid.storm.filesystem; -import java.io.IOException; -import it.grid.storm.srm.types.TSpaceToken; -import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.common.types.SizeUnit; +import it.grid.storm.srm.types.TSizeInBytes; +import it.grid.storm.srm.types.TSpaceToken; +import java.io.IOException; /** - * Provides an interface for SRM-style advance space reservation, and adapts it - * to filesystem-level actual space reservation methods. - * This is a rough draft, and should be furhter discussed. - * - *

- * This interface is a draft! Due to the unsettled state of the - * SRM spec regarding to reserved space semantics, and the differences between - * SRM space reservation and filesystem-level preallocation, this interface will - * probably change in the near future. - * - * At present this interface represents functionality that is only a subset of - * the full SRM 2.1.1 specification. This is so because StoRM was originally - * designed to leverage GPFS native space preallocation, which satisfies the SRM - * specifications only in restricted use cases. - * + * Provides an interface for SRM-style advance space reservation, and adapts it to filesystem-level + * actual space reservation methods. This is a rough draft, and should be further + * discussed. + * + *

This interface is a draft! Due to the unsettled state of the SRM spec + * regarding reserved space semantics, and the differences between SRM space reservation and + * filesystem-level preallocation, this interface will probably change in the near future. + * + *

At present this interface represents functionality that is only a subset of the full SRM 2.1.1 + * specification. This is so because StoRM was originally designed to leverage GPFS native space + * preallocation, which satisfies the SRM specifications only in restricted use cases. + * * @author Riccardo Murri * @author EGRID - ICTP Trieste (further development and modifications) * @version $Revision: 1.9 $ */ public class Space { - private SpaceSystem ss = null; // spacesystem - private TSizeInBytes guaranteedSize = TSizeInBytes.makeEmpty(); // guaranteed - // size - private TSizeInBytes totalSize = TSizeInBytes.makeEmpty(); // total reserved - // size - private TSpaceToken spaceToken = TSpaceToken.makeEmpty(); // TSpaceToken - // associated with - // Space request - private LocalFile spaceFile = null; // space file initialized by constructor! - - /** - * Constructor that requires the guaranteedSize, the totalSize, the spaceFile - * and the SpaceSystem; if any is null, or totalSize is Empty, or - * guaranteedSize is greater than totalSize, then an - * InvalidSpaceAttributesException is thrown. - */ - public Space(TSizeInBytes guaranteedSize, TSizeInBytes totalSize, - LocalFile spaceFile, SpaceSystem ss) throws InvalidSpaceAttributesException { - - boolean ok1 = (guaranteedSize != null) && (totalSize != null) - && (spaceFile != null) && (ss != null) && (!totalSize.isEmpty()); - boolean ok2 = guaranteedSize.isEmpty(); - boolean ok3 = (!guaranteedSize.isEmpty()) - && (guaranteedSize.getSizeIn(SizeUnit.BYTES) <= totalSize - .getSizeIn(SizeUnit.BYTES)); - if (ok1 && (ok2 || ok3)) { - this.guaranteedSize = guaranteedSize; - this.totalSize = totalSize; - this.spaceFile = spaceFile; - this.ss = ss; - } else - throw new InvalidSpaceAttributesException(guaranteedSize, totalSize, - spaceFile, ss); - } - - /** - * Method used to set the TSpaceToken of This Space. If it is null, nothing - * gets set! - */ - public void setSpaceToken(TSpaceToken spaceToken) { - - if (spaceToken != null) - this.spaceToken = spaceToken; - } - - /** - * Method that physically carries out the actual space reservation. In case of - * any problem, a ReservationException is thrown - */ - public void allot() throws ReservationException { - - ss.reserveSpace(spaceFile.getPath(), guaranteedSize.value()); - } - - /** - * Method that just creates the space file but not allocates memory. In case - * of any problem, a ReservationException is thrown - */ - public void fakeAllot() throws ReservationException { - - java.io.File localFile = new java.io.File(spaceFile.getPath()); - try { - localFile.createNewFile(); - } catch (IOException e) { - throw new ReservationException( - "IO exception while creating local File named : " + spaceFile.getPath() - + " : " + e.getMessage()); - } catch (SecurityException e) { - throw new ReservationException( - "Security exception while creating local File named : " - + spaceFile.getPath() + " : " + e.getMessage()); - } - } - - /** - * Method that just removes the space file but not deallocates memory. In case - * of any problem, a ReservationException is thrown - */ - public void fakeRelease() throws ReservationException { - - java.io.File localFile = new java.io.File(spaceFile.getPath()); - try { - localFile.delete(); - } catch (SecurityException e) { - throw new ReservationException( - "Security exception while deleteing local File named : " - + spaceFile.getPath() + " : " + e.getMessage()); - } - } - - /** - * Method that gives back unused blocks to the filesystems' general available - * space. 
It returns a long representing the size (in bytes) of space that was - * freed. - * - * If anything goes wrong, a ReservationException is thrown. - */ - public long compact() throws ReservationException { - - return ss.compactSpace(spaceFile.getPath()); - } - - /** - * Method that returns the TSpaceToken associated to This reserved space: if - * none is associated, then an Empty one is returned. - */ - public TSpaceToken getSpaceToken() { - - return spaceToken; - } - - /** - * Method that returns the corresponding SpaceFile. - * - * BEWARE! All space reservation implementation are assumed to create a - * physical file that takes up room in the underlying filesystem! It is this - * mock file that gets returned! - */ - public LocalFile getSpaceFile() { - - return spaceFile; - } - - /** - * Method that returns a TSizeInBytes representing the size in bytes of - * guaranteed reserved space. The guaranteed reserved space can only be used - * by the Grid entity (user, VO, ...) that reserved it, and cannot be used up - * by entities outside StoRM. - */ - public TSizeInBytes getGuaranteedSpaceSize() { - - return guaranteedSize; - } - - /** - * Method that returns a TSizeInBytes representing the total size in bytes of - * reserved space: best-effort + guaranteed. The "best-effort" reserved space - * may be used by entities outside StoRM so it may actually be no longer - * available at the time its use is requested. - */ - public TSizeInBytes getTotalReservedSize() { - - return totalSize; - } - - /** - * Method that returns a long representing the size of space that has been - * reserved, but that so far has not been used. At the moment this method is - * not implemented and always returns 0. - */ - public long getUnusedSpace() { - - return 0; - } - - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Space: guaranteedSize="); - sb.append(guaranteedSize.toString()); - sb.append("; totalSize="); - sb.append(totalSize.toString()); - sb.append("; spaceFile="); - sb.append(spaceFile.toString()); - sb.append("; SpaceSystem="); - sb.append(ss.toString()); - sb.append("; TSpaceToken="); - sb.append(spaceToken.toString()); - return sb.toString(); - } - - /** - * Return the remaining unused space, that is, the size of the space that is - * still available to allot for individual files with the - * {@link #allotForFile} and {@link #addFile} methods. - * - * @return remaining size (in bytes) to be used in this space. - */ - // public long getAvailableSize(); - - /** - * Use a part of the reserved space for a file. Restrictions may be put by the - * underlying implementation on the filename, e.g., the file must be created - * under a certain directory. - * - * @param file - * a {@link File} to assign part of the reserved space to. - * @param size - * size (in bytes) of the part of reserved space to use for - * file. - * - * @return size (in bytes) of the reserved space actually alloted for the - * file. - */ - // public long allotForFile(File file, long size); - - /** - * Tell system that file will grow at this reserved space expenses. - * Restrictions may be put by the underlying implementation on the filename, - * e.g., the file must be created under a certain directory, or, worse, this - * call mey not be supported on all implementations. - * - * @param file - * a {@link File} to assign part of the reserved space to. - * - * @return false if this feature is not supported by the - * underlying implementation. 
- */ - // public boolean addFile(File file); - - /** - * Reserve size bytes on filesystem; return actual size (in bytes) of - * reserved space. If any space is actually reserved by this function, then it - * is considered a guaranteed reservation, that is, only user - * u can operate on it and eventually dispose the space. - * - * @todo FIXME: This should ideally be a constructor, but we cannot - * specify constructors in interfaces... - * - * @param u - * Grid user to reserve the space to. - * @param guaranteedSize - * size (in bytes) of space to reserve. - * @param bestEffortSize - * size (in bytes) of space to reserve. - * - * @return size (in bytes) of guaranteed space actually reserved. - */ - // public long reserveGuaranteedSpace(VomsGridUser u, - // long guaranteedSize, - // long bestEffortSize); - - /** - * Release any reserved space, possibly deleting files and directories still - * existing within the reserved space area. The parameter - * deleteLeftoverFiles controls whether files or directories existing - * within the space should be deleted, or an exception should be thrown. - * - * @param deleteLeftoverFiles - * if true, then delete any files and directories that - * still exist within the reserved space; if false, then - * throw a SpaceNotEmpty exception if any such files - * exist. - * - * @return size (in bytes) of space that was freed and returned to filesystem - * for general usage. - * - * @throws SpaceNotEmpty - * if files or directories exist within the space, and the parameter - * deleteLeftoverFiles was set to false. - */ - // public long release(boolean deleteLeftoverFiles); + private SpaceSystem ss = null; // spacesystem + private TSizeInBytes guaranteedSize = TSizeInBytes.makeEmpty(); // guaranteed + // size + private TSizeInBytes totalSize = TSizeInBytes.makeEmpty(); // total reserved + // size + private TSpaceToken spaceToken = TSpaceToken.makeEmpty(); // TSpaceToken + // associated with + // Space request + private LocalFile spaceFile = null; // space file initialized by constructor! + + /** + * Constructor that requires the guaranteedSize, the totalSize, the spaceFile and the SpaceSystem; + * if any is null, or totalSize is Empty, or guaranteedSize is greater than totalSize, then an + * InvalidSpaceAttributesException is thrown. + */ + public Space( + TSizeInBytes guaranteedSize, TSizeInBytes totalSize, LocalFile spaceFile, SpaceSystem ss) + throws InvalidSpaceAttributesException { + + boolean ok1 = + (guaranteedSize != null) + && (totalSize != null) + && (spaceFile != null) + && (ss != null) + && (!totalSize.isEmpty()); + boolean ok2 = guaranteedSize.isEmpty(); + boolean ok3 = + (!guaranteedSize.isEmpty()) + && (guaranteedSize.getSizeIn(SizeUnit.BYTES) <= totalSize.getSizeIn(SizeUnit.BYTES)); + if (ok1 && (ok2 || ok3)) { + this.guaranteedSize = guaranteedSize; + this.totalSize = totalSize; + this.spaceFile = spaceFile; + this.ss = ss; + } else throw new InvalidSpaceAttributesException(guaranteedSize, totalSize, spaceFile, ss); + } + + /** Method used to set the TSpaceToken of This Space. If it is null, nothing gets set! */ + public void setSpaceToken(TSpaceToken spaceToken) { + + if (spaceToken != null) this.spaceToken = spaceToken; + } + + /** + * Method that physically carries out the actual space reservation. 
In case of any problem, a + * ReservationException is thrown. + */ + public void allot() throws ReservationException { + + ss.reserveSpace(spaceFile.getPath(), guaranteedSize.value()); + } + + /** + * Method that just creates the space file but does not allocate memory. In case of any problem, a + * ReservationException is thrown. + */ + public void fakeAllot() throws ReservationException { + + java.io.File localFile = new java.io.File(spaceFile.getPath()); + try { + localFile.createNewFile(); + } catch (IOException e) { + throw new ReservationException( + "IO exception while creating local File named : " + + spaceFile.getPath() + + " : " + + e.getMessage()); + } catch (SecurityException e) { + throw new ReservationException( + "Security exception while creating local File named : " + + spaceFile.getPath() + + " : " + + e.getMessage()); + } + } + + /** + * Method that just removes the space file but does not deallocate memory. In case of any problem, a + * ReservationException is thrown. + */ + public void fakeRelease() throws ReservationException { + + java.io.File localFile = new java.io.File(spaceFile.getPath()); + try { + localFile.delete(); + } catch (SecurityException e) { + throw new ReservationException( + "Security exception while deleting local File named : " + + spaceFile.getPath() + + " : " + + e.getMessage()); + } + } + + /** + * Method that gives back unused blocks to the filesystem's general available space. It returns a + * long representing the size (in bytes) of space that was freed. + * + *

If anything goes wrong, a ReservationException is thrown. + */ + public long compact() throws ReservationException { + + return ss.compactSpace(spaceFile.getPath()); + } + + /** + * Method that returns the TSpaceToken associated with this reserved space: if none is associated, + * then an Empty one is returned. + */ + public TSpaceToken getSpaceToken() { + + return spaceToken; + } + + /** + * Method that returns the corresponding SpaceFile. + * + *

BEWARE! All space reservation implementations are assumed to create a physical file that + * takes up room in the underlying filesystem! It is this mock file that gets returned! + */ + public LocalFile getSpaceFile() { + + return spaceFile; + } + + /** + * Method that returns a TSizeInBytes representing the size in bytes of guaranteed reserved space. + * The guaranteed reserved space can only be used by the Grid entity (user, VO, ...) that reserved + * it, and cannot be used up by entities outside StoRM. + */ + public TSizeInBytes getGuaranteedSpaceSize() { + + return guaranteedSize; + } + + /** + * Method that returns a TSizeInBytes representing the total size in bytes of reserved space: + * best-effort + guaranteed. The "best-effort" reserved space may be used by entities outside + * StoRM so it may actually be no longer available at the time its use is requested. + */ + public TSizeInBytes getTotalReservedSize() { + + return totalSize; + } + + /** + * Method that returns a long representing the size of space that has been reserved, but that so + * far has not been used. At the moment this method is not implemented and always returns 0. + */ + public long getUnusedSpace() { + + return 0; + } + + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Space: guaranteedSize="); + sb.append(guaranteedSize.toString()); + sb.append("; totalSize="); + sb.append(totalSize.toString()); + sb.append("; spaceFile="); + sb.append(spaceFile.toString()); + sb.append("; SpaceSystem="); + sb.append(ss.toString()); + sb.append("; TSpaceToken="); + sb.append(spaceToken.toString()); + return sb.toString(); + } + + /** + * Return the remaining unused space, that is, the size of the space that is still available to + * allot for individual files with the {@link #allotForFile} and {@link #addFile} methods. + * + * @return remaining size (in bytes) to be used in this space. + */ + // public long getAvailableSize(); + + /** + * Use a part of the reserved space for a file. Restrictions may be put by the underlying + * implementation on the filename, e.g., the file must be created under a certain directory. + * + * @param file a {@link File} to assign part of the reserved space to. + * @param size size (in bytes) of the part of reserved space to use for file. + * @return size (in bytes) of the reserved space actually allotted for the file. + */ + // public long allotForFile(File file, long size); + + /** + * Tell the system that file will grow at this reserved space's expense. Restrictions may be put + * by the underlying implementation on the filename, e.g., the file must be created under a + * certain directory, or, worse, this call may not be supported on all implementations. + * + * @param file a {@link File} to assign part of the reserved space to. + * @return false if this feature is not supported by the underlying implementation. + */ + // public boolean addFile(File file); + + /** + * Reserve size bytes on filesystem; return actual size (in bytes) of reserved space. If + * any space is actually reserved by this function, then it is considered a guaranteed + * reservation, that is, only user u can operate on it and eventually dispose the space. + * + * @todo FIXME: This should ideally be a constructor, but we cannot specify constructors + * in interfaces... + * @param u Grid user to reserve the space to. + * @param guaranteedSize size (in bytes) of space to reserve. + * @param bestEffortSize size (in bytes) of space to reserve. 
+ * @return size (in bytes) of guaranteed space actually reserved. + */ + // public long reserveGuaranteedSpace(VomsGridUser u, + // long guaranteedSize, + // long bestEffortSize); + + /** + * Release any reserved space, possibly deleting files and directories still existing within the + * reserved space area. The parameter deleteLeftoverFiles controls whether files or + * directories existing within the space should be deleted, or an exception should be thrown. + * + * @param deleteLeftoverFiles if true, then delete any files and directories that + * still exist within the reserved space; if false, then throw a + * SpaceNotEmpty exception if any such files exist. + * @return size (in bytes) of space that was freed and returned to filesystem for general usage. + * @throws SpaceNotEmpty if files or directories exist within the space, and the parameter + * deleteLeftoverFiles was set to false. + */ + // public long release(boolean deleteLeftoverFiles); } diff --git a/src/main/java/it/grid/storm/filesystem/SpaceSystem.java b/src/main/java/it/grid/storm/filesystem/SpaceSystem.java index 6f100796..ed98472c 100644 --- a/src/main/java/it/grid/storm/filesystem/SpaceSystem.java +++ b/src/main/java/it/grid/storm/filesystem/SpaceSystem.java @@ -1,62 +1,51 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; /** - * Interface that represents the Space functionality that some filesystems may - * have natively present, or that must be added as an external feature to those - * that do not. - * + * Interface that represents the Space functionality that some filesystems may have natively + * present, or that must be added as an external feature to those that do not. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date May 2006 */ public interface SpaceSystem { - /** - * Method that pre-allocates size bytes on pathToFile; it returns a long - * representing the actual size in bytes of reserved space. Notice that - * pathToFile is the complete path to the desired file, including the name of - * the file itself. - * - * The method throws a ReservationException if the operation cannot be - * completed. - */ - public long reserveSpace(String pathToFile, long size) - throws ReservationException; + /** + * Method that pre-allocates size bytes on pathToFile; it returns a long representing the actual + * size in bytes of reserved space. Notice that pathToFile is the complete path to the desired + * file, including the name of the file itself. + * + *

The method throws a ReservationException if the operation cannot be completed. + */ + public long reserveSpace(String pathToFile, long size) throws ReservationException; - /** - * Method that gives back to the filesystem any space previously pre-allocated - * to pathToFile, but presently unused. Notice that pathToFile is the complete - * path to the desired file, including the name of the file itself. The method - * returns a long representing the file size after compacting. - * - * The method throws a ReservationException if the operation cannot be - * completed. - */ - public long compactSpace(String pathToFile) throws ReservationException; + /** + * Method that gives back to the filesystem any space previously pre-allocated to pathToFile, but + * presently unused. Notice that pathToFile is the complete path to the desired file, including + * the name of the file itself. The method returns a long representing the file size after + * compacting. + * + *

The method throws a ReservationException if the operation cannot be completed. + */ + public long compactSpace(String pathToFile) throws ReservationException; - /** - * Method that de-allocates space previously assigned to pathToFile. Notice - * that pathToFile is the complete path to the file, including the name of the - * file itself. - * - * The method throws a ReservationException if the operation cannot be - * completed. - */ - public void removeSpace(String pathToFile) throws ReservationException; + /** + * Method that de-allocates space previously assigned to pathToFile. Notice that pathToFile is the + * complete path to the file, including the name of the file itself. + * + *

The method throws a ReservationException if the operation cannot be completed. + */ + public void removeSpace(String pathToFile) throws ReservationException; - /** - * Method used to modify the size already reserved for a file: it requires the - * String pathToFile and the long newSize. Notice that pathToFile is the - * complete path to the file, including the file itself. The method returns - * the actual changed size after the operation completes. - * - * The method throws a ReservationException if the operation cannot be - * completed. - */ - public long changeSize(String pathToFile, long newSize) - throws ReservationException; + /** + * Method used to modify the size already reserved for a file: it requires the String pathToFile + * and the long newSize. Notice that pathToFile is the complete path to the file, including the + * file itself. The method returns the actual changed size after the operation completes. + * + *

The method throws a ReservationException if the operation cannot be completed. + */ + public long changeSize(String pathToFile, long newSize) throws ReservationException; } diff --git a/src/main/java/it/grid/storm/filesystem/SpaceSystemException.java b/src/main/java/it/grid/storm/filesystem/SpaceSystemException.java index 3204a2bf..ae701ef4 100644 --- a/src/main/java/it/grid/storm/filesystem/SpaceSystemException.java +++ b/src/main/java/it/grid/storm/filesystem/SpaceSystemException.java @@ -1,33 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem; /** - * Class that represents an Exception thrown whenever a SpaceSystem cannot be - * instantiated. - * + * Class that represents an Exception thrown whenever a SpaceSystem cannot be instantiated. + * * @author EGRID - ICTP Trieste * @version 1.0 * @date June 2006 */ public class SpaceSystemException extends Exception { - private String explanation = ""; + private String explanation = ""; - /** - * Constructor that requires a non-null String describing the problem - * encountered. If a null is supplied, then an empty String is used instead. - */ - public SpaceSystemException(String explanation) { + /** + * Constructor that requires a non-null String describing the problem encountered. If a null is + * supplied, then an empty String is used instead. + */ + public SpaceSystemException(String explanation) { - if (explanation != null) - this.explanation = explanation; - } + if (explanation != null) this.explanation = explanation; + } - public String toString() { + public String toString() { - return explanation; - } + return explanation; + } } diff --git a/src/main/java/it/grid/storm/filesystem/WrongFilesystemType.java b/src/main/java/it/grid/storm/filesystem/WrongFilesystemType.java index cb0eb1d3..c2d5e351 100644 --- a/src/main/java/it/grid/storm/filesystem/WrongFilesystemType.java +++ b/src/main/java/it/grid/storm/filesystem/WrongFilesystemType.java @@ -1,42 +1,37 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * @file WrongFilesystemType.java * @author Riccardo Murri - * - * Source code for class WrongFilesystemType - * + *

Source code for class WrongFilesystemType */ /* * Copyright (c) 2006, Riccardo Murri for the * EGRID/INFN joint project StoRM. - * + * * You may copy, distribute and modify this file under the terms of the * LICENSE.txt file at the root of the StoRM backend source tree. - * + * * $Id: WrongFilesystemType.java,v 1.1 2006/03/31 13:35:01 rmurri Exp $ */ package it.grid.storm.filesystem; /** - * Thrown by genericfs subclasses ctors when the filesystem the passed pathname - * resides on, is not of a type supported by the class. - * - * Corresponds in usage to fs::wrong_filesystem_type exception thrown by C++ - * filesystem code. - * + * Thrown by genericfs subclasses ctors when the filesystem the passed pathname resides on, is not + * of a type supported by the class. + * + *

Corresponds in usage to fs::wrong_filesystem_type exception thrown by C++ filesystem code. + * * @see fs::wrong_filesystem_type - * * @author Riccardo Murri * @version $Revision: 1.1 $ */ public class WrongFilesystemType extends FilesystemError { - public WrongFilesystemType(final String msg) { + public WrongFilesystemType(final String msg) { - super(msg); - } + super(msg); + } } diff --git a/src/main/java/it/grid/storm/filesystem/swig/test.java b/src/main/java/it/grid/storm/filesystem/swig/test.java index 7fc1ae78..a415189e 100644 --- a/src/main/java/it/grid/storm/filesystem/swig/test.java +++ b/src/main/java/it/grid/storm/filesystem/swig/test.java @@ -1,27 +1,25 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.filesystem.swig; import it.grid.storm.ea.StormEA; import it.grid.storm.filesystem.AclNotSupported; import it.grid.storm.filesystem.FilesystemError; -import it.grid.storm.filesystem.swig.posixfs; public class test extends posixfs { public test(String mntpath) throws AclNotSupported, FilesystemError { - super(mntpath); + super(mntpath); } protected test(long cPtr, boolean cMemoryOwn) { - super(cPtr, cMemoryOwn); + super(cPtr, cMemoryOwn); } @Override public boolean is_file_on_disk(String filename) throws it.grid.storm.filesystem.FilesystemError { - return StormEA.getOnline(filename); + return StormEA.getOnline(filename); } } diff --git a/src/main/java/it/grid/storm/griduser/AbstractGridUser.java b/src/main/java/it/grid/storm/griduser/AbstractGridUser.java index 4a95a2a0..f157a1e0 100644 --- a/src/main/java/it/grid/storm/griduser/AbstractGridUser.java +++ b/src/main/java/it/grid/storm/griduser/AbstractGridUser.java @@ -1,119 +1,116 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. 
For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Original design made by Riccardo Zappi , 2007 - * + * * $Id: AbstractGridUser.java 3604 2007-05-22 11:16:27Z rzappi $ */ package it.grid.storm.griduser; import it.grid.storm.common.types.VO; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public abstract class AbstractGridUser implements GridUserInterface { - protected static final Logger log = LoggerFactory - .getLogger(AbstractGridUser.class); - protected DistinguishedName subjectDN = null; - protected String proxyString = null; - protected MapperInterface userMapperClass = null; - protected LocalUser localUser = null; - - protected AbstractGridUser(MapperInterface mapperClass, - String distinguishedName) { + protected static final Logger log = LoggerFactory.getLogger(AbstractGridUser.class); + protected DistinguishedName subjectDN = null; + protected String proxyString = null; + protected MapperInterface userMapperClass = null; + protected LocalUser localUser = null; - if (mapperClass == null || distinguishedName == null) { - throw new IllegalArgumentException( - "Provided null parameter: mapperClass=\'" + mapperClass - + "\' distinguishedName=\'" + distinguishedName + "\'"); - } - this.userMapperClass = mapperClass; - this.setDistinguishedName(distinguishedName); - } + protected AbstractGridUser(MapperInterface mapperClass, String distinguishedName) { - protected AbstractGridUser(MapperInterface mapperClass, - String distinguishedName, String proxy) { + if (mapperClass == null || distinguishedName == null) { + throw new IllegalArgumentException( + "Provided null parameter: mapperClass=\'" + + mapperClass + + "\' distinguishedName=\'" + + distinguishedName + + "\'"); + } + this.userMapperClass = mapperClass; + this.setDistinguishedName(distinguishedName); + } - this(mapperClass, distinguishedName); - this.setProxyString(proxy); - } + protected AbstractGridUser(MapperInterface mapperClass, String distinguishedName, String proxy) { - void setUserMapper(MapperInterface mapperClass) { + this(mapperClass, distinguishedName); + this.setProxyString(proxy); + } - if (mapperClass == null) { - throw new IllegalArgumentException("Provided null MapperInterface!"); - } - this.userMapperClass = mapperClass; - } + void setUserMapper(MapperInterface mapperClass) { - void setDistinguishedName(String dnString) { + if (mapperClass == null) { + throw new IllegalArgumentException("Provided null MapperInterface!"); + } + this.userMapperClass = mapperClass; + } - if (dnString == null) { - throw new IllegalArgumentException("Provided null DistinguishedName!"); - } - this.subjectDN = new DistinguishedName(dnString); - } + void setDistinguishedName(String dnString) { - public String getDn() { + if (dnString == null) { + throw new IllegalArgumentException("Provided null DistinguishedName!"); + } + this.subjectDN = new DistinguishedName(dnString); + } - String dn = this.subjectDN.getDN(); - return dn; - } + public String getDn() { - public DistinguishedName getDistinguishedName() { + String dn = this.subjectDN.getDN(); + return dn; + } - return subjectDN; - } + public DistinguishedName getDistinguishedName() { - void setProxyString(String proxy) { + return subjectDN; + } - this.proxyString = proxy; - } + void setProxyString(String proxy) { - public String getProxyString() { + this.proxyString = proxy; + } - return this.proxyString; - } + public String getProxyString() { - public String getUserCredentials() { + return this.proxyString; + } - return 
this.proxyString; - } + public String getUserCredentials() { - public LocalUser getLocalUser() throws CannotMapUserException { + return this.proxyString; + } - if (localUser == null) { - try { - if (this.hasVoms()) { - localUser = userMapperClass.map(getDn(), this.getFQANsAsString()); - } else { - localUser = userMapperClass.map(getDn(), null); - } - } catch (CannotMapUserException e) { - log.error("Mapping error: {}. Subject='{}'",e.getMessage(), - subjectDN.getX500DN_rfc1779(),e); - throw e; - } - } - return localUser; - } + public LocalUser getLocalUser() throws CannotMapUserException { - public abstract String[] getFQANsAsString(); + if (localUser == null) { + try { + if (this.hasVoms()) { + localUser = userMapperClass.map(getDn(), this.getFQANsAsString()); + } else { + localUser = userMapperClass.map(getDn(), null); + } + } catch (CannotMapUserException e) { + log.error( + "Mapping error: {}. Subject='{}'", e.getMessage(), subjectDN.getX500DN_rfc1779(), e); + throw e; + } + } + return localUser; + } - public abstract FQAN[] getFQANs(); + public abstract String[] getFQANsAsString(); - public abstract boolean hasVoms(); + public abstract FQAN[] getFQANs(); - public abstract VO getVO(); + public abstract boolean hasVoms(); + public abstract VO getVO(); } diff --git a/src/main/java/it/grid/storm/griduser/CannotMapUserException.java b/src/main/java/it/grid/storm/griduser/CannotMapUserException.java index 99f8ebdf..55d5d94e 100644 --- a/src/main/java/it/grid/storm/griduser/CannotMapUserException.java +++ b/src/main/java/it/grid/storm/griduser/CannotMapUserException.java @@ -1,32 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -/** - * Signal that something went wrong during the LCMAPS call. - */ +/** Signal that something went wrong during the LCMAPS call. */ public class CannotMapUserException extends GridUserException { - public CannotMapUserException() { - - super(); - } + public CannotMapUserException() { - public CannotMapUserException(String message) { + super(); + } - super(message); - } + public CannotMapUserException(String message) { - public CannotMapUserException(String message, Throwable cause) { + super(message); + } - super(message, cause); - } + public CannotMapUserException(String message, Throwable cause) { - public CannotMapUserException(Throwable cause) { + super(message, cause); + } - super(cause); - } + public CannotMapUserException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/it/grid/storm/griduser/DNMatchingRule.java b/src/main/java/it/grid/storm/griduser/DNMatchingRule.java index 9046b659..97cf4eae 100644 --- a/src/main/java/it/grid/storm/griduser/DNMatchingRule.java +++ b/src/main/java/it/grid/storm/griduser/DNMatchingRule.java @@ -1,377 +1,364 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. 
For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Original design made by Riccardo Zappi , 2007 - * + * * $Id: DNMatchingRule.java,v 1.4 2007/05/22 19:54:54 rzappi Exp $ */ package it.grid.storm.griduser; import java.util.ArrayList; -import java.util.regex.Pattern; -import java.util.regex.Matcher; import java.util.Iterator; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DNMatchingRule { - private static final Logger log = LoggerFactory - .getLogger(DNMatchingRule.class); - - private enum DNFields { - COUNTRY("C"), ORGANIZATION("O"), ORGANIZATIONALUNIT("OU"), LOCALITY("L"), COMMONNAME( - "CN"), DOMAINCOMPONENT("DC"), UNKNOWN(""); - - private final String code; - - private DNFields(String code) throws IllegalArgumentException { - - if (code == null) { - throw new IllegalArgumentException("Received null code argument"); - } - this.code = code; - } - - public static DNFields fromString(String code) { - - for (DNFields field : DNFields.values()) { - if (field.code.equals(code)) { - return field; - } - } - return UNKNOWN; - } - } - - private static final String ADMIT_ALL = ".*"; - - private String countryPatternString; - private String organizationPatternString; - private String organizationalUnitPatternString; - private String localityPatternString; - private String commonNamePatternString; - private String domainComponentPatternString; - - private Pattern countryPattern; - private Pattern organizationPattern; - private Pattern organizationalUnitPattern; - private Pattern localityPattern; - private Pattern commonNamePattern; - private Pattern domainComponentPattern; - - public static DNMatchingRule buildMatchAllDNMatchingRule() { - - return new DNMatchingRule(ADMIT_ALL, ADMIT_ALL, ADMIT_ALL, ADMIT_ALL, - ADMIT_ALL, ADMIT_ALL); - } - - /** - * Constructor with implicit Pattern String - * - * @param regularExpressionRule - * String - */ - public DNMatchingRule(String regularExpressionRule) { - - countryPatternString = ADMIT_ALL; - organizationPatternString = ADMIT_ALL; - organizationalUnitPatternString = ADMIT_ALL; - localityPatternString = ADMIT_ALL; - commonNamePatternString = ADMIT_ALL; - domainComponentPatternString = ADMIT_ALL; - if (!(regularExpressionRule == null - || regularExpressionRule.trim().equals("*") || regularExpressionRule - .trim().equals(".*"))) { - for (String rule : regularExpressionRule.split("/")) { - if (!rule.contains("=")) { - if (!(rule.trim().isEmpty() || rule.equals(ADMIT_ALL))) { - log.warn("Malformed DN regex element '{}' " - + "it does not contains \'=\' key-value separator", rule); - } - continue; - } - String[] ruleCoupple = rule.split("="); - if (ruleCoupple.length != 2) { - log - .warn("Malformed DN regex element '{}' it does not contains " - + "the key or contains more \'=\' separators", rule); - continue; - } - switch (DNFields.fromString(ruleCoupple[0])) { - case COUNTRY: - countryPatternString = ruleCoupple[1]; - break; - case ORGANIZATION: - organizationPatternString = ruleCoupple[1]; - break; - case ORGANIZATIONALUNIT: - organizationalUnitPatternString = ruleCoupple[1]; - break; - case LOCALITY: - localityPatternString = ruleCoupple[1]; - break; - case COMMONNAME: - commonNamePatternString = ruleCoupple[1]; - break; - case DOMAINCOMPONENT: - domainComponentPatternString = ruleCoupple[1]; - break; - default: - break; - } - } - } - initPatterns(); - } - - private void initPatterns() { - - // C country - if 
(isMatchAll(countryPatternString)) { - countryPattern = Pattern.compile(ADMIT_ALL); - } else { - countryPattern = Pattern.compile(this.countryPatternString); - } - - // O organization - if (isMatchAll(organizationPatternString)) { - organizationPattern = Pattern.compile(ADMIT_ALL); - } else { - organizationPattern = Pattern.compile(this.organizationPatternString); - } - - // OU organizationalUnit - if (isMatchAll(organizationalUnitPatternString)) { - organizationalUnitPattern = Pattern.compile(ADMIT_ALL); - } else { - organizationalUnitPattern = Pattern - .compile(this.organizationalUnitPatternString); - } - - // L locality - if (isMatchAll(localityPatternString)) { - localityPattern = Pattern.compile(ADMIT_ALL); - } else { - localityPattern = Pattern.compile(this.localityPatternString); - } - - // CN Common Name - if (isMatchAll(commonNamePatternString)) { - commonNamePattern = Pattern.compile(ADMIT_ALL); - } else { - commonNamePattern = Pattern.compile(this.commonNamePatternString); - } - - // DC Domain Component - if (isMatchAll(domainComponentPatternString)) { - domainComponentPattern = Pattern.compile(ADMIT_ALL); - } else { - domainComponentPattern = Pattern - .compile(this.domainComponentPatternString); - } - } - - private static boolean isMatchAll(String pattern) { - - return pattern == null || pattern.trim().equals("*") - || pattern.trim().equals(".*"); - } - - public DNMatchingRule(String countryPatternString, - String organizationPatternString, String organizationalUnitPatternString, - String localityPatternString, String commonNamePatternString, - String domainComponentPatternString) { - - this.countryPatternString = countryPatternString; - this.organizationPatternString = organizationPatternString; - this.organizationalUnitPatternString = organizationalUnitPatternString; - this.localityPatternString = localityPatternString; - this.commonNamePatternString = commonNamePatternString; - this.domainComponentPatternString = domainComponentPatternString; - initPatterns(); - } - - public boolean isMatchAll() { - - return isMatchAll(countryPatternString) - && isMatchAll(organizationPatternString) - && isMatchAll(organizationalUnitPatternString) - && isMatchAll(localityPatternString) - && isMatchAll(commonNamePatternString) - && isMatchAll(domainComponentPatternString); - } - - public boolean match(DistinguishedName principalDN) - throws IllegalArgumentException { - - if (principalDN == null) { - throw new IllegalArgumentException( - "Unable to perform rule matching. 
Received null argument: principalDN=" - + principalDN); - } - if (this.isMatchAll()) { - return true; - } - boolean result = false; - boolean countryMatch = false; - boolean organizationMatch = false; - boolean localityMatch = false; - boolean organizationalUnitMatch = false; - boolean commonNameMatch = false; - boolean domainComponentMatch = false; - - // C - String countryName = principalDN.getCountryName(); - if (countryName != null) { - CharSequence country = countryName.subSequence(0, countryName.length()); - Matcher countryMatcher = countryPattern.matcher(country); - countryMatch = countryMatcher.find(); - } else { - countryMatch = countryPatternString.equals(ADMIT_ALL); - } - if (!(countryMatch)) - return false; - - // O - String organizationName = principalDN.getOrganizationName(); - if (organizationName != null) { - CharSequence organization = organizationName.subSequence(0, - organizationName.length()); - Matcher organizationMatcher = organizationPattern.matcher(organization); - organizationMatch = organizationMatcher.find(); - } else { - organizationMatch = organizationPatternString.equals(ADMIT_ALL); - } - if (!(organizationMatch)) - return false; - - // L - String localityName = principalDN.getLocalityName(); - if (localityName != null) { - CharSequence locality = localityName - .subSequence(0, localityName.length()); - Matcher localityMatcher = localityPattern.matcher(locality); - localityMatch = localityMatcher.find(); - } else { - localityMatch = localityPatternString.equals(ADMIT_ALL); - } - if (!(localityMatch)) - return false; - - // OU ArrayList - ArrayList organizationalUnitNames = principalDN - .getOrganizationalUnitNames(); - if ((organizationalUnitNames != null) - && (!(organizationalUnitNames.isEmpty()))) { - CharSequence organizationalUnit = null; - String nameStr = null; - Matcher organizationalUnitMatcher = null; - for (Iterator name = organizationalUnitNames.iterator(); name - .hasNext();) { - nameStr = name.next(); - organizationalUnit = nameStr.subSequence(0, nameStr.length()); - organizationalUnitMatcher = organizationalUnitPattern - .matcher(organizationalUnit); - organizationalUnitMatch = organizationalUnitMatcher.find(); - if (organizationalUnitMatch) - break; - } - } else { - organizationalUnitMatch = organizationalUnitPatternString - .equals(ADMIT_ALL); - } - if (!(organizationalUnitMatch)) - return false; - - // CN ArrayList - ArrayList commonNames = principalDN.getCommonNames(); - if ((commonNames != null) && (!(commonNames.isEmpty()))) { - CharSequence commonName = null; - String commonNameStr = null; - Matcher commonNameMatcher = null; - for (Iterator scanCN = commonNames.iterator(); scanCN.hasNext();) { - commonNameStr = scanCN.next(); - commonName = commonNameStr.subSequence(0, commonNameStr.length()); - commonNameMatcher = commonNamePattern.matcher(commonName); - commonNameMatch = commonNameMatcher.find(); - if (commonNameMatch) - break; - } - } else { - commonNameMatch = commonNamePatternString.equals(ADMIT_ALL); - } - if (!(commonNameMatch)) - return false; - - // DC ArrayList - ArrayList domainComponents = principalDN.getDomainComponents(); - if ((domainComponents != null) && (!(domainComponents.isEmpty()))) { - CharSequence domainComponent = null; - String domainComponentStr = null; - Matcher domainComponentMatcher = null; - for (Iterator scanDC = domainComponents.iterator(); scanDC - .hasNext();) { - domainComponentStr = scanDC.next(); - domainComponent = domainComponentStr.subSequence(0, - domainComponentStr.length()); - 
domainComponentMatcher = domainComponentPattern - .matcher(domainComponent); - domainComponentMatch = domainComponentMatcher.find(); - if (domainComponentMatch) - break; - } - } else { - domainComponentMatch = commonNamePatternString.equals(ADMIT_ALL); - } - if (!(domainComponentMatch)) - return false; - - // Total Result - // NOTE : At this point result should be always TRUE! - result = countryMatch && organizationMatch && organizationalUnitMatch - && localityMatch && commonNameMatch && domainComponentMatch; - return result; - } - - public String toString() { - - StringBuilder result = new StringBuilder(); - result.append(" C=" + countryPatternString); - result.append(" O=" + organizationPatternString); - result.append(" OU=" + organizationalUnitPatternString); - result.append(" L=" + localityPatternString); - result.append(" CN=" + commonNamePatternString); - return result.toString(); - } - - public String toShortSlashSeparatedString() { - - StringBuilder result = new StringBuilder(); - if (!countryPatternString.equals(ADMIT_ALL)) { - result.append("/C=" + countryPatternString); - } - if (!organizationPatternString.equals(ADMIT_ALL)) { - result.append("/O=" + organizationPatternString); - } - if (!organizationalUnitPatternString.equals(ADMIT_ALL)) { - result.append("/OU=" + organizationalUnitPatternString); - } - if (!localityPatternString.equals(ADMIT_ALL)) { - result.append("/L=" + localityPatternString); - } - if (!commonNamePatternString.equals(ADMIT_ALL)) { - result.append("/CN=" + commonNamePatternString); - } - return result.toString(); - } - + private static final Logger log = LoggerFactory.getLogger(DNMatchingRule.class); + + private enum DNFields { + COUNTRY("C"), + ORGANIZATION("O"), + ORGANIZATIONALUNIT("OU"), + LOCALITY("L"), + COMMONNAME("CN"), + DOMAINCOMPONENT("DC"), + UNKNOWN(""); + + private final String code; + + private DNFields(String code) throws IllegalArgumentException { + + if (code == null) { + throw new IllegalArgumentException("Received null code argument"); + } + this.code = code; + } + + public static DNFields fromString(String code) { + + for (DNFields field : DNFields.values()) { + if (field.code.equals(code)) { + return field; + } + } + return UNKNOWN; + } + } + + private static final String ADMIT_ALL = ".*"; + + private String countryPatternString; + private String organizationPatternString; + private String organizationalUnitPatternString; + private String localityPatternString; + private String commonNamePatternString; + private String domainComponentPatternString; + + private Pattern countryPattern; + private Pattern organizationPattern; + private Pattern organizationalUnitPattern; + private Pattern localityPattern; + private Pattern commonNamePattern; + private Pattern domainComponentPattern; + + public static DNMatchingRule buildMatchAllDNMatchingRule() { + + return new DNMatchingRule(ADMIT_ALL, ADMIT_ALL, ADMIT_ALL, ADMIT_ALL, ADMIT_ALL, ADMIT_ALL); + } + + /** + * Constructor with implicit Pattern String + * + * @param regularExpressionRule String + */ + public DNMatchingRule(String regularExpressionRule) { + + countryPatternString = ADMIT_ALL; + organizationPatternString = ADMIT_ALL; + organizationalUnitPatternString = ADMIT_ALL; + localityPatternString = ADMIT_ALL; + commonNamePatternString = ADMIT_ALL; + domainComponentPatternString = ADMIT_ALL; + if (!(regularExpressionRule == null + || regularExpressionRule.trim().equals("*") + || regularExpressionRule.trim().equals(".*"))) { + for (String rule : regularExpressionRule.split("/")) { + if 
(!rule.contains("=")) { + if (!(rule.trim().isEmpty() || rule.equals(ADMIT_ALL))) { + log.warn( + "Malformed DN regex element '{}' " + + "it does not contains \'=\' key-value separator", + rule); + } + continue; + } + String[] ruleCoupple = rule.split("="); + if (ruleCoupple.length != 2) { + log.warn( + "Malformed DN regex element '{}' it does not contains " + + "the key or contains more \'=\' separators", + rule); + continue; + } + switch (DNFields.fromString(ruleCoupple[0])) { + case COUNTRY: + countryPatternString = ruleCoupple[1]; + break; + case ORGANIZATION: + organizationPatternString = ruleCoupple[1]; + break; + case ORGANIZATIONALUNIT: + organizationalUnitPatternString = ruleCoupple[1]; + break; + case LOCALITY: + localityPatternString = ruleCoupple[1]; + break; + case COMMONNAME: + commonNamePatternString = ruleCoupple[1]; + break; + case DOMAINCOMPONENT: + domainComponentPatternString = ruleCoupple[1]; + break; + default: + break; + } + } + } + initPatterns(); + } + + private void initPatterns() { + + // C country + if (isMatchAll(countryPatternString)) { + countryPattern = Pattern.compile(ADMIT_ALL); + } else { + countryPattern = Pattern.compile(this.countryPatternString); + } + + // O organization + if (isMatchAll(organizationPatternString)) { + organizationPattern = Pattern.compile(ADMIT_ALL); + } else { + organizationPattern = Pattern.compile(this.organizationPatternString); + } + + // OU organizationalUnit + if (isMatchAll(organizationalUnitPatternString)) { + organizationalUnitPattern = Pattern.compile(ADMIT_ALL); + } else { + organizationalUnitPattern = Pattern.compile(this.organizationalUnitPatternString); + } + + // L locality + if (isMatchAll(localityPatternString)) { + localityPattern = Pattern.compile(ADMIT_ALL); + } else { + localityPattern = Pattern.compile(this.localityPatternString); + } + + // CN Common Name + if (isMatchAll(commonNamePatternString)) { + commonNamePattern = Pattern.compile(ADMIT_ALL); + } else { + commonNamePattern = Pattern.compile(this.commonNamePatternString); + } + + // DC Domain Component + if (isMatchAll(domainComponentPatternString)) { + domainComponentPattern = Pattern.compile(ADMIT_ALL); + } else { + domainComponentPattern = Pattern.compile(this.domainComponentPatternString); + } + } + + private static boolean isMatchAll(String pattern) { + + return pattern == null || pattern.trim().equals("*") || pattern.trim().equals(".*"); + } + + public DNMatchingRule( + String countryPatternString, + String organizationPatternString, + String organizationalUnitPatternString, + String localityPatternString, + String commonNamePatternString, + String domainComponentPatternString) { + + this.countryPatternString = countryPatternString; + this.organizationPatternString = organizationPatternString; + this.organizationalUnitPatternString = organizationalUnitPatternString; + this.localityPatternString = localityPatternString; + this.commonNamePatternString = commonNamePatternString; + this.domainComponentPatternString = domainComponentPatternString; + initPatterns(); + } + + public boolean isMatchAll() { + + return isMatchAll(countryPatternString) + && isMatchAll(organizationPatternString) + && isMatchAll(organizationalUnitPatternString) + && isMatchAll(localityPatternString) + && isMatchAll(commonNamePatternString) + && isMatchAll(domainComponentPatternString); + } + + public boolean match(DistinguishedName principalDN) throws IllegalArgumentException { + + if (principalDN == null) { + throw new IllegalArgumentException( + "Unable to perform rule 
matching. Received null argument: principalDN=" + principalDN); + } + if (this.isMatchAll()) { + return true; + } + boolean result = false; + boolean countryMatch = false; + boolean organizationMatch = false; + boolean localityMatch = false; + boolean organizationalUnitMatch = false; + boolean commonNameMatch = false; + boolean domainComponentMatch = false; + + // C + String countryName = principalDN.getCountryName(); + if (countryName != null) { + CharSequence country = countryName.subSequence(0, countryName.length()); + Matcher countryMatcher = countryPattern.matcher(country); + countryMatch = countryMatcher.find(); + } else { + countryMatch = countryPatternString.equals(ADMIT_ALL); + } + if (!(countryMatch)) return false; + + // O + String organizationName = principalDN.getOrganizationName(); + if (organizationName != null) { + CharSequence organization = organizationName.subSequence(0, organizationName.length()); + Matcher organizationMatcher = organizationPattern.matcher(organization); + organizationMatch = organizationMatcher.find(); + } else { + organizationMatch = organizationPatternString.equals(ADMIT_ALL); + } + if (!(organizationMatch)) return false; + + // L + String localityName = principalDN.getLocalityName(); + if (localityName != null) { + CharSequence locality = localityName.subSequence(0, localityName.length()); + Matcher localityMatcher = localityPattern.matcher(locality); + localityMatch = localityMatcher.find(); + } else { + localityMatch = localityPatternString.equals(ADMIT_ALL); + } + if (!(localityMatch)) return false; + + // OU ArrayList + ArrayList organizationalUnitNames = principalDN.getOrganizationalUnitNames(); + if ((organizationalUnitNames != null) && (!(organizationalUnitNames.isEmpty()))) { + CharSequence organizationalUnit = null; + String nameStr = null; + Matcher organizationalUnitMatcher = null; + for (Iterator name = organizationalUnitNames.iterator(); name.hasNext(); ) { + nameStr = name.next(); + organizationalUnit = nameStr.subSequence(0, nameStr.length()); + organizationalUnitMatcher = organizationalUnitPattern.matcher(organizationalUnit); + organizationalUnitMatch = organizationalUnitMatcher.find(); + if (organizationalUnitMatch) break; + } + } else { + organizationalUnitMatch = organizationalUnitPatternString.equals(ADMIT_ALL); + } + if (!(organizationalUnitMatch)) return false; + + // CN ArrayList + ArrayList commonNames = principalDN.getCommonNames(); + if ((commonNames != null) && (!(commonNames.isEmpty()))) { + CharSequence commonName = null; + String commonNameStr = null; + Matcher commonNameMatcher = null; + for (Iterator scanCN = commonNames.iterator(); scanCN.hasNext(); ) { + commonNameStr = scanCN.next(); + commonName = commonNameStr.subSequence(0, commonNameStr.length()); + commonNameMatcher = commonNamePattern.matcher(commonName); + commonNameMatch = commonNameMatcher.find(); + if (commonNameMatch) break; + } + } else { + commonNameMatch = commonNamePatternString.equals(ADMIT_ALL); + } + if (!(commonNameMatch)) return false; + + // DC ArrayList + ArrayList domainComponents = principalDN.getDomainComponents(); + if ((domainComponents != null) && (!(domainComponents.isEmpty()))) { + CharSequence domainComponent = null; + String domainComponentStr = null; + Matcher domainComponentMatcher = null; + for (Iterator scanDC = domainComponents.iterator(); scanDC.hasNext(); ) { + domainComponentStr = scanDC.next(); + domainComponent = domainComponentStr.subSequence(0, domainComponentStr.length()); + domainComponentMatcher = 
domainComponentPattern.matcher(domainComponent); + domainComponentMatch = domainComponentMatcher.find(); + if (domainComponentMatch) break; + } + } else { + domainComponentMatch = commonNamePatternString.equals(ADMIT_ALL); + } + if (!(domainComponentMatch)) return false; + + // Total Result + // NOTE : At this point result should be always TRUE! + result = + countryMatch + && organizationMatch + && organizationalUnitMatch + && localityMatch + && commonNameMatch + && domainComponentMatch; + return result; + } + + public String toString() { + + StringBuilder result = new StringBuilder(); + result.append(" C=" + countryPatternString); + result.append(" O=" + organizationPatternString); + result.append(" OU=" + organizationalUnitPatternString); + result.append(" L=" + localityPatternString); + result.append(" CN=" + commonNamePatternString); + return result.toString(); + } + + public String toShortSlashSeparatedString() { + + StringBuilder result = new StringBuilder(); + if (!countryPatternString.equals(ADMIT_ALL)) { + result.append("/C=" + countryPatternString); + } + if (!organizationPatternString.equals(ADMIT_ALL)) { + result.append("/O=" + organizationPatternString); + } + if (!organizationalUnitPatternString.equals(ADMIT_ALL)) { + result.append("/OU=" + organizationalUnitPatternString); + } + if (!localityPatternString.equals(ADMIT_ALL)) { + result.append("/L=" + localityPatternString); + } + if (!commonNamePatternString.equals(ADMIT_ALL)) { + result.append("/CN=" + commonNamePatternString); + } + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/griduser/DistinguishedName.java b/src/main/java/it/grid/storm/griduser/DistinguishedName.java index 509ef6b7..3e9e2fae 100644 --- a/src/main/java/it/grid/storm/griduser/DistinguishedName.java +++ b/src/main/java/it/grid/storm/griduser/DistinguishedName.java @@ -1,332 +1,319 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. 
For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Original design made by Riccardo Zappi , 2007 - * + * * $Id: DistinguishedName.java,v 1.5 2007/05/22 19:54:54 rzappi Exp $ */ package it.grid.storm.griduser; +import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; - import javax.security.auth.x500.X500Principal; - -import com.google.common.collect.Lists; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DistinguishedName implements SubjectAttribute { - private static final Logger log = LoggerFactory - .getLogger(DistinguishedName.class); - - private String countryName = null; - private String provinceName = null; - private String organizationName = null; - private String localityName = null; - private String canonizedProxyDN = null; - private final ArrayList organizationalUnitNames = new ArrayList(); - private final ArrayList commonNames = new ArrayList(); - private final ArrayList domainComponents = new ArrayList(); - - private String eMailAddress = null; - private String distinguishedName = null; - - private X500Principal x500DN = null; - - public DistinguishedName(String stringDN) { - - if (stringDN != null) { - distinguishedName = stringDN; - // Check the format of DN - int slashIndex = distinguishedName.indexOf('/'); - int commaIndex = distinguishedName.indexOf(','); - if (slashIndex > -1) { - parseDNslahed(); - buildX500DN(); - } - if (commaIndex > -1) { - parseDNcommed(); - builderWithMap(stringDN); - } - - } else { - distinguishedName = "empty"; - } - } - - /** - * @param stringDN - */ - private void builderWithMap(String stringDN) { - - String[] couples = stringDN.split(","); - Map pairs = new HashMap(); - for (String couple : couples) { - if (couple.contains("=")) { - String key = couple.split("=")[0]; - String value = couple.split("=")[1]; - pairs.put(key, value); - } - } - if (pairs.size() > 0) { - log - .error("To use this functionality (DN rfc 2253) you have to recompile with Java 1.6"); - } - - } - - private void assignAttributes(String[] dnChunk) { - - if (dnChunk != null) { - int length = dnChunk.length; - for (int i = 0; i < length; i++) { - if (dnChunk[i].startsWith("C=")) { - countryName = dnChunk[i].substring(2, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("ST=")) { - provinceName = dnChunk[i].substring(3, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("O=")) { - organizationName = dnChunk[i].substring(2, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("OU=")) { - organizationalUnitNames.add(dnChunk[i].substring(3, - dnChunk[i].length())); - } - if (dnChunk[i].startsWith("L=")) { - localityName = dnChunk[i].substring(2, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("CN=")) { - commonNames.add(dnChunk[i].substring(3, dnChunk[i].length())); - } - if (dnChunk[i].startsWith("DC=")) { - domainComponents.add(dnChunk[i].substring(3, dnChunk[i].length())); - } - - /** - * @todo : Implement case insentive for Attribute email - */ - if (dnChunk[i].startsWith("Email=")) { - eMailAddress = dnChunk[i].substring(6, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("E=")) { - eMailAddress = dnChunk[i].substring(2, dnChunk[i].length()); - } - if (dnChunk[i].startsWith("EMailAddress=")) { - eMailAddress = dnChunk[i].substring(13, dnChunk[i].length()); - } - } - } - } - - private void parseDNslahed() { - - List list = Lists.newArrayList(); - String dn = distinguishedName; - boolean stop = false; - - while 
(!stop) { - // Get index of lat '=' - int indexOfEq = dn.lastIndexOf('='); - // Exit if it does not exists - if (indexOfEq == -1) { - stop = true; - continue; - } - - String tmpDN = dn.substring(0, indexOfEq); - // Get index of the first '/' char on the left of the '=' - int indexOfAttr = tmpDN.lastIndexOf('/'); - - // the substring from the indexOfAttr obtained to end of the String - // is a attr-value pair! - // Add it to the results List. - list.add(dn.substring(indexOfAttr + 1, dn.length())); - - // Cut the result from the working DN string, and iterate. - dn = dn.substring(0, indexOfAttr); - } - - StringBuilder sb = new StringBuilder(); - String[] attributes = new String[list.size()]; - - // Create a string representation of the DN. - // Note that the result List contains attribute-value pair Strings in - // reverse order! + private static final Logger log = LoggerFactory.getLogger(DistinguishedName.class); + + private String countryName = null; + private String provinceName = null; + private String organizationName = null; + private String localityName = null; + private String canonizedProxyDN = null; + private final ArrayList organizationalUnitNames = new ArrayList(); + private final ArrayList commonNames = new ArrayList(); + private final ArrayList domainComponents = new ArrayList(); + + private String eMailAddress = null; + private String distinguishedName = null; + + private X500Principal x500DN = null; + + public DistinguishedName(String stringDN) { + + if (stringDN != null) { + distinguishedName = stringDN; + // Check the format of DN + int slashIndex = distinguishedName.indexOf('/'); + int commaIndex = distinguishedName.indexOf(','); + if (slashIndex > -1) { + parseDNslahed(); + buildX500DN(); + } + if (commaIndex > -1) { + parseDNcommed(); + builderWithMap(stringDN); + } + + } else { + distinguishedName = "empty"; + } + } + + /** @param stringDN */ + private void builderWithMap(String stringDN) { + + String[] couples = stringDN.split(","); + Map pairs = new HashMap(); + for (String couple : couples) { + if (couple.contains("=")) { + String key = couple.split("=")[0]; + String value = couple.split("=")[1]; + pairs.put(key, value); + } + } + if (pairs.size() > 0) { + log.error("To use this functionality (DN rfc 2253) you have to recompile with Java 1.6"); + } + } + + private void assignAttributes(String[] dnChunk) { + + if (dnChunk != null) { + int length = dnChunk.length; + for (int i = 0; i < length; i++) { + if (dnChunk[i].startsWith("C=")) { + countryName = dnChunk[i].substring(2, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("ST=")) { + provinceName = dnChunk[i].substring(3, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("O=")) { + organizationName = dnChunk[i].substring(2, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("OU=")) { + organizationalUnitNames.add(dnChunk[i].substring(3, dnChunk[i].length())); + } + if (dnChunk[i].startsWith("L=")) { + localityName = dnChunk[i].substring(2, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("CN=")) { + commonNames.add(dnChunk[i].substring(3, dnChunk[i].length())); + } + if (dnChunk[i].startsWith("DC=")) { + domainComponents.add(dnChunk[i].substring(3, dnChunk[i].length())); + } + + /** @todo : Implement case insentive for Attribute email */ + if (dnChunk[i].startsWith("Email=")) { + eMailAddress = dnChunk[i].substring(6, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("E=")) { + eMailAddress = dnChunk[i].substring(2, dnChunk[i].length()); + } + if (dnChunk[i].startsWith("EMailAddress=")) { + 
eMailAddress = dnChunk[i].substring(13, dnChunk[i].length()); + } + } + } + } + + private void parseDNslahed() { + + List list = Lists.newArrayList(); + String dn = distinguishedName; + boolean stop = false; + + while (!stop) { + // Get index of lat '=' + int indexOfEq = dn.lastIndexOf('='); + // Exit if it does not exists + if (indexOfEq == -1) { + stop = true; + continue; + } + + String tmpDN = dn.substring(0, indexOfEq); + // Get index of the first '/' char on the left of the '=' + int indexOfAttr = tmpDN.lastIndexOf('/'); + + // the substring from the indexOfAttr obtained to end of the String + // is a attr-value pair! + // Add it to the results List. + list.add(dn.substring(indexOfAttr + 1, dn.length())); + + // Cut the result from the working DN string, and iterate. + dn = dn.substring(0, indexOfAttr); + } + + StringBuilder sb = new StringBuilder(); + String[] attributes = new String[list.size()]; - for (int i = 0; i < list.size(); i++) { - if (i == list.size() - 1) { - sb.append(list.get(list.size() - 1 - i)); - } else { - sb.append(list.get(list.size() - 1 - i) + ","); - } + // Create a string representation of the DN. + // Note that the result List contains attribute-value pair Strings in + // reverse order! - // Prepare the array for attributes evaluation - attributes[i] = list.get((list.size() - 1 - i)); - } + for (int i = 0; i < list.size(); i++) { + if (i == list.size() - 1) { + sb.append(list.get(list.size() - 1 - i)); + } else { + sb.append(list.get(list.size() - 1 - i) + ","); + } - canonizedProxyDN = sb.toString(); - assignAttributes(attributes); + // Prepare the array for attributes evaluation + attributes[i] = list.get((list.size() - 1 - i)); + } - } + canonizedProxyDN = sb.toString(); + assignAttributes(attributes); + } - private void parseDNcommed() { + private void parseDNcommed() { - String[] attributes = distinguishedName.split(","); - assignAttributes(attributes); - } + String[] attributes = distinguishedName.split(","); + assignAttributes(attributes); + } - private void buildX500DN() { + private void buildX500DN() { - x500DN = new X500Principal(canonizedProxyDN); - } + x500DN = new X500Principal(canonizedProxyDN); + } - public String getX500DNString(String format) { + public String getX500DNString(String format) { - return (x500DN != null ? x500DN.getName(format) : ""); - } + return (x500DN != null ? x500DN.getName(format) : ""); + } - public X500Principal getX500DN() { + public X500Principal getX500DN() { - return x500DN; - } + return x500DN; + } - public String getX500DN_rfc1779() { + public String getX500DN_rfc1779() { - return (x500DN != null ? x500DN.getName(X500Principal.RFC1779) : ""); - } + return (x500DN != null ? x500DN.getName(X500Principal.RFC1779) : ""); + } - public String getX500DN_canonical() { + public String getX500DN_canonical() { - return (x500DN != null ? x500DN.getName(X500Principal.CANONICAL) : ""); - } + return (x500DN != null ? x500DN.getName(X500Principal.CANONICAL) : ""); + } - public String getX500DN_rfc2253() { + public String getX500DN_rfc2253() { - return (x500DN != null ? x500DN.getName(X500Principal.RFC2253) : ""); - } + return (x500DN != null ? 
x500DN.getName(X500Principal.RFC2253) : ""); + } - public String getCountryName() { + public String getCountryName() { - return countryName; - } + return countryName; + } - public String getProvinceName() { + public String getProvinceName() { - return provinceName; - } + return provinceName; + } - public String getOrganizationName() { + public String getOrganizationName() { - return organizationName; - } + return organizationName; + } - public ArrayList getOrganizationalUnitNames() { + public ArrayList getOrganizationalUnitNames() { - return organizationalUnitNames; - } + return organizationalUnitNames; + } - public ArrayList getDomainComponents() { + public ArrayList getDomainComponents() { - return domainComponents; - } + return domainComponents; + } - public String getLocalityName() { + public String getLocalityName() { - return localityName; - } + return localityName; + } - public ArrayList getCommonNames() { + public ArrayList getCommonNames() { - return commonNames; - } + return commonNames; + } - public String getEMail() { + public String getEMail() { - return eMailAddress; - } + return eMailAddress; + } - public String getDN() { + public String getDN() { - return distinguishedName; - } + return distinguishedName; + } - @Override - public boolean equals(Object o) { + @Override + public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof DistinguishedName)) { - return false; - } + if (this == o) { + return true; + } + if (!(o instanceof DistinguishedName)) { + return false; + } - final DistinguishedName dn = (DistinguishedName) o; + final DistinguishedName dn = (DistinguishedName) o; - if (!x500DN.equals(dn.getX500DN())) { - return false; - } + if (!x500DN.equals(dn.getX500DN())) { + return false; + } - return true; - } + return true; + } - public int hashCode() { + public int hashCode() { - int result = 17; - if (x500DN != null) { - result += 31 * x500DN.hashCode(); - } - return result; - } + int result = 17; + if (x500DN != null) { + result += 31 * x500DN.hashCode(); + } + return result; + } - @Override - public String toString() { + @Override + public String toString() { - StringBuilder result = new StringBuilder(); - if (countryName != null) { - result.append("C=" + countryName + "\n"); - } - if (provinceName != null) { - result.append("ST=" + provinceName + "\n"); - } - if (organizationName != null) { - result.append("O=" + organizationName + "\n"); - } - if (organizationalUnitNames != null) { - for (String string : organizationalUnitNames) { - result.append("OU=" + string + "\n"); - } - } + StringBuilder result = new StringBuilder(); + if (countryName != null) { + result.append("C=" + countryName + "\n"); + } + if (provinceName != null) { + result.append("ST=" + provinceName + "\n"); + } + if (organizationName != null) { + result.append("O=" + organizationName + "\n"); + } + if (organizationalUnitNames != null) { + for (String string : organizationalUnitNames) { + result.append("OU=" + string + "\n"); + } + } - if (localityName != null) { - result.append("L=" + localityName + "\n"); - } - if (commonNames != null) { - for (String string : commonNames) { - result.append("CN=" + string + "\n"); - } - } - if (domainComponents != null) { - for (String string : domainComponents) { - result.append("DC=" + string + "\n"); - } - } - if (eMailAddress != null) { - result.append("EMail=" + eMailAddress + "\n"); - } - return result.toString(); - } + if (localityName != null) { + result.append("L=" + localityName + "\n"); + } + if (commonNames != null) { + 
for (String string : commonNames) { + result.append("CN=" + string + "\n"); + } + } + if (domainComponents != null) { + for (String string : domainComponents) { + result.append("DC=" + string + "\n"); + } + } + if (eMailAddress != null) { + result.append("EMail=" + eMailAddress + "\n"); + } + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/griduser/DnMatch.java b/src/main/java/it/grid/storm/griduser/DnMatch.java index f1d58983..e6462a7b 100644 --- a/src/main/java/it/grid/storm/griduser/DnMatch.java +++ b/src/main/java/it/grid/storm/griduser/DnMatch.java @@ -1,19 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -/** - * Interface that the DN matching algorithms must implement. - */ +/** Interface that the DN matching algorithms must implement. */ public interface DnMatch { - /** - * Compare two DNs and return true if they match, according to the implemented - * criterion. - * - * @return true if the DNs do match. - */ - public boolean match(final String proxyDn, final String fixedDn); + /** + * Compare two DNs and return true if they match, according to the implemented criterion. + * + * @return true if the DNs do match. + */ + public boolean match(final String proxyDn, final String fixedDn); } diff --git a/src/main/java/it/grid/storm/griduser/ExactDnMatch.java b/src/main/java/it/grid/storm/griduser/ExactDnMatch.java index 667d4702..ef49eb79 100644 --- a/src/main/java/it/grid/storm/griduser/ExactDnMatch.java +++ b/src/main/java/it/grid/storm/griduser/ExactDnMatch.java @@ -1,40 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -/** - * Match a proxy DN against a fixed-string pattern. - */ +/** Match a proxy DN against a fixed-string pattern. */ public class ExactDnMatch implements DnMatch { - /** - * Return true if fixedDn is the initial segment of - * proxyDn, and the remaining part of proxyDn - * consists only of "CN=..." fields. (That is, proxyDn may be a - * proxy DN derived from fixedDn by a proxy delegation process, - * according to RFC3820. - * - * @return true if the DNs do match. - */ - public boolean match(final String proxyDn, final String fixedDn) { + /** + * Return true if fixedDn is the initial segment of proxyDn + * , and the remaining part of proxyDn consists only of "CN=..." fields. (That is, + * proxyDn may be a proxy DN derived from fixedDn by a proxy delegation + * process, according to RFC3820. + * + * @return true if the DNs do match. 
+ */ + public boolean match(final String proxyDn, final String fixedDn) { - assert (null != proxyDn); - assert (null != fixedDn); + assert (null != proxyDn); + assert (null != fixedDn); - if (!proxyDn.startsWith(fixedDn)) - return false; + if (!proxyDn.startsWith(fixedDn)) return false; - if (!(proxyDn.charAt(1 + fixedDn.length()) == '/')) - /* fixedDn did not match up to DN field boundary, fail */ - return false; + if (!(proxyDn.charAt(1 + fixedDn.length()) == '/')) + /* fixedDn did not match up to DN field boundary, fail */ + return false; - final String[] tails = proxyDn.substring(fixedDn.length()).split("/"); - for (int i = 1; i < tails.length; i++) - if (!tails[i].toUpperCase().startsWith("CN=")) - return false; + final String[] tails = proxyDn.substring(fixedDn.length()).split("/"); + for (int i = 1; i < tails.length; i++) + if (!tails[i].toUpperCase().startsWith("CN=")) return false; - return true; - } + return true; + } } diff --git a/src/main/java/it/grid/storm/griduser/FQAN.java b/src/main/java/it/grid/storm/griduser/FQAN.java index c7b73d3e..892762fa 100644 --- a/src/main/java/it/grid/storm/griduser/FQAN.java +++ b/src/main/java/it/grid/storm/griduser/FQAN.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; @@ -9,278 +8,265 @@ import java.util.regex.Pattern; /** - * Represents a single FQAN. Provides methods to access the individual parts of - * the FQAN, as well as the standard string representation of an FQAN. - * + * Represents a single FQAN. Provides methods to access the individual parts of the FQAN, as well as + * the standard string representation of an FQAN. */ public class FQAN implements SubjectAttribute { - static private Pattern fqanPattern = Pattern - .compile("/[\\w-\\.]+(/[\\w-\\.]+)*(/Role=[\\w-\\.]+)?(/Capability=[\\w-\\.]+)?"); - private static final char VO_FQAN_ESCAPE_CHAR = '/'; - private String fqan; - private String vo; - private String group; - private String role; - private String capability; - private boolean checkFormedness = true; - - // --- constructors --- // - - /** - * Constructor, taking a single FQAN passed as string; assumes VO name is - * first part of group name. - */ - public FQAN(String fqan) throws IllegalArgumentException { - - this(fqan, true); - } - - public FQAN(String fqan, boolean checkFormedness) - throws IllegalArgumentException { - - this.checkFormedness = checkFormedness; - setFqan(fqan); - if (parseFqan(fqan)) { - generateFqan(); - } else { - throw new IllegalArgumentException( - "The VO for a FQAN can't be null! 
FQAN string = " + fqan); - } - } - - public FQAN(String vo, String group, String role, String capability) { - - if (vo == null) { - throw new IllegalArgumentException("The VO for a FQAN can't be null"); - } - setVo(vo); - setGroup(group); - setRole(role); - setCapability(capability); - generateFqan(); - } - - /** - * Produce an FQAN object for the provided VO name - * - * @param voName - * @return - */ - public static FQAN makeVoFQAN(String voName) { - - return new FQAN(VO_FQAN_ESCAPE_CHAR + voName); - } - - // --- public accessor methods --- // - public String getVo() { - - return vo; - } - - public String getGroup() { - - StringBuilder sb = new StringBuilder(); - sb.append("/"); - sb.append(vo); - if (group != null) { - sb.append(group); - } - return sb.toString(); - } - - public String getSubGroup() { - - return group; - } - - public String getRole() { - - return role; - } - - public boolean isRoleNULL() { - - if ((role == null) || role.toUpperCase().equals("NULL")) { - return true; - } else { - return false; - } - } - - public boolean isCapabilityNULL() { - - if ((capability == null) || capability.toUpperCase().equals("NULL")) { - return true; - } else { - return false; - } - } - - public String getCapability() { - - return capability; - } - - private void generateFqan() { - - if (vo == null) { - fqan = null; - return; - } - StringBuilder bf = new StringBuilder(); - bf.append('/'); - bf.append(vo); - if (group != null) { - bf.append(group); - } - if (role != null) { - bf.append("/Role="); - bf.append(role); - } - if (capability != null) { - bf.append("/Capability="); - bf.append(capability); - } - fqan = bf.toString(); - } - - private boolean parseFqan(String fqan) { - - // Matches to the specification. - Matcher m = fqanPattern.matcher(fqan); - if (!m.matches()) { - if (checkFormedness) { - throw new IllegalArgumentException( - "FQAN '" - + fqan - + "' is malformed (syntax: /VO[/group[/subgroup(s)]][/Role=role][/Capability=cap])"); - } else { - return false; - } - } - - vo = null; - group = null; - role = null; - capability = null; - - StringTokenizer stk = new StringTokenizer(fqan, "/"); - if (!stk.hasMoreTokens()) { - return false; - } - vo = stk.nextToken(); - if (!stk.hasMoreTokens()) { - return true; - } - String tempGroup = ""; - String token = stk.nextToken(); - while ((!token.startsWith("Role=") && !token.startsWith("Capability="))) { - tempGroup = tempGroup + "/" + token; - group = tempGroup; - if (!stk.hasMoreTokens()) { - return true; - } - token = stk.nextToken(); - } - if (token.startsWith("Role=")) { - setRole(token.substring(5)); - if (!stk.hasMoreTokens()) { - return true; - } - token = stk.nextToken(); - } - if (token.startsWith("Capability=")) { - setCapability(token.substring(11)); - } - return true; - } - - private void setCapability(String capability) { - - if ((capability != null) && (!capability.matches("[\\w-\\.]+"))) { - throw new IllegalArgumentException("The capability '" + capability - + "' is malformed"); - } - this.capability = capability; - } - - private void setFqan(String fqan) { - - this.fqan = fqan; - } - - private void setGroup(String group) { - - if ((group != null) && (!group.matches("(/[\\w-\\.]+)+"))) { - throw new IllegalArgumentException("The group '" + group - + "' is malformed"); - } - this.group = group; - } - - private void setRole(String role) { - - if ((role != null) && (!role.matches("[\\w-\\.]+"))) { - throw new IllegalArgumentException("The role '" + role + "' is malformed"); - } - this.role = role; - if 
("NULL".equalsIgnoreCase(role)) { - this.role = null; - } - } - - private void setVo(String vo) { - - if ((vo != null) && (!vo.matches("[\\w-\\.]+"))) { - throw new IllegalArgumentException("The vo '" + vo + "' is malformed"); - } - this.vo = vo; - } - - /** - * - * @return int - */ - public int hashCode() { - - if (fqan == null) { - return 0; - } - return fqan.hashCode(); - } - - /** - * - * @param obj - * Object - * @return boolean - */ - public boolean equals(Object obj) { - - if (obj == null) { - return false; - } - if (obj instanceof FQAN) { - FQAN fqan2 = (FQAN) obj; - return (fqan2.fqan == null) ? fqan == null : fqan2.fqan - .equalsIgnoreCase(fqan); - } else { - return false; - } - } - - /** - * Return the usual string representation of the FQAN. - */ - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb = sb.append(getGroup()); - sb.append("/Role=" + ((role != null) ? getRole() : "NULL")); - if (capability != null) { - sb.append("/Capability=" + getCapability()); - } - return sb.toString(); - } + private static Pattern fqanPattern = + Pattern.compile("/[\\w-\\.]+(/[\\w-\\.]+)*(/Role=[\\w-\\.]+)?(/Capability=[\\w-\\.]+)?"); + private static final char VO_FQAN_ESCAPE_CHAR = '/'; + private String fqan; + private String vo; + private String group; + private String role; + private String capability; + private boolean checkFormedness = true; + + // --- constructors --- // + + /** + * Constructor, taking a single FQAN passed as string; assumes VO name is first part of group + * name. + */ + public FQAN(String fqan) throws IllegalArgumentException { + + this(fqan, true); + } + + public FQAN(String fqan, boolean checkFormedness) throws IllegalArgumentException { + + this.checkFormedness = checkFormedness; + setFqan(fqan); + if (parseFqan(fqan)) { + generateFqan(); + } else { + throw new IllegalArgumentException("The VO for a FQAN can't be null! 
FQAN string = " + fqan); + } + } + + public FQAN(String vo, String group, String role, String capability) { + + if (vo == null) { + throw new IllegalArgumentException("The VO for a FQAN can't be null"); + } + setVo(vo); + setGroup(group); + setRole(role); + setCapability(capability); + generateFqan(); + } + + /** + * Produce an FQAN object for the provided VO name + * + * @param voName + * @return + */ + public static FQAN makeVoFQAN(String voName) { + + return new FQAN(VO_FQAN_ESCAPE_CHAR + voName); + } + + // --- public accessor methods --- // + public String getVo() { + + return vo; + } + + public String getGroup() { + + StringBuilder sb = new StringBuilder(); + sb.append("/"); + sb.append(vo); + if (group != null) { + sb.append(group); + } + return sb.toString(); + } + + public String getSubGroup() { + + return group; + } + + public String getRole() { + + return role; + } + + public boolean isRoleNULL() { + + if ((role == null) || role.toUpperCase().equals("NULL")) { + return true; + } else { + return false; + } + } + + public boolean isCapabilityNULL() { + + if ((capability == null) || capability.toUpperCase().equals("NULL")) { + return true; + } else { + return false; + } + } + + public String getCapability() { + + return capability; + } + + private void generateFqan() { + + if (vo == null) { + fqan = null; + return; + } + StringBuilder bf = new StringBuilder(); + bf.append('/'); + bf.append(vo); + if (group != null) { + bf.append(group); + } + if (role != null) { + bf.append("/Role="); + bf.append(role); + } + if (capability != null) { + bf.append("/Capability="); + bf.append(capability); + } + fqan = bf.toString(); + } + + private boolean parseFqan(String fqan) { + + // Matches to the specification. + Matcher m = fqanPattern.matcher(fqan); + if (!m.matches()) { + if (checkFormedness) { + throw new IllegalArgumentException( + "FQAN '" + + fqan + + "' is malformed (syntax: /VO[/group[/subgroup(s)]][/Role=role][/Capability=cap])"); + } else { + return false; + } + } + + vo = null; + group = null; + role = null; + capability = null; + + StringTokenizer stk = new StringTokenizer(fqan, "/"); + if (!stk.hasMoreTokens()) { + return false; + } + vo = stk.nextToken(); + if (!stk.hasMoreTokens()) { + return true; + } + String tempGroup = ""; + String token = stk.nextToken(); + while ((!token.startsWith("Role=") && !token.startsWith("Capability="))) { + tempGroup = tempGroup + "/" + token; + group = tempGroup; + if (!stk.hasMoreTokens()) { + return true; + } + token = stk.nextToken(); + } + if (token.startsWith("Role=")) { + setRole(token.substring(5)); + if (!stk.hasMoreTokens()) { + return true; + } + token = stk.nextToken(); + } + if (token.startsWith("Capability=")) { + setCapability(token.substring(11)); + } + return true; + } + + private void setCapability(String capability) { + + if ((capability != null) && (!capability.matches("[\\w-\\.]+"))) { + throw new IllegalArgumentException("The capability '" + capability + "' is malformed"); + } + this.capability = capability; + } + + private void setFqan(String fqan) { + + this.fqan = fqan; + } + + private void setGroup(String group) { + + if ((group != null) && (!group.matches("(/[\\w-\\.]+)+"))) { + throw new IllegalArgumentException("The group '" + group + "' is malformed"); + } + this.group = group; + } + + private void setRole(String role) { + + if ((role != null) && (!role.matches("[\\w-\\.]+"))) { + throw new IllegalArgumentException("The role '" + role + "' is malformed"); + } + this.role = role; + if ("NULL".equalsIgnoreCase(role)) { 
+ this.role = null; + } + } + + private void setVo(String vo) { + + if ((vo != null) && (!vo.matches("[\\w-\\.]+"))) { + throw new IllegalArgumentException("The vo '" + vo + "' is malformed"); + } + this.vo = vo; + } + + /** @return int */ + public int hashCode() { + + if (fqan == null) { + return 0; + } + return fqan.hashCode(); + } + + /** + * @param obj Object + * @return boolean + */ + public boolean equals(Object obj) { + + if (obj == null) { + return false; + } + if (obj instanceof FQAN) { + FQAN fqan2 = (FQAN) obj; + return (fqan2.fqan == null) ? fqan == null : fqan2.fqan.equalsIgnoreCase(fqan); + } else { + return false; + } + } + + /** Return the usual string representation of the FQAN. */ + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb = sb.append(getGroup()); + sb.append("/Role=" + ((role != null) ? getRole() : "NULL")); + if (capability != null) { + sb.append("/Capability=" + getCapability()); + } + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/griduser/GridUser.java b/src/main/java/it/grid/storm/griduser/GridUser.java index ac022c40..7b32df71 100644 --- a/src/main/java/it/grid/storm/griduser/GridUser.java +++ b/src/main/java/it/grid/storm/griduser/GridUser.java @@ -1,15 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Original design made by Riccardo Zappi , 2007 - * + * * $Id: GridUser.java 3604 2007-05-22 11:16:27Z rzappi $ */ @@ -19,59 +18,58 @@ class GridUser extends AbstractGridUser { - GridUser(MapperInterface mapper, String distinguishedName) { + GridUser(MapperInterface mapper, String distinguishedName) { - super(mapper, distinguishedName); - } + super(mapper, distinguishedName); + } - GridUser(MapperInterface mapper, String distinguishedName, String proxy) { + GridUser(MapperInterface mapper, String distinguishedName, String proxy) { - super(mapper, distinguishedName, proxy); - } + super(mapper, distinguishedName, proxy); + } - public VO getVO() { - VO vo = VO.makeNoVo(); - return vo; - } + public VO getVO() { + VO vo = VO.makeNoVo(); + return vo; + } - @Override - public boolean hasVoms() { + @Override + public boolean hasVoms() { - return false; - } + return false; + } - @Override - public String[] getFQANsAsString() { + @Override + public String[] getFQANsAsString() { - return new String[0]; - } + return new String[0]; + } - @Override - public FQAN[] getFQANs() { + @Override + public FQAN[] getFQANs() { - return new FQAN[0]; - } + return new FQAN[0]; + } - @Override - public boolean equals(Object obj) { + @Override + public boolean equals(Object obj) { - boolean result = false; - if (obj != null) { - if (obj instanceof GridUserInterface) { - GridUserInterface other = (GridUserInterface) obj; - if (other.getDistinguishedName().equals(this.getDistinguishedName())) { - result = true; - } else { - result = false; - } - } - } - return result; - } + boolean result = false; + if (obj != null) { + if (obj instanceof GridUserInterface) { + GridUserInterface other = (GridUserInterface) obj; + if (other.getDistinguishedName().equals(this.getDistinguishedName())) { + result = true; + } else { + result = false; + } + } + } + return result; + } - public String toString() 
{ + public String toString() { - return "Grid User (no VOMS): '" - + getDistinguishedName().getX500DN_rfc1779() + "'"; - } + return "Grid User (no VOMS): '" + getDistinguishedName().getX500DN_rfc1779() + "'"; + } } diff --git a/src/main/java/it/grid/storm/griduser/GridUserException.java b/src/main/java/it/grid/storm/griduser/GridUserException.java index 9a59b4e5..6de13078 100644 --- a/src/main/java/it/grid/storm/griduser/GridUserException.java +++ b/src/main/java/it/grid/storm/griduser/GridUserException.java @@ -1,32 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -/** - * Root class for errors arising with the GridUser instanciation. - */ +/** Root class for errors arising with the GridUser instanciation. */ public class GridUserException extends Exception { - public GridUserException() { - - super(); - } + public GridUserException() { - public GridUserException(String message) { + super(); + } - super(message); - } + public GridUserException(String message) { - public GridUserException(String message, Throwable cause) { + super(message); + } - super(message, cause); - } + public GridUserException(String message, Throwable cause) { - public GridUserException(Throwable cause) { + super(message, cause); + } - super(cause); - } + public GridUserException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/it/grid/storm/griduser/GridUserFactory.java b/src/main/java/it/grid/storm/griduser/GridUserFactory.java index 47d22c20..e98fde4f 100644 --- a/src/main/java/it/grid/storm/griduser/GridUserFactory.java +++ b/src/main/java/it/grid/storm/griduser/GridUserFactory.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Riccardo Zappi , 2007 $Id: * GridUserFactory.java 3604 2007-05-22 11:16:27Z rzappi $ */ @@ -19,228 +18,225 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Map; - import org.slf4j.Logger; public class GridUserFactory { - private static final Logger log = GridUserManager.log; - private MapperInterface defaultMapperClass = null; - - private static GridUserFactory instance = null; - - private GridUserFactory() throws GridUserException { - - defaultMapperClass = makeMapperClass(GridUserManager.getMapperClassName()); - } - - static GridUserFactory getInstance() { - - if (instance == null) { - try { - instance = new GridUserFactory(); - } catch (GridUserException ex) { - log.error("Unable to load GridUser Mapper Driver!", ex); - } - } - return instance; - } - - /** - * Build a simple GridUser. No VOMS attributes are passed.. - * - * @return GridUserInterface - */ - GridUserInterface createGridUser(String distinguishName) { - - GridUserInterface user = new GridUser(defaultMapperClass, distinguishName); - log.debug("Created new Grid User (NO VOMS) : {}", user); - return user; - } - - /** - * Build a simple GridUser. Parsing of proxy is not performed here! This - * methos is meaningful only for srmCopy call. 
- * - * @return GridUserInterface - */ - GridUserInterface createGridUser(String distinguishName, String proxyString) { - - GridUserInterface user = new GridUser(defaultMapperClass, distinguishName, - proxyString); - log.debug("Created new Grid User (NO VOMS with PROXY) : {}", user); - return user; - } - - /** - * Build a VOMS Grid User, if FQAN passed are not null. Otherwise a simple - * GridUser instance wil be returned. - * - * @return GridUserInterface - */ - GridUserInterface createGridUser(String distinguishName, FQAN[] fqans) - throws IllegalArgumentException { - - GridUserInterface user = null; - try { - user = new VomsGridUser(defaultMapperClass, distinguishName, fqans); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - throw e; - } - log.debug("Created new Grid User (VOMS USER) : {}", user); - return user; - } - - /** - * Build a VOMS Grid User, if FQAN passed are not null. Otherwise a simple - * GridUser instance wil be returned. - * - * @return GridUserInterface - */ - GridUserInterface createGridUser(String distinguishName, FQAN[] fqans, - String proxyString) throws IllegalArgumentException { - - GridUserInterface user = null; - try { - user = new VomsGridUser(defaultMapperClass, distinguishName, proxyString, - fqans); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - throw e; - } - log.debug("Created new Grid User (VOMS USER with PROXY) : {}" , user); - return user; - } - - GridUserInterface decode(Map inputParam) { - - // Member name for VomsGridUser Creation - String member_DN = new String("userDN"); - String member_Fqans = new String("userFQANS"); - - // Get DN and FQANs[] - String dnString = (String) inputParam.get(member_DN); - Object[] fqansArr = (Object[]) inputParam.get(member_Fqans); - - // Destination Fqans array - FQAN[] fqans = null; - - if (fqansArr != null) { - // Define FQAN[] - fqans = new FQAN[fqansArr.length]; - log.debug("fqans_vector Size: {}" , fqansArr.length); - - for (int i = 0; i < fqansArr.length; i++) { - - log.debug("FQAN[{}]: {}",i, (String) fqansArr[i]); - fqans[i] = new FQAN((String) fqansArr[i]); - } - } - - if (dnString != null) { - log.debug("DN: {}" , dnString); - // Creation of srm GridUser type - if (fqans != null && fqans.length > 0) { - log.debug("VomsGU with FQAN"); - try { - return createGridUser(dnString, fqans); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - } - } else { - return createGridUser(dnString); - } - } - return null; - } - - private MapperInterface makeMapperClass(String mapperClassName) - throws GridUserException { - - MapperInterface mapper = null; - Class mapperClass = null; - if (mapperClassName == null) { - throw new GridUserException( - "Cannot load Mapper Driver without a valid Mapper Driver Class Name!"); - } - - // Retrieve the Class of driver - try { - mapperClass = Class.forName(mapperClassName); - } catch (ClassNotFoundException e) { - throw new GridUserException( - "Cannot load Mapper Driver instance without a valid Mapper Driver Class Name!", - e); - } - - // Check if the Class implements the right interface - if (!MapperInterface.class.isAssignableFrom(mapperClass)) { - throw new GridUserException( - "Cannot load Mapper Driver instance without a valid Mapper Driver Class Name!"); - } - try { - Constructor[] constructors = (Constructor[]) mapperClass - .getConstructors(); - boolean found = false; - for (Constructor constructor : constructors) { - if (constructor.getParameterTypes().length == 0) { - found = true; - break; - } - } - 
if (found) { - mapper = (MapperInterface) mapperClass.newInstance(); - } else { - try { - Method method = ((Class) mapperClass).getMethod( - "getInstance", null); - if (Modifier.isStatic(method.getModifiers())) { - try { - mapper = (MapperInterface) method.invoke(this, null); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot instantiate Mapper Driver using getInstance for Mapper Driver named :'" - + mapperClassName + "'"); - } catch (InvocationTargetException e) { - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot instantiate Mapper Driver using getInstance for Mapper Driver named :'" - + mapperClassName + "'"); - } - } else { - log - .error("Unable to instantiate the class using eiter no args constructor niether getInstance method. getInstance exists but is not static"); - throw new GridUserException( - "Cannot instantiate Mapper Driver using new or getInstance for Mapper Driver named :'" - + mapperClassName + "'"); - } - } catch (SecurityException e) { - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot instantiate Mapper Driver using getInstance for Mapper Driver named :'" - + mapperClassName + "'"); - } catch (NoSuchMethodException e) { - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot instantiate Mapper Driver using new or getInstance for Mapper Driver named :'" - + mapperClassName + "'"); - } - } - - } catch (IllegalAccessException e) { - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot create a new Instance of the Mapper Driver named :'" - + mapperClassName + "'"); - } catch (InstantiationException e) { - - log.error(e.getMessage(), e); - throw new GridUserException( - "Cannot create a new Instance of the Mapper Driver named :'" - + mapperClassName + "'"); - } - return mapper; - } + private static final Logger log = GridUserManager.log; + private MapperInterface defaultMapperClass = null; + + private static GridUserFactory instance = null; + + private GridUserFactory() throws GridUserException { + + defaultMapperClass = makeMapperClass(GridUserManager.getMapperClassName()); + } + + static GridUserFactory getInstance() { + + if (instance == null) { + try { + instance = new GridUserFactory(); + } catch (GridUserException ex) { + log.error("Unable to load GridUser Mapper Driver!", ex); + } + } + return instance; + } + + /** + * Build a simple GridUser. No VOMS attributes are passed.. + * + * @return GridUserInterface + */ + GridUserInterface createGridUser(String distinguishName) { + + GridUserInterface user = new GridUser(defaultMapperClass, distinguishName); + log.debug("Created new Grid User (NO VOMS) : {}", user); + return user; + } + + /** + * Build a simple GridUser. Parsing of proxy is not performed here! This methos is meaningful only + * for srmCopy call. + * + * @return GridUserInterface + */ + GridUserInterface createGridUser(String distinguishName, String proxyString) { + + GridUserInterface user = new GridUser(defaultMapperClass, distinguishName, proxyString); + log.debug("Created new Grid User (NO VOMS with PROXY) : {}", user); + return user; + } + + /** + * Build a VOMS Grid User, if FQAN passed are not null. Otherwise a simple GridUser instance wil + * be returned. 
+ * + * @return GridUserInterface + */ + GridUserInterface createGridUser(String distinguishName, FQAN[] fqans) + throws IllegalArgumentException { + + GridUserInterface user = null; + try { + user = new VomsGridUser(defaultMapperClass, distinguishName, fqans); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + throw e; + } + log.debug("Created new Grid User (VOMS USER) : {}", user); + return user; + } + + /** + * Build a VOMS Grid User, if FQAN passed are not null. Otherwise a simple GridUser instance wil + * be returned. + * + * @return GridUserInterface + */ + GridUserInterface createGridUser(String distinguishName, FQAN[] fqans, String proxyString) + throws IllegalArgumentException { + + GridUserInterface user = null; + try { + user = new VomsGridUser(defaultMapperClass, distinguishName, proxyString, fqans); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + throw e; + } + log.debug("Created new Grid User (VOMS USER with PROXY) : {}", user); + return user; + } + + GridUserInterface decode(Map inputParam) { + + // Member name for VomsGridUser Creation + String member_DN = new String("userDN"); + String member_Fqans = new String("userFQANS"); + + // Get DN and FQANs[] + String dnString = (String) inputParam.get(member_DN); + Object[] fqansArr = (Object[]) inputParam.get(member_Fqans); + + // Destination Fqans array + FQAN[] fqans = null; + + if (fqansArr != null) { + // Define FQAN[] + fqans = new FQAN[fqansArr.length]; + log.debug("fqans_vector Size: {}", fqansArr.length); + + for (int i = 0; i < fqansArr.length; i++) { + + log.debug("FQAN[{}]: {}", i, (String) fqansArr[i]); + fqans[i] = new FQAN((String) fqansArr[i]); + } + } + + if (dnString != null) { + log.debug("DN: {}", dnString); + // Creation of srm GridUser type + if (fqans != null && fqans.length > 0) { + log.debug("VomsGU with FQAN"); + try { + return createGridUser(dnString, fqans); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + } + } else { + return createGridUser(dnString); + } + } + return null; + } + + private MapperInterface makeMapperClass(String mapperClassName) throws GridUserException { + + MapperInterface mapper = null; + Class mapperClass = null; + if (mapperClassName == null) { + throw new GridUserException( + "Cannot load Mapper Driver without a valid Mapper Driver Class Name!"); + } + + // Retrieve the Class of driver + try { + mapperClass = Class.forName(mapperClassName); + } catch (ClassNotFoundException e) { + throw new GridUserException( + "Cannot load Mapper Driver instance without a valid Mapper Driver Class Name!", e); + } + + // Check if the Class implements the right interface + if (!MapperInterface.class.isAssignableFrom(mapperClass)) { + throw new GridUserException( + "Cannot load Mapper Driver instance without a valid Mapper Driver Class Name!"); + } + try { + Constructor[] constructors = + (Constructor[]) mapperClass.getConstructors(); + boolean found = false; + for (Constructor constructor : constructors) { + if (constructor.getParameterTypes().length == 0) { + found = true; + break; + } + } + if (found) { + mapper = (MapperInterface) mapperClass.newInstance(); + } else { + try { + Method method = ((Class) mapperClass).getMethod("getInstance", null); + if (Modifier.isStatic(method.getModifiers())) { + try { + mapper = (MapperInterface) method.invoke(this, null); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot instantiate Mapper Driver using getInstance for 
Mapper Driver named :'" + + mapperClassName + + "'"); + } catch (InvocationTargetException e) { + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot instantiate Mapper Driver using getInstance for Mapper Driver named :'" + + mapperClassName + + "'"); + } + } else { + log.error( + "Unable to instantiate the class using eiter no args constructor niether getInstance method. getInstance exists but is not static"); + throw new GridUserException( + "Cannot instantiate Mapper Driver using new or getInstance for Mapper Driver named :'" + + mapperClassName + + "'"); + } + } catch (SecurityException e) { + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot instantiate Mapper Driver using getInstance for Mapper Driver named :'" + + mapperClassName + + "'"); + } catch (NoSuchMethodException e) { + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot instantiate Mapper Driver using new or getInstance for Mapper Driver named :'" + + mapperClassName + + "'"); + } + } + + } catch (IllegalAccessException e) { + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot create a new Instance of the Mapper Driver named :'" + mapperClassName + "'"); + } catch (InstantiationException e) { + + log.error(e.getMessage(), e); + throw new GridUserException( + "Cannot create a new Instance of the Mapper Driver named :'" + mapperClassName + "'"); + } + return mapper; + } } diff --git a/src/main/java/it/grid/storm/griduser/GridUserInterface.java b/src/main/java/it/grid/storm/griduser/GridUserInterface.java index 118fe6d5..35e33caa 100644 --- a/src/main/java/it/grid/storm/griduser/GridUserInterface.java +++ b/src/main/java/it/grid/storm/griduser/GridUserInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * (c)2004 INFN / ICTP-eGrid This file can be distributed and/or modified under @@ -12,25 +11,17 @@ /** * Common Interface for GridUser. - * + * * @author Magnoni Luca */ - public interface GridUserInterface { - /** - * Return the Local User where the GridUser is mapped. - */ - public LocalUser getLocalUser() throws CannotMapUserException; - - /** - * Get GridUser Domain Name. Used for metadada purpose. - */ - public String getDn(); + /** Return the Local User where the GridUser is mapped. */ + public LocalUser getLocalUser() throws CannotMapUserException; - /** - * Get GridUser Domain Name. Used for metadada pouprose. - */ - public DistinguishedName getDistinguishedName(); + /** Get GridUser Domain Name. Used for metadada purpose. */ + public String getDn(); + /** Get GridUser Domain Name. Used for metadada pouprose. */ + public DistinguishedName getDistinguishedName(); } diff --git a/src/main/java/it/grid/storm/griduser/GridUserManager.java b/src/main/java/it/grid/storm/griduser/GridUserManager.java index 679dcce3..d3d0d809 100644 --- a/src/main/java/it/grid/storm/griduser/GridUserManager.java +++ b/src/main/java/it/grid/storm/griduser/GridUserManager.java @@ -1,134 +1,133 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * You may copy, distribute and modify this file under the terms of the INFN * GRID licence. 
For a copy of the licence please visit - * + * * http://www.cnaf.infn.it/license.html - * + * * Original design made by Riccardo Zappi , 2007 - * + * * $Id: GridUserManager.java 3604 2007-05-22 11:16:27Z rzappi $ */ package it.grid.storm.griduser; import it.grid.storm.config.Configuration; - import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GridUserManager { - static final Logger log = LoggerFactory.getLogger(GridUserManager.class); - static Configuration config = Configuration.getInstance(); - static GridUserFactory userFactory = null; - - static { - log.debug("Initializing Grid User Director..."); - userFactory = initializeFactory(); - } - - private GridUserManager() { - - super(); - } - - private static GridUserFactory initializeFactory() { - - return GridUserFactory.getInstance(); - } - - public static String getMapperClassName() { - - return config.getGridUserMapperClassname(); - } - - public static GridUserInterface makeVOMSGridUser(String dn, String proxy, - FQAN[] fqans) throws IllegalArgumentException { - - if (proxy == null || dn == null || fqans == null || fqans.length == 0) { - throw new IllegalArgumentException( - "Unable to make VomsGridUser. Inavlid arguments: dn=\'" + dn - + "\' fqans=\'" + fqans + "\' proxy=\'" + proxy + "\'"); - } - GridUserInterface gridUser = null; - try { - gridUser = userFactory.createGridUser(dn, fqans, proxy); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - } - return gridUser; - - } - - public static GridUserInterface makeVOMSGridUser(String dn, String vo) - throws IllegalArgumentException { - - if (vo == null || dn == null) { - throw new IllegalArgumentException( - "Unable to make VomsGridUser. Inavlid arguments: dn=\'" + dn - + "\' vo=\'" + vo + "\'"); - } - GridUserInterface gridUser = null; - FQAN[] fqans = new FQAN[1]; - fqans[0] = FQAN.makeVoFQAN(vo); - try { - gridUser = userFactory.createGridUser(dn, fqans); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(), e); - } - return gridUser; - } - - public static GridUserInterface makeVOMSGridUser(String dn, - String[] fqansString) throws IllegalArgumentException { - - if (fqansString == null || fqansString.length == 0) { - throw new IllegalArgumentException( - "Unable to make VomsGridUser. Invalid fqansString argument: " - + fqansString); - } - FQAN[] fqans = new FQAN[fqansString.length]; - for (int i = 0; i < fqansString.length; i++) { - fqans[i] = new FQAN(fqansString[i]); - } - return userFactory.createGridUser(dn, fqans); - } - - public static GridUserInterface makeVOMSGridUser(String dn, FQAN[] fqans) - throws IllegalArgumentException { - - if (fqans == null || fqans.length == 0) { - throw new IllegalArgumentException( - "Unable to make VomsGridUser. 
Invalid fqans argument: " + fqans); - } - return userFactory.createGridUser(dn, fqans); - } - - public static GridUserInterface makeGridUser(String dn) { - - return userFactory.createGridUser(dn); - } - - public static GridUserInterface makeGridUser(String dn, String proxy) { - - return userFactory.createGridUser(dn, proxy); - } - - public static GridUserInterface makeSAGridUser() { - GridUserInterface result = null; - String dn = "/DC=it/DC=infngrid/OU=Services/CN=storm"; - result = userFactory.createGridUser(dn); - return result; - } - - public static GridUserInterface decode(Map inputParam) { - - return userFactory.decode(inputParam); - } -} \ No newline at end of file + static final Logger log = LoggerFactory.getLogger(GridUserManager.class); + static Configuration config = Configuration.getInstance(); + static GridUserFactory userFactory = null; + + static { + log.debug("Initializing Grid User Director..."); + userFactory = initializeFactory(); + } + + private GridUserManager() { + + super(); + } + + private static GridUserFactory initializeFactory() { + + return GridUserFactory.getInstance(); + } + + public static String getMapperClassName() { + + return config.getGridUserMapperClassname(); + } + + public static GridUserInterface makeVOMSGridUser(String dn, String proxy, FQAN[] fqans) + throws IllegalArgumentException { + + if (proxy == null || dn == null || fqans == null || fqans.length == 0) { + throw new IllegalArgumentException( + "Unable to make VomsGridUser. Inavlid arguments: dn=\'" + + dn + + "\' fqans=\'" + + fqans + + "\' proxy=\'" + + proxy + + "\'"); + } + GridUserInterface gridUser = null; + try { + gridUser = userFactory.createGridUser(dn, fqans, proxy); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + } + return gridUser; + } + + public static GridUserInterface makeVOMSGridUser(String dn, String vo) + throws IllegalArgumentException { + + if (vo == null || dn == null) { + throw new IllegalArgumentException( + "Unable to make VomsGridUser. Inavlid arguments: dn=\'" + dn + "\' vo=\'" + vo + "\'"); + } + GridUserInterface gridUser = null; + FQAN[] fqans = new FQAN[1]; + fqans[0] = FQAN.makeVoFQAN(vo); + try { + gridUser = userFactory.createGridUser(dn, fqans); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + } + return gridUser; + } + + public static GridUserInterface makeVOMSGridUser(String dn, String[] fqansString) + throws IllegalArgumentException { + + if (fqansString == null || fqansString.length == 0) { + throw new IllegalArgumentException( + "Unable to make VomsGridUser. Invalid fqansString argument: " + fqansString); + } + FQAN[] fqans = new FQAN[fqansString.length]; + for (int i = 0; i < fqansString.length; i++) { + fqans[i] = new FQAN(fqansString[i]); + } + return userFactory.createGridUser(dn, fqans); + } + + public static GridUserInterface makeVOMSGridUser(String dn, FQAN[] fqans) + throws IllegalArgumentException { + + if (fqans == null || fqans.length == 0) { + throw new IllegalArgumentException( + "Unable to make VomsGridUser. 
Invalid fqans argument: " + fqans); + } + return userFactory.createGridUser(dn, fqans); + } + + public static GridUserInterface makeGridUser(String dn) { + + return userFactory.createGridUser(dn); + } + + public static GridUserInterface makeGridUser(String dn, String proxy) { + + return userFactory.createGridUser(dn, proxy); + } + + public static GridUserInterface makeSAGridUser() { + GridUserInterface result = null; + String dn = "/DC=it/DC=infngrid/OU=Services/CN=storm"; + result = userFactory.createGridUser(dn); + return result; + } + + public static GridUserInterface decode(Map inputParam) { + + return userFactory.decode(inputParam); + } +} diff --git a/src/main/java/it/grid/storm/griduser/InvalidFqanSyntax.java b/src/main/java/it/grid/storm/griduser/InvalidFqanSyntax.java index 1fcb9569..70cd51af 100644 --- a/src/main/java/it/grid/storm/griduser/InvalidFqanSyntax.java +++ b/src/main/java/it/grid/storm/griduser/InvalidFqanSyntax.java @@ -1,45 +1,39 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; /** - * Thrown when a invalid FQAN is detected by the - * {@link it.grid.storm.griduser.VomsGridUser#VomsGridUser(String,String[])} - * constructor. Holds and returns the offending FQAN string. + * Thrown when a invalid FQAN is detected by the {@link + * it.grid.storm.griduser.VomsGridUser#VomsGridUser(String,String[])} constructor. Holds and returns + * the offending FQAN string. */ public class InvalidFqanSyntax extends GridUserException { - /** The FQAN string that does not match the FQAN regexp */ - protected final String _offendingFqan; + /** The FQAN string that does not match the FQAN regexp */ + protected final String _offendingFqan; - /** - * Constructor, with the offending FQAN and a separate exception message. - */ - public InvalidFqanSyntax(String offendingFqan, String message) { + /** Constructor, with the offending FQAN and a separate exception message. */ + public InvalidFqanSyntax(String offendingFqan, String message) { - super(message); + super(message); - assert (null == offendingFqan) : "Null string passed to InvalidFqanSyntax constructor"; + assert (null == offendingFqan) : "Null string passed to InvalidFqanSyntax constructor"; - _offendingFqan = offendingFqan; - } + _offendingFqan = offendingFqan; + } - /** - * Constructor, specifying the offending FQAN only. A standard message is - * constructed. - */ - public InvalidFqanSyntax(String offendingFqan) { + /** Constructor, specifying the offending FQAN only. A standard message is constructed. */ + public InvalidFqanSyntax(String offendingFqan) { - // damn Java syntax, we cannot check offendingFqan before this... - super("Invalid FQAN: " + offendingFqan); + // damn Java syntax, we cannot check offendingFqan before this... 
+ super("Invalid FQAN: " + offendingFqan); - _offendingFqan = offendingFqan; - } + _offendingFqan = offendingFqan; + } - public String getOffendingFqan() { + public String getOffendingFqan() { - return _offendingFqan; - } + return _offendingFqan; + } } diff --git a/src/main/java/it/grid/storm/griduser/InvalidGridUserAttributesException.java b/src/main/java/it/grid/storm/griduser/InvalidGridUserAttributesException.java index 7f7be53d..d3c017d1 100644 --- a/src/main/java/it/grid/storm/griduser/InvalidGridUserAttributesException.java +++ b/src/main/java/it/grid/storm/griduser/InvalidGridUserAttributesException.java @@ -1,11 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; import java.io.Serializable; -public class InvalidGridUserAttributesException extends Exception implements - Serializable { -} +public class InvalidGridUserAttributesException extends Exception implements Serializable {} diff --git a/src/main/java/it/grid/storm/griduser/InvalidSubjectDnSyntax.java b/src/main/java/it/grid/storm/griduser/InvalidSubjectDnSyntax.java index 3ebedc66..ba9b56ef 100644 --- a/src/main/java/it/grid/storm/griduser/InvalidSubjectDnSyntax.java +++ b/src/main/java/it/grid/storm/griduser/InvalidSubjectDnSyntax.java @@ -1,45 +1,40 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; /** - * Thrown when a invalid subject DN is detected by the - * {@link it.grid.storm.griduser.VomsGridUser#VomsGridUser(String,String[])} - * constructor. Holds and returns the offending subject DN string. + * Thrown when a invalid subject DN is detected by the {@link + * it.grid.storm.griduser.VomsGridUser#VomsGridUser(String,String[])} constructor. Holds and returns + * the offending subject DN string. */ public class InvalidSubjectDnSyntax extends GridUserException { - /** The FQAN string that does not match the FQAN regexp */ - protected final String _offendingSubjectDn; + /** The FQAN string that does not match the FQAN regexp */ + protected final String _offendingSubjectDn; - /** - * Constructor, with the offending FQAN and a separate exception message. - */ - public InvalidSubjectDnSyntax(String offendingSubjectDn, String message) { + /** Constructor, with the offending FQAN and a separate exception message. */ + public InvalidSubjectDnSyntax(String offendingSubjectDn, String message) { - super(message); + super(message); - assert (null == offendingSubjectDn) : "Null string passed to InvalidSubjectDnSyntax constructor"; + assert (null == offendingSubjectDn) + : "Null string passed to InvalidSubjectDnSyntax constructor"; - _offendingSubjectDn = offendingSubjectDn; - } + _offendingSubjectDn = offendingSubjectDn; + } - /** - * Constructor, specifying the offending FQAN only. A standard message is - * constructed. - */ - public InvalidSubjectDnSyntax(String offendingSubjectDn) { + /** Constructor, specifying the offending FQAN only. A standard message is constructed. */ + public InvalidSubjectDnSyntax(String offendingSubjectDn) { - // damn Java syntax, we cannot check offendingSubjectDn before this... - super("Invalid FQAN: " + offendingSubjectDn); + // damn Java syntax, we cannot check offendingSubjectDn before this... 
+ super("Invalid FQAN: " + offendingSubjectDn); - _offendingSubjectDn = offendingSubjectDn; - } + _offendingSubjectDn = offendingSubjectDn; + } - public String getOffendingSubjectDn() { + public String getOffendingSubjectDn() { - return _offendingSubjectDn; - } + return _offendingSubjectDn; + } } diff --git a/src/main/java/it/grid/storm/griduser/LcmapsJNAMapper.java b/src/main/java/it/grid/storm/griduser/LcmapsJNAMapper.java index ec70ab35..06cd5304 100644 --- a/src/main/java/it/grid/storm/griduser/LcmapsJNAMapper.java +++ b/src/main/java/it/grid/storm/griduser/LcmapsJNAMapper.java @@ -1,143 +1,137 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -import it.grid.storm.griduser.CannotMapUserException; -import it.grid.storm.griduser.LocalUser; +import com.sun.jna.LastErrorException; import it.grid.storm.jna.lcmaps.LcmapsAccountInterface; import it.grid.storm.jna.lcmaps.LcmapsInterface; import it.grid.storm.jna.lcmaps.LcmapsPoolindexInterface; import it.grid.storm.jna.lcmaps.lcmaps_account_info_t; - import org.apache.commons.lang.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.sun.jna.LastErrorException; -/** - * @author dibenedetto_m - * - */ +/** @author dibenedetto_m */ public class LcmapsJNAMapper implements MapperInterface { - private static final Object lock = new Object(); - - private static final Logger log = LoggerFactory - .getLogger(LcmapsJNAMapper.class); - - private lcmaps_account_info_t account = new lcmaps_account_info_t(); - - private final String LCMAPS_DEFAULT_LOG_FILE = "/var/log/lcmaps.log"; - - private final String LCMAPS_LOG_FILE_PATH_ENV_VARIABLE = "LCMAPS_LOG_FILE"; - - private final short LCMAPS_LOG_TYPE = 3; - - /** - * @return - */ - private String getLcmapsLogFile() { - - String lcmaps_log_file = System.getenv(LCMAPS_LOG_FILE_PATH_ENV_VARIABLE); - if (lcmaps_log_file == null) { - lcmaps_log_file = LCMAPS_DEFAULT_LOG_FILE; - } - return lcmaps_log_file.trim(); - } - - public LocalUser map(String dn, String[] fqans) throws CannotMapUserException { - - LocalUser mappedUser = null; - synchronized (LcmapsJNAMapper.lock) { - log.debug("Mapping user with dn = {} and fqans='{}'", - dn, ArrayUtils.toString(fqans)); - - log.debug("Initializing Lcmaps"); - String lcmapsLogFile = getLcmapsLogFile(); - log.debug("Lcmaps log file is {}", lcmapsLogFile); - - int retVal = LcmapsInterface.INSTANCE.lcmaps_init_and_logfile( - lcmapsLogFile, null, LCMAPS_LOG_TYPE); - if (retVal != 0) { - log.error("Unable to initialize lcmaps. Return value is {}" , retVal); - throw new CannotMapUserException( - "Unable to initialize lcmaps. Return value is " + retVal); - } - retVal = LcmapsAccountInterface.INSTANCE - .lcmaps_account_info_init(account); - if (retVal != 0) { - throw new CannotMapUserException( - "Unable to initialize lcmaps. Return value is " + retVal); - } - int numFqans = (fqans == null ? 0 : fqans.length); - try { - retVal = LcmapsPoolindexInterface.INSTANCE - .lcmaps_return_account_without_gsi(dn, fqans, numFqans, 0, account); - } catch (LastErrorException e) { - log.error("Unable to map user dn <{}> fqans <{}>. Error: {}. Error code: {}", - dn, ArrayUtils.toString(fqans), - e.getMessage(), - e.getErrorCode(), - e); - throw new CannotMapUserException( - "Unable to initialize lcmaps. 
Return value is " + retVal); - } - if (retVal != 0) { - log.error("Unable to map user dn <{}> fqans <{}>. Retval: {}", - dn, ArrayUtils.toString(fqans), - retVal); - throw new CannotMapUserException("Unable to map user dn <" + dn - + "> fqans <" + ArrayUtils.toString(fqans) + "> . Return value is " - + retVal); - } - - if (account.uid < 0) { - log.error("Negative uid returned by lcmaps: {}", account.uid); - throw new CannotMapUserException( - "Unacceptable lower than zero uid returned by Lcmaps : " - + account.uid + " . Mapping error"); - } - if (account.npgid < 0 || account.nsgid < 0) { - log.error("Negative primary or secondary gid array size. npgid: {} nsgid: {}", - account.npgid, account.nsgid); - - throw new CannotMapUserException( - "Negative primary or secondary gid array size returned by Lcmaps : primary = " - + account.npgid - + ", secondary = " - + account.nsgid +". Mapping error"); - } - int[] gids = null; - int numGids = account.npgid + account.nsgid; - if (numGids > account.npgid) { - gids = new int[numGids]; - int index = 0; - if (account.npgid > 0) { - for (int gid : account.pgid_list.getPointer().getIntArray(0, - account.npgid)) { - gids[index] = gid; - index++; - } - } else { - log.warn("No primary gid returned by Lcmaps! Mapping error"); - } - for (int gid : account.sgid_list.getPointer().getIntArray(0, - account.nsgid)) { - gids[index] = gid; - index++; - } - } else { - if (account.npgid > 0) { - gids = account.pgid_list.getPointer().getIntArray(0, account.npgid); - } - } - log.info("Mapped user to : ", - account.uid, - ArrayUtils.toString(gids)); - mappedUser = new LocalUser(account.uid, gids, numGids); - } - return mappedUser; - } -} \ No newline at end of file + private static final Object lock = new Object(); + + private static final Logger log = LoggerFactory.getLogger(LcmapsJNAMapper.class); + + private lcmaps_account_info_t account = new lcmaps_account_info_t(); + + private final String LCMAPS_DEFAULT_LOG_FILE = "/var/log/lcmaps.log"; + + private final String LCMAPS_LOG_FILE_PATH_ENV_VARIABLE = "LCMAPS_LOG_FILE"; + + private final short LCMAPS_LOG_TYPE = 3; + + /** @return */ + private String getLcmapsLogFile() { + + String lcmaps_log_file = System.getenv(LCMAPS_LOG_FILE_PATH_ENV_VARIABLE); + if (lcmaps_log_file == null) { + lcmaps_log_file = LCMAPS_DEFAULT_LOG_FILE; + } + return lcmaps_log_file.trim(); + } + + public LocalUser map(String dn, String[] fqans) throws CannotMapUserException { + + LocalUser mappedUser = null; + synchronized (LcmapsJNAMapper.lock) { + log.debug("Mapping user with dn = {} and fqans='{}'", dn, ArrayUtils.toString(fqans)); + + log.debug("Initializing Lcmaps"); + String lcmapsLogFile = getLcmapsLogFile(); + log.debug("Lcmaps log file is {}", lcmapsLogFile); + + int retVal = + LcmapsInterface.INSTANCE.lcmaps_init_and_logfile(lcmapsLogFile, null, LCMAPS_LOG_TYPE); + if (retVal != 0) { + log.error("Unable to initialize lcmaps. Return value is {}", retVal); + throw new CannotMapUserException("Unable to initialize lcmaps. Return value is " + retVal); + } + retVal = LcmapsAccountInterface.INSTANCE.lcmaps_account_info_init(account); + if (retVal != 0) { + throw new CannotMapUserException("Unable to initialize lcmaps. Return value is " + retVal); + } + int numFqans = (fqans == null ? 0 : fqans.length); + try { + retVal = + LcmapsPoolindexInterface.INSTANCE.lcmaps_return_account_without_gsi( + dn, fqans, numFqans, 0, account); + } catch (LastErrorException e) { + log.error( + "Unable to map user dn <{}> fqans <{}>. Error: {}. 
Error code: {}", + dn, + ArrayUtils.toString(fqans), + e.getMessage(), + e.getErrorCode(), + e); + throw new CannotMapUserException("Unable to initialize lcmaps. Return value is " + retVal); + } + if (retVal != 0) { + log.error( + "Unable to map user dn <{}> fqans <{}>. Retval: {}", + dn, + ArrayUtils.toString(fqans), + retVal); + throw new CannotMapUserException( + "Unable to map user dn <" + + dn + + "> fqans <" + + ArrayUtils.toString(fqans) + + "> . Return value is " + + retVal); + } + + if (account.uid < 0) { + log.error("Negative uid returned by lcmaps: {}", account.uid); + throw new CannotMapUserException( + "Unacceptable lower than zero uid returned by Lcmaps : " + + account.uid + + " . Mapping error"); + } + if (account.npgid < 0 || account.nsgid < 0) { + log.error( + "Negative primary or secondary gid array size. npgid: {} nsgid: {}", + account.npgid, + account.nsgid); + + throw new CannotMapUserException( + "Negative primary or secondary gid array size returned by Lcmaps : primary = " + + account.npgid + + ", secondary = " + + account.nsgid + + ". Mapping error"); + } + int[] gids = null; + int numGids = account.npgid + account.nsgid; + if (numGids > account.npgid) { + gids = new int[numGids]; + int index = 0; + if (account.npgid > 0) { + for (int gid : account.pgid_list.getPointer().getIntArray(0, account.npgid)) { + gids[index] = gid; + index++; + } + } else { + log.warn("No primary gid returned by Lcmaps! Mapping error"); + } + for (int gid : account.sgid_list.getPointer().getIntArray(0, account.nsgid)) { + gids[index] = gid; + index++; + } + } else { + if (account.npgid > 0) { + gids = account.pgid_list.getPointer().getIntArray(0, account.npgid); + } + } + log.info("Mapped user to : ", account.uid, ArrayUtils.toString(gids)); + mappedUser = new LocalUser(account.uid, gids, numGids); + } + return mappedUser; + } +} diff --git a/src/main/java/it/grid/storm/griduser/LocalUser.java b/src/main/java/it/grid/storm/griduser/LocalUser.java index 6dd4ce0a..05cacf67 100644 --- a/src/main/java/it/grid/storm/griduser/LocalUser.java +++ b/src/main/java/it/grid/storm/griduser/LocalUser.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; @@ -9,84 +8,81 @@ public class LocalUser { - private static final Logger log = LoggerFactory.getLogger(LocalUser.class); + private static final Logger log = LoggerFactory.getLogger(LocalUser.class); - private int uid; - private int[] gids; + private int uid; + private int[] gids; - public LocalUser(int uid, int[] gids, long ngids) { + public LocalUser(int uid, int[] gids, long ngids) { - this.uid = uid; + this.uid = uid; - this.gids = new int[(int) ngids]; + this.gids = new int[(int) ngids]; - for (int i = 0; i < ngids; i++) - this.gids[i] = gids[i]; - } + for (int i = 0; i < ngids; i++) this.gids[i] = gids[i]; + } - public LocalUser(int uid, int gid, int[] supplementaryGids) { + public LocalUser(int uid, int gid, int[] supplementaryGids) { - this.uid = uid; + this.uid = uid; - this.gids = new int[1 + supplementaryGids.length]; - this.gids[0] = gid; + this.gids = new int[1 + supplementaryGids.length]; + this.gids[0] = gid; - for (int i = 1; i <= supplementaryGids.length; i++) - this.gids[i] = supplementaryGids[i - 1]; - } + for (int i = 1; i <= supplementaryGids.length; i++) this.gids[i] = supplementaryGids[i - 1]; + } - public LocalUser(int uid, int gid) { + public LocalUser(int uid, int gid) { - this(uid, gid, new int[0]); - } + this(uid, gid, new int[0]); + } - public LocalUser(String uidgids) { + public LocalUser(String uidgids) { - this.uid = 501; - this.gids = new int[1]; - this.gids[0] = 501; - if (uidgids != null) { - String[] aux = uidgids.split(","); - // try parsing the chunks provided there are at least two! - if (aux.length >= 2) { - try { - int auxuid = Integer.parseInt(aux[0]); - int[] auxgid = new int[aux.length - 1]; - for (int i = 0; i < aux.length - 1; i++) - auxgid[i] = Integer.parseInt(aux[i + 1]); - this.uid = auxuid; - this.gids = auxgid; - } catch (NumberFormatException e) { - log.error("LocalUser: Error while setting uid/gid. NFE:" + e); - } - } - } - } + this.uid = 501; + this.gids = new int[1]; + this.gids[0] = 501; + if (uidgids != null) { + String[] aux = uidgids.split(","); + // try parsing the chunks provided there are at least two! + if (aux.length >= 2) { + try { + int auxuid = Integer.parseInt(aux[0]); + int[] auxgid = new int[aux.length - 1]; + for (int i = 0; i < aux.length - 1; i++) auxgid[i] = Integer.parseInt(aux[i + 1]); + this.uid = auxuid; + this.gids = auxgid; + } catch (NumberFormatException e) { + log.error("LocalUser: Error while setting uid/gid. 
NFE:" + e); + } + } + } + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(uid); - for (int i = 0; i < gids.length; i++) { - sb.append(","); - sb.append(gids[i]); - } - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append(uid); + for (int i = 0; i < gids.length; i++) { + sb.append(","); + sb.append(gids[i]); + } + return sb.toString(); + } - public String getLocalUserName() { - return Integer.toString(getUid()); - } + public String getLocalUserName() { + return Integer.toString(getUid()); + } - public int getUid() { - return uid; - } + public int getUid() { + return uid; + } - public int[] getGids() { - return gids; - } + public int[] getGids() { + return gids; + } - public int getPrimaryGid() { - return gids[0]; - } + public int getPrimaryGid() { + return gids[0]; + } } diff --git a/src/main/java/it/grid/storm/griduser/MapperInterface.java b/src/main/java/it/grid/storm/griduser/MapperInterface.java index 8d453136..bc6bf510 100644 --- a/src/main/java/it/grid/storm/griduser/MapperInterface.java +++ b/src/main/java/it/grid/storm/griduser/MapperInterface.java @@ -1,14 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -import it.grid.storm.griduser.CannotMapUserException; -import it.grid.storm.griduser.LocalUser; - interface MapperInterface { - public LocalUser map(final String dn, final String[] fqans) - throws CannotMapUserException; + public LocalUser map(final String dn, final String[] fqans) throws CannotMapUserException; } diff --git a/src/main/java/it/grid/storm/griduser/SimpleUserMapper.java b/src/main/java/it/grid/storm/griduser/SimpleUserMapper.java index 47c5702b..9b785864 100644 --- a/src/main/java/it/grid/storm/griduser/SimpleUserMapper.java +++ b/src/main/java/it/grid/storm/griduser/SimpleUserMapper.java @@ -1,99 +1,91 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; - import org.slf4j.Logger; public class SimpleUserMapper implements MapperInterface { - private static final Logger log = GridUserManager.log; - - public SimpleUserMapper() { - - } - - public LocalUser map(String dn, String[] fqans) throws CannotMapUserException { - - LocalUser localUser = null; - int uid = 0; - int gid = 0; - try { - String retrieveUserCmd = "id -r -u"; - String userIdStr = getOutput(retrieveUserCmd); - // log.debug("Output = "+userIdStr); - uid = Integer.parseInt(userIdStr); - } catch (CannotMapUserException e) { - log.error(e.getMessage(), e); - throw e; - } catch (NumberFormatException nfe) { - log.error("Getting UID returns a result different from a integer"); - throw new CannotMapUserException(nfe); - } - - try { - String retrieveUserCmd = "id -r -g"; - String groupIdStr = getOutput(retrieveUserCmd); - gid = Integer.parseInt(groupIdStr); - } catch (CannotMapUserException ex) { - log.error("Unable to retrieve Group ID from the system." 
, ex); - throw ex; - } catch (NumberFormatException nfe) { - log.error("Getting GID returns a result different from a integer"); - throw new CannotMapUserException(nfe); - } - - localUser = new LocalUser(uid, gid); - return localUser; - } - - private String getOutput(String command) throws CannotMapUserException { - - String result = null; - try { - Process child = Runtime.getRuntime().exec(command); - BufferedReader stdInput = new BufferedReader(new InputStreamReader( - child.getInputStream())); - BufferedReader stdError = new BufferedReader(new InputStreamReader( - child.getErrorStream())); - - String line; - int row = 0; - while ((line = stdInput.readLine()) != null) { - boolean lineOk = processOutput(row, line); - if (lineOk) { - result = line; - break; - } - row++; - } - - // process the Errors - String errLine; - while ((errLine = stdError.readLine()) != null) { - log.warn("User Info Command Output contains an ERROR message {}", - errLine); - throw new CannotMapUserException(errLine); - } - - } catch (IOException ex) { - log.error("getUserInfo (id) I/O Exception: {}", ex); - throw new CannotMapUserException(ex); - } - return result; - } - - private boolean processOutput(int row, String line) { - boolean result = false; - if (row >= 0) { - result = true; - } - return result; - } - + private static final Logger log = GridUserManager.log; + + public SimpleUserMapper() {} + + public LocalUser map(String dn, String[] fqans) throws CannotMapUserException { + + LocalUser localUser = null; + int uid = 0; + int gid = 0; + try { + String retrieveUserCmd = "id -r -u"; + String userIdStr = getOutput(retrieveUserCmd); + // log.debug("Output = "+userIdStr); + uid = Integer.parseInt(userIdStr); + } catch (CannotMapUserException e) { + log.error(e.getMessage(), e); + throw e; + } catch (NumberFormatException nfe) { + log.error("Getting UID returns a result different from a integer"); + throw new CannotMapUserException(nfe); + } + + try { + String retrieveUserCmd = "id -r -g"; + String groupIdStr = getOutput(retrieveUserCmd); + gid = Integer.parseInt(groupIdStr); + } catch (CannotMapUserException ex) { + log.error("Unable to retrieve Group ID from the system.", ex); + throw ex; + } catch (NumberFormatException nfe) { + log.error("Getting GID returns a result different from a integer"); + throw new CannotMapUserException(nfe); + } + + localUser = new LocalUser(uid, gid); + return localUser; + } + + private String getOutput(String command) throws CannotMapUserException { + + String result = null; + try { + Process child = Runtime.getRuntime().exec(command); + BufferedReader stdInput = new BufferedReader(new InputStreamReader(child.getInputStream())); + BufferedReader stdError = new BufferedReader(new InputStreamReader(child.getErrorStream())); + + String line; + int row = 0; + while ((line = stdInput.readLine()) != null) { + boolean lineOk = processOutput(row, line); + if (lineOk) { + result = line; + break; + } + row++; + } + + // process the Errors + String errLine; + while ((errLine = stdError.readLine()) != null) { + log.warn("User Info Command Output contains an ERROR message {}", errLine); + throw new CannotMapUserException(errLine); + } + + } catch (IOException ex) { + log.error("getUserInfo (id) I/O Exception: {}", ex); + throw new CannotMapUserException(ex); + } + return result; + } + + private boolean processOutput(int row, String line) { + boolean result = false; + if (row >= 0) { + result = true; + } + return result; + } } diff --git 
a/src/main/java/it/grid/storm/griduser/StormLcmapsJNAMapper.java b/src/main/java/it/grid/storm/griduser/StormLcmapsJNAMapper.java index db7e565b..a630a200 100644 --- a/src/main/java/it/grid/storm/griduser/StormLcmapsJNAMapper.java +++ b/src/main/java/it/grid/storm/griduser/StormLcmapsJNAMapper.java @@ -1,78 +1,64 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -import java.nio.IntBuffer; -import it.grid.storm.griduser.CannotMapUserException; -import it.grid.storm.griduser.LocalUser; import it.grid.storm.jna.lcmaps.StormLcmapsLibrary; import it.grid.storm.jna.lcmaps.StormLcmapsLibrary.Errors; - +import java.nio.IntBuffer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class StormLcmapsJNAMapper implements MapperInterface { - private static final Logger log = LoggerFactory - .getLogger(StormLcmapsJNAMapper.class); - - private final Object lock = new Object(); + private static final Logger log = LoggerFactory.getLogger(StormLcmapsJNAMapper.class); - private final String LCMAPS_DEFAULT_LOG_FILE = "/var/log/lcmaps.log"; + private final Object lock = new Object(); - private final String LCMAPS_LOG_FILE_PATH_ENV_VARIABLE = "LCMAPS_LOG_FILE"; + private final String LCMAPS_DEFAULT_LOG_FILE = "/var/log/lcmaps.log"; - private static final StormLcmapsJNAMapper instance = new StormLcmapsJNAMapper(); + private final String LCMAPS_LOG_FILE_PATH_ENV_VARIABLE = "LCMAPS_LOG_FILE"; - private StormLcmapsJNAMapper() { + private static final StormLcmapsJNAMapper instance = new StormLcmapsJNAMapper(); - } + private StormLcmapsJNAMapper() {} - public static StormLcmapsJNAMapper getInstance() { + public static StormLcmapsJNAMapper getInstance() { - return instance; - } + return instance; + } - private String getLcmapsLogFile() { + private String getLcmapsLogFile() { - String lcmapsLogFile = System.getenv(LCMAPS_LOG_FILE_PATH_ENV_VARIABLE); - if (lcmapsLogFile == null) { - lcmapsLogFile = LCMAPS_DEFAULT_LOG_FILE; - } - return lcmapsLogFile.trim(); - } + String lcmapsLogFile = System.getenv(LCMAPS_LOG_FILE_PATH_ENV_VARIABLE); + if (lcmapsLogFile == null) { + lcmapsLogFile = LCMAPS_DEFAULT_LOG_FILE; + } + return lcmapsLogFile.trim(); + } - public LocalUser map(final String dn, final String[] fqans) - throws CannotMapUserException { + public LocalUser map(final String dn, final String[] fqans) throws CannotMapUserException { - IntBuffer userId = IntBuffer.allocate(1), groupId = IntBuffer.allocate(1); - int retVal; - synchronized (lock) { - retVal = StormLcmapsLibrary.INSTANCE.map_user(getLcmapsLogFile(), dn, - fqans, 1, userId, groupId); - } - if (retVal != 0) { - Errors error = StormLcmapsLibrary.Errors.getError(retVal); - if (!error.equals(Errors.UNKNOW_ERROR)) { - log - .error("Unable to call successfully native map_user() method. " - + "Return value is {}", error); - } else { - log - .error("Unable to call successfully native map_user() method. 
" - + "Unknown return value: {}", retVal); - } - throw new CannotMapUserException( - "LCMAPS error, cannot map user credentials to local user."); - } - LocalUser localUser = new LocalUser(userId.get(), - new int[] { groupId.get() }, 1); - return localUser; - } -} \ No newline at end of file + IntBuffer userId = IntBuffer.allocate(1), groupId = IntBuffer.allocate(1); + int retVal; + synchronized (lock) { + retVal = + StormLcmapsLibrary.INSTANCE.map_user(getLcmapsLogFile(), dn, fqans, 1, userId, groupId); + } + if (retVal != 0) { + Errors error = StormLcmapsLibrary.Errors.getError(retVal); + if (!error.equals(Errors.UNKNOW_ERROR)) { + log.error( + "Unable to call successfully native map_user() method. " + "Return value is {}", error); + } else { + log.error( + "Unable to call successfully native map_user() method. " + "Unknown return value: {}", + retVal); + } + throw new CannotMapUserException("LCMAPS error, cannot map user credentials to local user."); + } + LocalUser localUser = new LocalUser(userId.get(), new int[] {groupId.get()}, 1); + return localUser; + } +} diff --git a/src/main/java/it/grid/storm/griduser/SubjectAttribute.java b/src/main/java/it/grid/storm/griduser/SubjectAttribute.java index 27d57bda..341bd544 100644 --- a/src/main/java/it/grid/storm/griduser/SubjectAttribute.java +++ b/src/main/java/it/grid/storm/griduser/SubjectAttribute.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.griduser; /* @@ -12,17 +9,12 @@ */ public interface SubjectAttribute { - /** - * - * @param obj - * Object - * @return boolean - */ - public abstract boolean equals(Object obj); - - /** - * Return the usual string representation of the FQAN. - */ - public abstract String toString(); + /** + * @param obj Object + * @return boolean + */ + public abstract boolean equals(Object obj); -} \ No newline at end of file + /** Return the usual string representation of the FQAN. */ + public abstract String toString(); +} diff --git a/src/main/java/it/grid/storm/griduser/VONameMatchingRule.java b/src/main/java/it/grid/storm/griduser/VONameMatchingRule.java index b5bf1e9f..fa4f3ac8 100644 --- a/src/main/java/it/grid/storm/griduser/VONameMatchingRule.java +++ b/src/main/java/it/grid/storm/griduser/VONameMatchingRule.java @@ -1,70 +1,68 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; -import java.util.regex.Pattern; import java.util.regex.Matcher; +import java.util.regex.Pattern; public class VONameMatchingRule { - private static final String ADMIT_ALL = ".*"; - - private final String voNameString; - private Pattern voNamePattern = null; - - public VONameMatchingRule(String regularExpressionRule) { - - if ((regularExpressionRule == null) || (regularExpressionRule.equals("*"))) { - voNameString = ADMIT_ALL; - } else { - voNameString = regularExpressionRule; - } - initPattern(); - } - - public static VONameMatchingRule buildMatchAllVONameMatchingRule() { - - return new VONameMatchingRule(ADMIT_ALL); - } - - private void initPattern() { - - // VOName - if (isMatchAll(voNameString)) { - voNamePattern = Pattern.compile(ADMIT_ALL); - } else { - voNamePattern = Pattern.compile(voNameString); - } - } - - private static boolean isMatchAll(String pattern) { - return pattern == null || pattern.trim().equals("*") - || pattern.trim().equals(".*"); - } - - public boolean match(String voName) { - - boolean result = false; - CharSequence voNameSequence = voName.subSequence(0, voName.length()); - Matcher voNameMatcher = voNamePattern.matcher(voNameSequence); - result = voNameMatcher.find(); - return result; - } - - public String toString() { - - StringBuilder result = new StringBuilder(); - result.append(" VONAME=" + voNameString); - return result.toString(); - } - - public boolean isMatchAll() { - return isMatchAll(voNameString); - } - - public String getVOName() { - return voNameString; - } -} \ No newline at end of file + private static final String ADMIT_ALL = ".*"; + + private final String voNameString; + private Pattern voNamePattern = null; + + public VONameMatchingRule(String regularExpressionRule) { + + if ((regularExpressionRule == null) || (regularExpressionRule.equals("*"))) { + voNameString = ADMIT_ALL; + } else { + voNameString = regularExpressionRule; + } + initPattern(); + } + + public static VONameMatchingRule buildMatchAllVONameMatchingRule() { + + return new VONameMatchingRule(ADMIT_ALL); + } + + private void initPattern() { + + // VOName + if (isMatchAll(voNameString)) { + voNamePattern = Pattern.compile(ADMIT_ALL); + } else { + voNamePattern = Pattern.compile(voNameString); + } + } + + private static boolean isMatchAll(String pattern) { + return pattern == null || pattern.trim().equals("*") || pattern.trim().equals(".*"); + } + + public boolean match(String voName) { + + boolean result = false; + CharSequence voNameSequence = voName.subSequence(0, voName.length()); + Matcher voNameMatcher = voNamePattern.matcher(voNameSequence); + result = voNameMatcher.find(); + return result; + } + + public String toString() { + + StringBuilder result = new StringBuilder(); + result.append(" VONAME=" + voNameString); + return result.toString(); + } + + public boolean isMatchAll() { + return isMatchAll(voNameString); + } + + public String getVOName() { + return voNameString; + } +} diff --git a/src/main/java/it/grid/storm/griduser/VomsGridUser.java b/src/main/java/it/grid/storm/griduser/VomsGridUser.java index 6d48937e..195a17cc 100644 --- a/src/main/java/it/grid/storm/griduser/VomsGridUser.java +++ b/src/main/java/it/grid/storm/griduser/VomsGridUser.java @@ -1,180 +1,170 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.griduser; +import it.grid.storm.common.types.VO; import java.io.Serializable; -import java.util.List; import java.util.ArrayList; -import it.grid.storm.common.types.VO; +import java.util.List; /** - * Encapsulates user Grid credentials access, and maps those to a local user - * account. Has methods to extract the permanent identifier (subject DN), VO and - * VOMS group/role membership from the X.509 certificate / GSI proxy that the - * user presented to StoRM. Will also invoke LCMAPS library to map the Grid - * credentials to a local user account. - * - * @todo implement a flyweight pattern, so that we don't have 1'000 different - * GridUser objects for 1'000 requests from the same user... - * - * + * Encapsulates user Grid credentials access, and maps those to a local user account. Has methods to + * extract the permanent identifier (subject DN), VO and VOMS group/role membership from the X.509 + * certificate / GSI proxy that the user presented to StoRM. Will also invoke LCMAPS library to map + * the Grid credentials to a local user account. + * + * @todo implement a flyweight pattern, so that we don't have 1'000 different GridUser objects for + * 1'000 requests from the same user... */ class VomsGridUser extends GridUser implements Serializable { - private static final long serialVersionUID = -117007717079470189L; - private List fqans = new ArrayList(); - private List fqansString = new ArrayList(); - - // --- public accessor methods --- // - - VomsGridUser(MapperInterface mapper, String distinguishedName, String proxy, - FQAN[] fqansArray) throws IllegalArgumentException { - - super(mapper, distinguishedName, proxy); - if (fqansArray == null || fqansArray.length == 0) { - throw new IllegalArgumentException( - "Unable to create VomsGridUser. Inavlid fqansArray argument: " - + fqansArray); - } - this.setFqans(fqansArray); - } - - VomsGridUser(MapperInterface mapper, String distinguishedName, - FQAN[] fqansArray) throws IllegalArgumentException { - - super(mapper, distinguishedName); - if (fqansArray == null || fqansArray.length == 0) { - throw new IllegalArgumentException( - "Unable to create VomsGridUser. Inavlid fqansArray argument: " - + fqansArray); - } - this.setFqans(fqansArray); - } - - private void setFqans(FQAN[] fqans) { - - this.fqans.clear(); - this.fqansString.clear(); - for (FQAN fqan : fqans) { - this.fqans.add(fqan); - this.fqansString.add(fqan.toString()); - } - } - - public void addFqan(FQAN fqan) { - - this.fqans.add(fqan); - this.fqansString.add(fqan.toString()); - - } - - - /** - * Return true if any VOMS attributes are stored in this object. - * - *
<p>
- * If the explicit constructor {@link VomsGridUser(String, Fqan[], String)} - * was used, then this flag will be true if the Fqan[] parameter - * was not null in the constructor invocation. - * - * @return true if any VOMS attributes are stored in this object. - */ - public boolean hasVoms() { - - return true; - } - - @Override - public FQAN[] getFQANs() { - - FQAN[] FQANs = null; - if (fqans != null) { - FQANs = fqans.toArray(new FQAN[fqans.size()]); - } - return FQANs; - } - - @Override - public String[] getFQANsAsString() { - - String[] FQANs = null; - if (fqansString != null) { - FQANs = fqansString.toArray(new String[fqansString.size()]); - } - return FQANs; - } - - public VO getVO() { - - VO result = VO.makeNoVo(); - if ((fqans != null) && (fqans.size() > 0)) { - FQAN firstFQAN = fqans.get(0); - String voName = firstFQAN.getVo(); - result = VO.make(voName); - } - return result; - } - - /** - * Print a string representation of this object, in the form - * GridUser:"subject DN". - */ - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("Grid User (VOMS) = "); - sb.append(" DN:'" + getDistinguishedName().getX500DN_rfc1779() + "'"); - sb.append(" FQANS:" + fqans); - return sb.toString(); - } - - public int hashCode() { - - int result = 17; - result += 31 * this.subjectDN.hashCode(); - for (FQAN fqan : fqans) { - result += 37 * fqan.hashCode(); - } - return result; - } - - /** - * Return true if other is a VomsGridUser with the same String representation, - * that is: - same DN, and - same FQANs - */ - @Override - public boolean equals(Object obj) { - - boolean result = false; - if (obj != null) { - if (obj instanceof VomsGridUser) { - VomsGridUser other = (VomsGridUser) obj; - if (!(other.hasVoms())) { - result = this.getDistinguishedName().equals( - other.getDistinguishedName()); - } else { - // Also the other is a VomsGridUser - if (this.getDistinguishedName().equals(other.getDistinguishedName())) { - // Equals if they have the same FQANs - FQAN[] otherFQANs = other.getFQANs(); - FQAN[] thisFQANs = this.getFQANs(); - if (otherFQANs.length == thisFQANs.length) { - result = true; - for (int i = 0; i < otherFQANs.length; i++) { - if (!(otherFQANs[i].equals(thisFQANs[i]))) { - result = false; - break; // Exit from the loop at first fail. - } - } - } - } else { - result = false; - } - } - } - } - return result; - } -} \ No newline at end of file + private static final long serialVersionUID = -117007717079470189L; + private List fqans = new ArrayList(); + private List fqansString = new ArrayList(); + + // --- public accessor methods --- // + + VomsGridUser(MapperInterface mapper, String distinguishedName, String proxy, FQAN[] fqansArray) + throws IllegalArgumentException { + + super(mapper, distinguishedName, proxy); + if (fqansArray == null || fqansArray.length == 0) { + throw new IllegalArgumentException( + "Unable to create VomsGridUser. Inavlid fqansArray argument: " + fqansArray); + } + this.setFqans(fqansArray); + } + + VomsGridUser(MapperInterface mapper, String distinguishedName, FQAN[] fqansArray) + throws IllegalArgumentException { + + super(mapper, distinguishedName); + if (fqansArray == null || fqansArray.length == 0) { + throw new IllegalArgumentException( + "Unable to create VomsGridUser. 
Inavlid fqansArray argument: " + fqansArray); + } + this.setFqans(fqansArray); + } + + private void setFqans(FQAN[] fqans) { + + this.fqans.clear(); + this.fqansString.clear(); + for (FQAN fqan : fqans) { + this.fqans.add(fqan); + this.fqansString.add(fqan.toString()); + } + } + + public void addFqan(FQAN fqan) { + + this.fqans.add(fqan); + this.fqansString.add(fqan.toString()); + } + + /** + * Return true if any VOMS attributes are stored in this object. + * + *
<p>
If the explicit constructor {@link VomsGridUser(String, Fqan[], String)} was used, then this + * flag will be true if the Fqan[] parameter was not null in the constructor + * invocation. + * + * @return true if any VOMS attributes are stored in this object. + */ + public boolean hasVoms() { + + return true; + } + + @Override + public FQAN[] getFQANs() { + + FQAN[] FQANs = null; + if (fqans != null) { + FQANs = fqans.toArray(new FQAN[fqans.size()]); + } + return FQANs; + } + + @Override + public String[] getFQANsAsString() { + + String[] FQANs = null; + if (fqansString != null) { + FQANs = fqansString.toArray(new String[fqansString.size()]); + } + return FQANs; + } + + public VO getVO() { + + VO result = VO.makeNoVo(); + if ((fqans != null) && (fqans.size() > 0)) { + FQAN firstFQAN = fqans.get(0); + String voName = firstFQAN.getVo(); + result = VO.make(voName); + } + return result; + } + + /** + * Print a string representation of this object, in the form GridUser:"subject + * DN". + */ + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("Grid User (VOMS) = "); + sb.append(" DN:'" + getDistinguishedName().getX500DN_rfc1779() + "'"); + sb.append(" FQANS:" + fqans); + return sb.toString(); + } + + public int hashCode() { + + int result = 17; + result += 31 * this.subjectDN.hashCode(); + for (FQAN fqan : fqans) { + result += 37 * fqan.hashCode(); + } + return result; + } + + /** + * Return true if other is a VomsGridUser with the same String representation, that is: - same DN, + * and - same FQANs + */ + @Override + public boolean equals(Object obj) { + + boolean result = false; + if (obj != null) { + if (obj instanceof VomsGridUser) { + VomsGridUser other = (VomsGridUser) obj; + if (!(other.hasVoms())) { + result = this.getDistinguishedName().equals(other.getDistinguishedName()); + } else { + // Also the other is a VomsGridUser + if (this.getDistinguishedName().equals(other.getDistinguishedName())) { + // Equals if they have the same FQANs + FQAN[] otherFQANs = other.getFQANs(); + FQAN[] thisFQANs = this.getFQANs(); + if (otherFQANs.length == thisFQANs.length) { + result = true; + for (int i = 0; i < otherFQANs.length; i++) { + if (!(otherFQANs[i].equals(thisFQANs[i]))) { + result = false; + break; // Exit from the loop at first fail. + } + } + } + } else { + result = false; + } + } + } + } + return result; + } +} diff --git a/src/main/java/it/grid/storm/griduser/swig/lcmaps_interface.java b/src/main/java/it/grid/storm/griduser/swig/lcmaps_interface.java index 4c9f6fb6..dd75e1e0 100644 --- a/src/main/java/it/grid/storm/griduser/swig/lcmaps_interface.java +++ b/src/main/java/it/grid/storm/griduser/swig/lcmaps_interface.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). Version * 1.3.24 - * + * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. 
* ----------------------------------------------------------------------------- @@ -14,5 +13,4 @@ package it.grid.storm.griduser.swig; -public class lcmaps_interface { -} +public class lcmaps_interface {} diff --git a/src/main/java/it/grid/storm/griduser/swig/lcmaps_interfaceJNI.java b/src/main/java/it/grid/storm/griduser/swig/lcmaps_interfaceJNI.java index 5db35e96..be3e5e7a 100644 --- a/src/main/java/it/grid/storm/griduser/swig/lcmaps_interfaceJNI.java +++ b/src/main/java/it/grid/storm/griduser/swig/lcmaps_interfaceJNI.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). Version * 1.3.24 - * + * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- @@ -16,40 +15,33 @@ class lcmaps_interfaceJNI { - static { - try { - System.loadLibrary("lcmaps_interface"); - } catch (UnsatisfiedLinkError e) { - final String libfile = "'" + System.mapLibraryName("lcmaps_interface") - + "'"; - final org.slf4j.Logger log = org.slf4j.LoggerFactory - .getLogger(lcmaps_interfaceJNI.class); - - log.error("Native code library {} failed to load: {}", - libfile, - e.getMessage()); - - final String ldpath = System.getProperty("java.library.path"); - if (null != ldpath) - log.error("Java VM searched for {} in java.library.path: {}", - libfile, ldpath); - else - log.error("Java VM library search path is null!"); - log - .error("Add the library location to the environment variable LD_LIBRARY_PATH or to the Java property java.library.path"); - throw new UnsatisfiedLinkError("Native code library " + libfile - + " failed to load: " + e.getMessage()); - } - } - - public final static native int get_localuser_info_uid(long jarg1); - - public final static native int[] get_localuser_info_gids(long jarg1); - - public final static native long get_localuser_info_ngids(long jarg1); - - public final static native long new_localuser_info(String jarg1, - String[] jarg2); - - public final static native void delete_localuser_info(long jarg1); + static { + try { + System.loadLibrary("lcmaps_interface"); + } catch (UnsatisfiedLinkError e) { + final String libfile = "'" + System.mapLibraryName("lcmaps_interface") + "'"; + final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(lcmaps_interfaceJNI.class); + + log.error("Native code library {} failed to load: {}", libfile, e.getMessage()); + + final String ldpath = System.getProperty("java.library.path"); + if (null != ldpath) + log.error("Java VM searched for {} in java.library.path: {}", libfile, ldpath); + else log.error("Java VM library search path is null!"); + log.error( + "Add the library location to the environment variable LD_LIBRARY_PATH or to the Java property java.library.path"); + throw new UnsatisfiedLinkError( + "Native code library " + libfile + " failed to load: " + e.getMessage()); + } + } + + public static final native int get_localuser_info_uid(long jarg1); + + public static final native int[] get_localuser_info_gids(long jarg1); + + public static final native long get_localuser_info_ngids(long jarg1); + + public static final native long new_localuser_info(String jarg1, String[] jarg2); + + public 
static final native void delete_localuser_info(long jarg1); } diff --git a/src/main/java/it/grid/storm/griduser/swig/localuser_info.java b/src/main/java/it/grid/storm/griduser/swig/localuser_info.java index deb44f02..64f08e90 100644 --- a/src/main/java/it/grid/storm/griduser/swig/localuser_info.java +++ b/src/main/java/it/grid/storm/griduser/swig/localuser_info.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). Version * 1.3.24 - * + * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- @@ -16,57 +15,56 @@ public class localuser_info { - private long swigCPtr; - protected boolean swigCMemOwn; + private long swigCPtr; + protected boolean swigCMemOwn; - protected localuser_info(long cPtr, boolean cMemoryOwn) { + protected localuser_info(long cPtr, boolean cMemoryOwn) { - swigCMemOwn = cMemoryOwn; - swigCPtr = cPtr; - } + swigCMemOwn = cMemoryOwn; + swigCPtr = cPtr; + } - protected static long getCPtr(localuser_info obj) { + protected static long getCPtr(localuser_info obj) { - return (obj == null) ? 0 : obj.swigCPtr; - } + return (obj == null) ? 0 : obj.swigCPtr; + } - protected localuser_info() { + protected localuser_info() { - this(0, false); - } + this(0, false); + } - protected void finalize() { + protected void finalize() { - delete(); - } + delete(); + } - public void delete() { + public void delete() { - if (swigCPtr != 0 && swigCMemOwn) { - swigCMemOwn = false; - lcmaps_interfaceJNI.delete_localuser_info(swigCPtr); - } - swigCPtr = 0; - } + if (swigCPtr != 0 && swigCMemOwn) { + swigCMemOwn = false; + lcmaps_interfaceJNI.delete_localuser_info(swigCPtr); + } + swigCPtr = 0; + } - public int getUid() { + public int getUid() { - return lcmaps_interfaceJNI.get_localuser_info_uid(swigCPtr); - } + return lcmaps_interfaceJNI.get_localuser_info_uid(swigCPtr); + } - public int[] getGids() { + public int[] getGids() { - return lcmaps_interfaceJNI.get_localuser_info_gids(swigCPtr); - } + return lcmaps_interfaceJNI.get_localuser_info_gids(swigCPtr); + } - public long getNgids() { + public long getNgids() { - return lcmaps_interfaceJNI.get_localuser_info_ngids(swigCPtr); - } + return lcmaps_interfaceJNI.get_localuser_info_ngids(swigCPtr); + } - public localuser_info(String user_dn, String[] fqan_list) { - - this(lcmaps_interfaceJNI.new_localuser_info(user_dn, fqan_list), true); - } + public localuser_info(String user_dn, String[] fqan_list) { + this(lcmaps_interfaceJNI.new_localuser_info(user_dn, fqan_list), true); + } } diff --git a/src/main/java/it/grid/storm/health/BookKeeper.java b/src/main/java/it/grid/storm/health/BookKeeper.java index f41bddb5..1bee61ed 100644 --- a/src/main/java/it/grid/storm/health/BookKeeper.java +++ b/src/main/java/it/grid/storm/health/BookKeeper.java @@ -1,40 +1,35 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; import java.util.ArrayList; - import org.slf4j.Logger; public abstract class BookKeeper { - protected Logger bookKeepingLog = HealthDirector.getBookKeepingLogger(); - protected Logger performanceLog = HealthDirector.getPerformanceLogger(); - - protected ArrayList logbook = new ArrayList(); + protected Logger bookKeepingLog = HealthDirector.getBookKeepingLogger(); + protected Logger performanceLog = HealthDirector.getPerformanceLogger(); - public abstract void addLogEvent(LogEvent logEvent); + protected ArrayList logbook = new ArrayList(); - public synchronized void cleanLogBook() { - logbook.clear(); - } + public abstract void addLogEvent(LogEvent logEvent); - protected void logDebug(String msg) { + public synchronized void cleanLogBook() { + logbook.clear(); + } - if ((HealthDirector.isBookKeepingConfigured()) - && (HealthDirector.isBookKeepingEnabled())) { - bookKeepingLog.debug("BK: {}", msg); - } - } + protected void logDebug(String msg) { - protected void logInfo(String msg) { + if ((HealthDirector.isBookKeepingConfigured()) && (HealthDirector.isBookKeepingEnabled())) { + bookKeepingLog.debug("BK: {}", msg); + } + } - if ((HealthDirector.isBookKeepingConfigured()) - && (HealthDirector.isBookKeepingEnabled())) { - bookKeepingLog.info(msg); - } - } + protected void logInfo(String msg) { + if ((HealthDirector.isBookKeepingConfigured()) && (HealthDirector.isBookKeepingEnabled())) { + bookKeepingLog.info(msg); + } + } } diff --git a/src/main/java/it/grid/storm/health/DetectiveGlance.java b/src/main/java/it/grid/storm/health/DetectiveGlance.java index 15d340c2..0b7aad7b 100644 --- a/src/main/java/it/grid/storm/health/DetectiveGlance.java +++ b/src/main/java/it/grid/storm/health/DetectiveGlance.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; @@ -8,113 +7,108 @@ public class DetectiveGlance { - private static Logger log = HealthDirector.LOGGER; - - private static long totPtGRequest = 0L; - private static long totPtPRequest = 0L; - private static long totSYNCHRequest = 0L; - - /** - * - */ - public DetectiveGlance() { - - } - - /** - * Get current size of heap in bytes - * - * @return long - */ - private static long getHeapSize() { - - long heapSize = Runtime.getRuntime().totalMemory(); - return heapSize; - } - - /** - * Get maximum size of heap in bytes. The heap cannot grow beyond this size. - * Any attempt will result in an OutOfMemoryException. - * - * @return long - */ - private static long getHeapMaxSize() { - - long heapMaxSize = Runtime.getRuntime().maxMemory(); - return heapMaxSize; - } - - /** - * Get amount of free memory within the heap in bytes. This size will increase - * after garbage collection and decrease as new objects are created. 
- * - * @return long - */ - private static long getHeapFreeSize() { - - long heapFreeSize = Runtime.getRuntime().freeMemory(); - return heapFreeSize; - } - - public static void addPtGRequests(int nrPtGRequest) { - - totPtGRequest += nrPtGRequest; - } - - public static void addPtPRequests(int nrPtPRequest) { - - totPtPRequest += nrPtPRequest; - } - - public static void addSynchRequest(int nrSynchRequest) { - - totSYNCHRequest += nrSynchRequest; - } - - public StoRMStatus haveaLook() { - - log.trace("DetectiveGlance.haveaLook"); - StoRMStatus stormStatus = new StoRMStatus(); - stormStatus.setHeapFreeSize(getHeapFreeSize()); - stormStatus.setMAXHeapSize(getHeapMaxSize()); - stormStatus.setHeapSize(getHeapSize()); - SimpleBookKeeper bk = HealthDirector.getHealthMonitor() - .getSimpleBookKeeper(); - if (bk != null) { - - int ptgReq = bk.getNumberOfRequest(OperationType.PTG); - // Sum partial to the Total - addPtGRequests(ptgReq); - int ptgSucc = bk.getNumberOfSuccess(OperationType.PTG); - long meanPtG = bk.getMeansDuration(OperationType.PTG); - - stormStatus.setPtGNumberRequests(ptgReq); - stormStatus.setPtGSuccessRequests(ptgSucc); - stormStatus.setPtGMeanDuration(meanPtG); - stormStatus.setTotalPtGRequest(totPtGRequest); - - int ptpReq = bk.getNumberOfRequest(OperationType.PTP); - // Sum partial to the Total - addPtPRequests(ptpReq); - int ptpSucc = bk.getNumberOfSuccess(OperationType.PTP); - long meanPtP = bk.getMeansDuration(OperationType.PTP); - - stormStatus.setPtPNumberRequests(ptpReq); - stormStatus.setPtPSuccessRequests(ptpSucc); - stormStatus.setPtPMeanDuration(meanPtP); - stormStatus.setTotalPtPRequest(totPtPRequest); - - int synchRequest = bk.getNumberOfSynchRequest(); - addSynchRequest(synchRequest); - - stormStatus.setSynchRequest(synchRequest); - - stormStatus.calculateLifeTime(); - bk.cleanLogBook(); - log.debug(" .. glance completed."); - } else { - log.warn("No SympleBookKeper available from HealtMonitor!"); - } - return stormStatus; - } + private static Logger log = HealthDirector.LOGGER; + + private static long totPtGRequest = 0L; + private static long totPtPRequest = 0L; + private static long totSYNCHRequest = 0L; + + /** */ + public DetectiveGlance() {} + + /** + * Get current size of heap in bytes + * + * @return long + */ + private static long getHeapSize() { + + long heapSize = Runtime.getRuntime().totalMemory(); + return heapSize; + } + + /** + * Get maximum size of heap in bytes. The heap cannot grow beyond this size. Any attempt will + * result in an OutOfMemoryException. + * + * @return long + */ + private static long getHeapMaxSize() { + + long heapMaxSize = Runtime.getRuntime().maxMemory(); + return heapMaxSize; + } + + /** + * Get amount of free memory within the heap in bytes. This size will increase after garbage + * collection and decrease as new objects are created. 
+ * + * @return long + */ + private static long getHeapFreeSize() { + + long heapFreeSize = Runtime.getRuntime().freeMemory(); + return heapFreeSize; + } + + public static void addPtGRequests(int nrPtGRequest) { + + totPtGRequest += nrPtGRequest; + } + + public static void addPtPRequests(int nrPtPRequest) { + + totPtPRequest += nrPtPRequest; + } + + public static void addSynchRequest(int nrSynchRequest) { + + totSYNCHRequest += nrSynchRequest; + } + + public StoRMStatus haveaLook() { + + log.trace("DetectiveGlance.haveaLook"); + StoRMStatus stormStatus = new StoRMStatus(); + stormStatus.setHeapFreeSize(getHeapFreeSize()); + stormStatus.setMAXHeapSize(getHeapMaxSize()); + stormStatus.setHeapSize(getHeapSize()); + SimpleBookKeeper bk = HealthDirector.getHealthMonitor().getSimpleBookKeeper(); + if (bk != null) { + + int ptgReq = bk.getNumberOfRequest(OperationType.PTG); + // Sum partial to the Total + addPtGRequests(ptgReq); + int ptgSucc = bk.getNumberOfSuccess(OperationType.PTG); + long meanPtG = bk.getMeansDuration(OperationType.PTG); + + stormStatus.setPtGNumberRequests(ptgReq); + stormStatus.setPtGSuccessRequests(ptgSucc); + stormStatus.setPtGMeanDuration(meanPtG); + stormStatus.setTotalPtGRequest(totPtGRequest); + + int ptpReq = bk.getNumberOfRequest(OperationType.PTP); + // Sum partial to the Total + addPtPRequests(ptpReq); + int ptpSucc = bk.getNumberOfSuccess(OperationType.PTP); + long meanPtP = bk.getMeansDuration(OperationType.PTP); + + stormStatus.setPtPNumberRequests(ptpReq); + stormStatus.setPtPSuccessRequests(ptpSucc); + stormStatus.setPtPMeanDuration(meanPtP); + stormStatus.setTotalPtPRequest(totPtPRequest); + + int synchRequest = bk.getNumberOfSynchRequest(); + addSynchRequest(synchRequest); + + stormStatus.setSynchRequest(synchRequest); + + stormStatus.calculateLifeTime(); + bk.cleanLogBook(); + log.debug(" .. glance completed."); + } else { + log.warn("No SympleBookKeper available from HealtMonitor!"); + } + return stormStatus; + } } diff --git a/src/main/java/it/grid/storm/health/HealthDirector.java b/src/main/java/it/grid/storm/health/HealthDirector.java index 00da66a2..942bb8d6 100644 --- a/src/main/java/it/grid/storm/health/HealthDirector.java +++ b/src/main/java/it/grid/storm/health/HealthDirector.java @@ -1,242 +1,196 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; import it.grid.storm.config.Configuration; import it.grid.storm.logging.StoRMLoggers; - import java.text.SimpleDateFormat; import java.util.Date; - import org.slf4j.Logger; public class HealthDirector { - public static final Logger LOGGER = StoRMLoggers.getHBLogger(); - public static final Logger HEARTLOG = StoRMLoggers.getHBLogger(); - private static final Logger BOOKKEEPING = StoRMLoggers.getBKLogger(); - private static final Logger PERFLOG = StoRMLoggers.getPerfLogger(); - - private static boolean initialized = false; - private static HealthMonitor healthMonitorIstance = null; - private static boolean bookKeepingConfigured = false; - private static boolean bookKeepingEnabled = false; - - private static boolean performanceMonitorConfigured = false; - private static boolean performanceMonitorEnabled = false; - - private static long bornInstant = -1L; - private static String bornInstantStr = null; - - public static int timeToLiveLogEventInSec = Configuration.getInstance() - .getPerformanceLogbookTimeInterval(); - - /** - * - * @param testingMode - * boolean - */ - public static void initializeDirector(boolean testingMode) { - - // configureHealthLog(testingMode); - - bookKeepingEnabled = Configuration.getInstance().getBookKeepingEnabled(); - if (bookKeepingEnabled) { - // configureBookKeeping(testingMode); - bookKeepingConfigured = true; - } - - int statusPeriod = Configuration.getInstance().getHearthbeatPeriod(); - if (testingMode) { - statusPeriod = 5; - } - - // Record the born of StoRM instance - bornInstant = System.currentTimeMillis(); - Date date = new Date(bornInstant); - SimpleDateFormat formatter = new SimpleDateFormat("yyyy.MM.dd HH.mm.ss"); - bornInstantStr = formatter.format(date); - - healthMonitorIstance = new HealthMonitor(1, statusPeriod); // Start after 1 - // sec - - // Setting performance rate - performanceMonitorEnabled = Configuration.getInstance() - .getPerformanceMeasuring(); - if (performanceMonitorEnabled) { - // configurePerformanceMonitor(testingMode); - int glanceTimeInterval = Configuration.getInstance() - .getPerformanceGlanceTimeInterval(); - - LOGGER.debug("----- Performance GLANCE Time Interval = " - + glanceTimeInterval); - LOGGER.debug("----- Performance LOGBOOK Time Interval = " - + timeToLiveLogEventInSec); - - healthMonitorIstance.initializePerformanceMonitor( - timeToLiveLogEventInSec, glanceTimeInterval); - - } - - initialized = true; - - } - - - private static String getHealthPatternLayout() { - - /** - * @todo : Retrieve Patter Layout from Configuration .. - */ - String pattern = "[%d{ISO8601}]: %m%n"; - return pattern; - } - - /** - * @return String - */ - private static String getBookKeppingPatternLayout() { - - /** - * @todo : Retrieve Patter Layout from Configuration .. - */ - String pattern = "[%d{ISO8601}]: %-5p [%t] %x -%m%n"; - return pattern; - } - - /** - * @return String - */ - private static String getPerformanceMonitoringPatternLayout() { - - /** - * @todo : Retrieve Patter Layout from Configuration .. 
- */ - String pattern = "[%d{ISO8601}]: %m%n"; - return pattern; - } - - /** - * - * @return Logger - */ - public static Logger getLogger() { - - return LOGGER; - } - - /** - * - * @return Logger - */ - public static Logger getHealthLogger() { - - return HEARTLOG; - } - - /** - * - * @return Logger - */ - public static Logger getBookkeepingLogger() { - - return BOOKKEEPING; - } - - /** - * - * @return Logger - */ - public static Logger getPerformanceLogger() { - - return PERFLOG; - } - - public static boolean isBookKeepingConfigured() { - - return bookKeepingConfigured; - } - - public static boolean isBookKeepingEnabled() { - - return bookKeepingEnabled; - } - - public static boolean isPerformanceMonitorConfigured() { - - return performanceMonitorConfigured; - } - - public static boolean isPerformanceMonitorEnabled() { - - return performanceMonitorEnabled; - } - - /** - * - * @return Logger - */ - public static Logger getBookKeepingLogger() { - - return BOOKKEEPING; - } - - /** - * - * @return Namespace - */ - public static HealthMonitor getHealthMonitor() { - - if (!(initialized)) { - initializeDirector(false); - } - return healthMonitorIstance; - } - - /** - * - * @return Namespace - */ - public static HealthMonitor getHealthMonitor(boolean testingMode) { - - if (!(initialized)) { - initializeDirector(testingMode); - } - return healthMonitorIstance; - } - - public static long getBornInstant(boolean testingMode) { + public static final Logger LOGGER = StoRMLoggers.getHBLogger(); + public static final Logger HEARTLOG = StoRMLoggers.getHBLogger(); + private static final Logger BOOKKEEPING = StoRMLoggers.getBKLogger(); + private static final Logger PERFLOG = StoRMLoggers.getPerfLogger(); + + private static boolean initialized = false; + private static HealthMonitor healthMonitorIstance = null; + private static boolean bookKeepingConfigured = false; + private static boolean bookKeepingEnabled = false; + + private static boolean performanceMonitorConfigured = false; + private static boolean performanceMonitorEnabled = false; + + private static long bornInstant = -1L; + private static String bornInstantStr = null; + + public static int timeToLiveLogEventInSec = + Configuration.getInstance().getPerformanceLogbookTimeInterval(); + + /** @param testingMode boolean */ + public static void initializeDirector(boolean testingMode) { + + // configureHealthLog(testingMode); + + bookKeepingEnabled = Configuration.getInstance().getBookKeepingEnabled(); + if (bookKeepingEnabled) { + // configureBookKeeping(testingMode); + bookKeepingConfigured = true; + } + + int statusPeriod = Configuration.getInstance().getHearthbeatPeriod(); + if (testingMode) { + statusPeriod = 5; + } + + // Record the born of StoRM instance + bornInstant = System.currentTimeMillis(); + Date date = new Date(bornInstant); + SimpleDateFormat formatter = new SimpleDateFormat("yyyy.MM.dd HH.mm.ss"); + bornInstantStr = formatter.format(date); + + healthMonitorIstance = new HealthMonitor(1, statusPeriod); // Start after 1 + // sec + + // Setting performance rate + performanceMonitorEnabled = Configuration.getInstance().getPerformanceMeasuring(); + if (performanceMonitorEnabled) { + // configurePerformanceMonitor(testingMode); + int glanceTimeInterval = Configuration.getInstance().getPerformanceGlanceTimeInterval(); + + LOGGER.debug("----- Performance GLANCE Time Interval = " + glanceTimeInterval); + LOGGER.debug("----- Performance LOGBOOK Time Interval = " + timeToLiveLogEventInSec); + + healthMonitorIstance.initializePerformanceMonitor( + 
timeToLiveLogEventInSec, glanceTimeInterval); + } + + initialized = true; + } + + private static String getHealthPatternLayout() { + + /** @todo : Retrieve Patter Layout from Configuration .. */ + String pattern = "[%d{ISO8601}]: %m%n"; + return pattern; + } + + /** @return String */ + private static String getBookKeppingPatternLayout() { + + /** @todo : Retrieve Patter Layout from Configuration .. */ + String pattern = "[%d{ISO8601}]: %-5p [%t] %x -%m%n"; + return pattern; + } + + /** @return String */ + private static String getPerformanceMonitoringPatternLayout() { + + /** @todo : Retrieve Patter Layout from Configuration .. */ + String pattern = "[%d{ISO8601}]: %m%n"; + return pattern; + } + + /** @return Logger */ + public static Logger getLogger() { + + return LOGGER; + } + + /** @return Logger */ + public static Logger getHealthLogger() { + + return HEARTLOG; + } + + /** @return Logger */ + public static Logger getBookkeepingLogger() { + + return BOOKKEEPING; + } + + /** @return Logger */ + public static Logger getPerformanceLogger() { + + return PERFLOG; + } + + public static boolean isBookKeepingConfigured() { + + return bookKeepingConfigured; + } + + public static boolean isBookKeepingEnabled() { + + return bookKeepingEnabled; + } + + public static boolean isPerformanceMonitorConfigured() { + + return performanceMonitorConfigured; + } + + public static boolean isPerformanceMonitorEnabled() { + + return performanceMonitorEnabled; + } + + /** @return Logger */ + public static Logger getBookKeepingLogger() { + + return BOOKKEEPING; + } + + /** @return Namespace */ + public static HealthMonitor getHealthMonitor() { + + if (!(initialized)) { + initializeDirector(false); + } + return healthMonitorIstance; + } + + /** @return Namespace */ + public static HealthMonitor getHealthMonitor(boolean testingMode) { + + if (!(initialized)) { + initializeDirector(testingMode); + } + return healthMonitorIstance; + } - if (!(initialized)) { - initializeDirector(testingMode); - } - return bornInstant; - } + public static long getBornInstant(boolean testingMode) { - public static String getBornInstantStr(boolean testingMode) { + if (!(initialized)) { + initializeDirector(testingMode); + } + return bornInstant; + } - if (!(initialized)) { - initializeDirector(testingMode); - } - return bornInstantStr; - } + public static String getBornInstantStr(boolean testingMode) { - public static long getBornInstant() { + if (!(initialized)) { + initializeDirector(testingMode); + } + return bornInstantStr; + } - if (!(initialized)) { - initializeDirector(false); - } - return bornInstant; - } + public static long getBornInstant() { - public static String getBornInstantStr() { + if (!(initialized)) { + initializeDirector(false); + } + return bornInstant; + } - if (!(initialized)) { - initializeDirector(false); - } - return bornInstantStr; - } + public static String getBornInstantStr() { + if (!(initialized)) { + initializeDirector(false); + } + return bornInstantStr; + } } diff --git a/src/main/java/it/grid/storm/health/HealthMonitor.java b/src/main/java/it/grid/storm/health/HealthMonitor.java index 2d5ca047..a9338de4 100644 --- a/src/main/java/it/grid/storm/health/HealthMonitor.java +++ b/src/main/java/it/grid/storm/health/HealthMonitor.java @@ -1,89 +1,82 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; import java.util.ArrayList; import java.util.Hashtable; import java.util.Timer; - import org.slf4j.Logger; public class HealthMonitor { - private Logger HEARTLOG = HealthDirector.HEARTLOG; - private Logger PERFLOG = HealthDirector.getPerformanceLogger(); + private Logger HEARTLOG = HealthDirector.HEARTLOG; + private Logger PERFLOG = HealthDirector.getPerformanceLogger(); + + private Timer healthTimer = null; + private Hashtable bookKeepers; - private Timer healthTimer = null; - private Hashtable bookKeepers; + public static int perfGlanceTimeInterval = 15; // 15 sec - public static int perfGlanceTimeInterval = 15; // 15 sec + public HealthMonitor(int delay, int period) { - public HealthMonitor(int delay, int period) { + healthTimer = new Timer(); + this.heartbeat(delay * 1000, period * 1000); - healthTimer = new Timer(); - this.heartbeat(delay * 1000, period * 1000); + // Create the Book Keepers + bookKeepers = new Hashtable(); - // Create the Book Keepers - bookKeepers = new Hashtable(); + // Add the Simple BookKeeper + bookKeepers.put(SimpleBookKeeper.KEY, new SimpleBookKeeper()); - // Add the Simple BookKeeper - bookKeepers.put(SimpleBookKeeper.KEY, new SimpleBookKeeper()); + HEARTLOG.info("HEART MONITOR Initialized"); + } - HEARTLOG.info("HEART MONITOR Initialized"); - } + public void initializePerformanceMonitor(int logTimeInterval, int defaultGlangeTimeInterval) { - public void initializePerformanceMonitor(int logTimeInterval, - int defaultGlangeTimeInterval) { + if (defaultGlangeTimeInterval > logTimeInterval) { + HealthDirector.getPerformanceLogger() + .warn("WARNING: Log Book has the time " + "interval lower than Glance time interval!"); + } + // Add the Performance BookKeeper + PerformanceBookKeeper pbk = + new PerformanceBookKeeper(logTimeInterval, defaultGlangeTimeInterval); + bookKeepers.put(PerformanceBookKeeper.KEY, pbk); - if (defaultGlangeTimeInterval > logTimeInterval) { - HealthDirector.getPerformanceLogger().warn( - "WARNING: Log Book has the time " - + "interval lower than Glance time interval!"); - } - // Add the Performance BookKeeper - PerformanceBookKeeper pbk = new PerformanceBookKeeper(logTimeInterval, - defaultGlangeTimeInterval); - bookKeepers.put(PerformanceBookKeeper.KEY, pbk); + long pulseTimeInterval = pbk.getGlanceWindowInMSec(); + // this.perfEnabled = true; + healthTimer.scheduleAtFixedRate(new PerformancePulse(), 0, pulseTimeInterval); + PERFLOG.info("Set PERFORMANCE MONITOR in Timer Task (PERIOD:{})", perfGlanceTimeInterval); - long pulseTimeInterval = pbk.getGlanceWindowInMSec(); - // this.perfEnabled = true; - healthTimer.scheduleAtFixedRate(new PerformancePulse(), 0, - pulseTimeInterval); - PERFLOG.info("Set PERFORMANCE MONITOR in Timer Task (PERIOD:{})", - perfGlanceTimeInterval); - - PERFLOG.info("--- PERFORMANCE MONITOR Initialized"); - } + PERFLOG.info("--- PERFORMANCE MONITOR Initialized"); + } - public ArrayList getBookKeepers() { + public ArrayList getBookKeepers() { - return new ArrayList(bookKeepers.values()); - } + return new ArrayList(bookKeepers.values()); + } - public PerformanceBookKeeper getPerformanceBookKeeper() { + public PerformanceBookKeeper getPerformanceBookKeeper() { - if (bookKeepers.containsKey(PerformanceBookKeeper.KEY)) { - return (PerformanceBookKeeper) bookKeepers.get(PerformanceBookKeeper.KEY); - } else { - return null; - } - } + if (bookKeepers.containsKey(PerformanceBookKeeper.KEY)) { + return (PerformanceBookKeeper) 
bookKeepers.get(PerformanceBookKeeper.KEY); + } else { + return null; + } + } - public SimpleBookKeeper getSimpleBookKeeper() { + public SimpleBookKeeper getSimpleBookKeeper() { - if (bookKeepers.containsKey(SimpleBookKeeper.KEY)) { - return (SimpleBookKeeper) bookKeepers.get(SimpleBookKeeper.KEY); - } else { - return null; - } - } + if (bookKeepers.containsKey(SimpleBookKeeper.KEY)) { + return (SimpleBookKeeper) bookKeepers.get(SimpleBookKeeper.KEY); + } else { + return null; + } + } - public void heartbeat(int delay, int period) { + public void heartbeat(int delay, int period) { - healthTimer.scheduleAtFixedRate(new Hearthbeat(), delay, period); - HEARTLOG.info("Set HEARTHBEAT in Timer Task (DELAY: {}, PERIOD: {})", delay, - period); - } + healthTimer.scheduleAtFixedRate(new Hearthbeat(), delay, period); + HEARTLOG.info("Set HEARTHBEAT in Timer Task (DELAY: {}, PERIOD: {})", delay, period); + } } diff --git a/src/main/java/it/grid/storm/health/Hearthbeat.java b/src/main/java/it/grid/storm/health/Hearthbeat.java index 55454a57..0be8b655 100644 --- a/src/main/java/it/grid/storm/health/Hearthbeat.java +++ b/src/main/java/it/grid/storm/health/Hearthbeat.java @@ -1,38 +1,36 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; import java.util.TimerTask; - import org.slf4j.Logger; public class Hearthbeat extends TimerTask { - private DetectiveGlance detective; - private Logger HEALTH_LOG = HealthDirector.HEARTLOG; - private long progressivNumber = 0L; - - protected Hearthbeat() { - - detective = new DetectiveGlance(); - } - - /** - * When an object implementing interface Runnable is used to - * create a thread, starting the thread causes the object's run - * method to be called in that separately executing thread. - * - * @todo Implement this java.lang.Runnable method - */ - @Override - public void run() { - - progressivNumber++; - StoRMStatus status = detective.haveaLook(); - status.setPulseNumber(progressivNumber); - HEALTH_LOG.debug("*** HEARTHBEAT ***"); - HEALTH_LOG.info(status.toString()); - } + private DetectiveGlance detective; + private Logger HEALTH_LOG = HealthDirector.HEARTLOG; + private long progressivNumber = 0L; + + protected Hearthbeat() { + + detective = new DetectiveGlance(); + } + + /** + * When an object implementing interface Runnable is used to create a thread, + * starting the thread causes the object's run method to be called in that separately + * executing thread. + * + * @todo Implement this java.lang.Runnable method + */ + @Override + public void run() { + + progressivNumber++; + StoRMStatus status = detective.haveaLook(); + status.setPulseNumber(progressivNumber); + HEALTH_LOG.debug("*** HEARTHBEAT ***"); + HEALTH_LOG.info(status.toString()); + } } diff --git a/src/main/java/it/grid/storm/health/LogEvent.java b/src/main/java/it/grid/storm/health/LogEvent.java index eceac48b..d3b25818 100644 --- a/src/main/java/it/grid/storm/health/LogEvent.java +++ b/src/main/java/it/grid/storm/health/LogEvent.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; import it.grid.storm.srm.types.TSURL; - import java.text.SimpleDateFormat; import java.util.Date; import java.util.concurrent.Delayed; @@ -13,141 +11,157 @@ public class LogEvent implements Delayed { - // Attributes to manage the Event within the BookKeeper - private static long THOUSAND = 1000L; - public final long birthTime; - public final long deathTime; - private long timeToLive = 60000L; // Expressed in MILLISEC (1 min) - - // Attributes of EVENT - private OperationType opType = null; - private String userDN = null; - private String surl = null; - private long startTime = -1L; - private String startTimeStr = null; - private long duration = -1L; - private String requestToken = null; - private boolean successResult = false; - - public LogEvent(OperationType opType, String userDN, String surl, - long startTime, long durationInMilliSec, String requestToken, boolean successResult) { - - this.opType = opType; - this.userDN = userDN; - this.surl = surl; - this.startTime = startTime; - this.duration = durationInMilliSec; - this.requestToken = requestToken; - Date date = new Date(startTime); - SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss,SSS"); - this.startTimeStr = formatter.format(date); - this.successResult = successResult; - - this.timeToLive = HealthDirector.timeToLiveLogEventInSec; - this.deathTime = System.currentTimeMillis() - + (HealthDirector.timeToLiveLogEventInSec * LogEvent.THOUSAND); - this.birthTime = System.currentTimeMillis(); - - } - - public LogEvent(OperationType opType, String userDN, long startTime, - long durationInMilliSec, boolean successResult) { - - this(opType, userDN, TSURL.makeEmpty().toString(), startTime, durationInMilliSec, - "SYNCH", successResult); - HealthDirector.LOGGER.debug("Event TTL (milliSec): {}", timeToLive); - } - - public LogEvent(OperationType opType, String userDN, String surl, - long startTime, long durationInMilliSec, boolean successResult) { - - this(opType, userDN, surl, startTime, durationInMilliSec, "SYNCH", successResult); - HealthDirector.LOGGER.debug("Event TTL (milliSec): {}", timeToLive); - } - - public OperationType getOperationType() { - - return this.opType; - } - - public String getDN() { - - return this.userDN; - } - - public String getSURL() { - - return this.surl; - } - - public long getStartTime() { - - return this.startTime; - } - - public String getStartTimeString() { - - return this.startTimeStr; - } - - /** - * @return duration in millisec - */ - public long getDuration() { - - return this.duration; - } - - public String getRequestToken() { - - return this.requestToken; - } - - public boolean isSuccess() { - - return this.successResult; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - final char fieldSeparator = '\t'; - sb.append(userDN).append(fieldSeparator); - sb.append(opType.toString()).append(fieldSeparator); - sb.append(opType.getOperationTypeCategory()).append(fieldSeparator); - if (this.successResult) { - sb.append("-OK-").append(fieldSeparator); - } else { - sb.append("#ko#").append(fieldSeparator); - } - sb.append(surl).append(fieldSeparator); - sb.append(startTimeStr).append(fieldSeparator); - sb.append(duration).append(fieldSeparator); - sb.append(requestToken).append(fieldSeparator); - return sb.toString(); - } - - public long getDelay(TimeUnit unit) { - - long result = -1; - result = unit.convert(deathTime - System.currentTimeMillis(), - TimeUnit.MILLISECONDS); - 
HealthDirector.LOGGER.debug("Event TimeToLive : {} result: {}", - timeToLive, result); - - return result; - } - - public int compareTo(Delayed other) { - - LogEvent otherEvent = (LogEvent) other; - if (deathTime < otherEvent.deathTime) { - return -1; - } - if (deathTime > otherEvent.deathTime) { - return 1; - } - return 0; - } + // Attributes to manage the Event within the BookKeeper + private static long THOUSAND = 1000L; + public final long birthTime; + public final long deathTime; + private long timeToLive = 60000L; // Expressed in MILLISEC (1 min) + + // Attributes of EVENT + private OperationType opType = null; + private String userDN = null; + private String surl = null; + private long startTime = -1L; + private String startTimeStr = null; + private long duration = -1L; + private String requestToken = null; + private boolean successResult = false; + + public LogEvent( + OperationType opType, + String userDN, + String surl, + long startTime, + long durationInMilliSec, + String requestToken, + boolean successResult) { + + this.opType = opType; + this.userDN = userDN; + this.surl = surl; + this.startTime = startTime; + this.duration = durationInMilliSec; + this.requestToken = requestToken; + Date date = new Date(startTime); + SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss,SSS"); + this.startTimeStr = formatter.format(date); + this.successResult = successResult; + + this.timeToLive = HealthDirector.timeToLiveLogEventInSec; + this.deathTime = + System.currentTimeMillis() + (HealthDirector.timeToLiveLogEventInSec * LogEvent.THOUSAND); + this.birthTime = System.currentTimeMillis(); + } + + public LogEvent( + OperationType opType, + String userDN, + long startTime, + long durationInMilliSec, + boolean successResult) { + + this( + opType, + userDN, + TSURL.makeEmpty().toString(), + startTime, + durationInMilliSec, + "SYNCH", + successResult); + HealthDirector.LOGGER.debug("Event TTL (milliSec): {}", timeToLive); + } + + public LogEvent( + OperationType opType, + String userDN, + String surl, + long startTime, + long durationInMilliSec, + boolean successResult) { + + this(opType, userDN, surl, startTime, durationInMilliSec, "SYNCH", successResult); + HealthDirector.LOGGER.debug("Event TTL (milliSec): {}", timeToLive); + } + + public OperationType getOperationType() { + + return this.opType; + } + + public String getDN() { + + return this.userDN; + } + + public String getSURL() { + + return this.surl; + } + + public long getStartTime() { + + return this.startTime; + } + + public String getStartTimeString() { + + return this.startTimeStr; + } + + /** @return duration in millisec */ + public long getDuration() { + + return this.duration; + } + + public String getRequestToken() { + + return this.requestToken; + } + + public boolean isSuccess() { + + return this.successResult; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + final char fieldSeparator = '\t'; + sb.append(userDN).append(fieldSeparator); + sb.append(opType.toString()).append(fieldSeparator); + sb.append(opType.getOperationTypeCategory()).append(fieldSeparator); + if (this.successResult) { + sb.append("-OK-").append(fieldSeparator); + } else { + sb.append("#ko#").append(fieldSeparator); + } + sb.append(surl).append(fieldSeparator); + sb.append(startTimeStr).append(fieldSeparator); + sb.append(duration).append(fieldSeparator); + sb.append(requestToken).append(fieldSeparator); + return sb.toString(); + } + + public long getDelay(TimeUnit unit) { + + long result = -1; + result = 
unit.convert(deathTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS); + HealthDirector.LOGGER.debug("Event TimeToLive : {} result: {}", timeToLive, result); + + return result; + } + + public int compareTo(Delayed other) { + + LogEvent otherEvent = (LogEvent) other; + if (deathTime < otherEvent.deathTime) { + return -1; + } + if (deathTime > otherEvent.deathTime) { + return 1; + } + return 0; + } } diff --git a/src/main/java/it/grid/storm/health/OperationType.java b/src/main/java/it/grid/storm/health/OperationType.java index 008aa489..7c84b32e 100644 --- a/src/main/java/it/grid/storm/health/OperationType.java +++ b/src/main/java/it/grid/storm/health/OperationType.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; @@ -8,134 +7,137 @@ public class OperationType { - private int operationIndex = -1; - private String operationName; - private String operationDescription; - private OperationTypeCategory opTypeCategory; - - public final static OperationType UNDEF = new OperationType(0, "UNDEF", - "Undefined", OperationTypeCategory.UNKNOWN); - - public final static OperationType PTG = new OperationType(1, "PTG", - "srmPrepareToGet", OperationTypeCategory.ASYNCH); - public final static OperationType SPTG = new OperationType(20, "SPTG", - "srmPrepareToGetStatus", OperationTypeCategory.SYNCH_DB); - public final static OperationType PTP = new OperationType(2, "PTP", - "srmPrepareToPut", OperationTypeCategory.ASYNCH); - public final static OperationType SPTP = new OperationType(21, "SPTP", - "srmPrepareToPutStatus", OperationTypeCategory.SYNCH_DB); - public final static OperationType COPY = new OperationType(3, "COPY", - "srmCopy", OperationTypeCategory.ASYNCH); - public final static OperationType BOL = new OperationType(4, "BOL", - "srmBringOnLine", OperationTypeCategory.ASYNCH); - - public final static OperationType PNG = new OperationType(13, "PNG", - "srmPing", OperationTypeCategory.PURESYNCH); - - public final static OperationType MKD = new OperationType(11, "MKD", - "srmMkdir", OperationTypeCategory.SYNCH_FS); - public final static OperationType MV = new OperationType(12, "MV", "srmMv", - OperationTypeCategory.SYNCH_FS); - public final static OperationType RM = new OperationType(18, "RM", "srmRm", - OperationTypeCategory.SYNCH_FS); - public final static OperationType RMD = new OperationType(19, "RMD", - "srmRmdir", OperationTypeCategory.SYNCH_FS); - - public final static OperationType EFL = new OperationType(7, "ELT", - "srmExtendLifeTime", OperationTypeCategory.SYNCH_DB); - public final static OperationType GST = new OperationType(9, "GST", - "srmGetSpaceTokens", OperationTypeCategory.SYNCH_DB); - public final static OperationType RSP = new OperationType(16, "RSP", - "srmReleaseSpace", OperationTypeCategory.SYNCH_DB); - - public final static OperationType AF = new OperationType(5, "AF", - "srmAbortFile", OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType AR = new OperationType(6, "AR", - "srmAbortRequest", OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType GSM = new OperationType(8, "GSM", - "srmGetSpaceMetaData", OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType LS = new OperationType(10, "LS", "srmLs", - OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType PD = new OperationType(14, "PD", - 
"srmPutDone", OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType RF = new OperationType(15, "RF", - "srmReleaseFile", OperationTypeCategory.SYNCH_FS_DB); - public final static OperationType RS = new OperationType(17, "RS", - "srmReserveSpace", OperationTypeCategory.SYNCH_FS_DB); - - public OperationType(int operationIndex, String operationName, - String operationDescription, OperationTypeCategory opCat) { - - this.operationIndex = operationIndex; - this.operationName = operationName; - this.operationDescription = operationDescription; - this.opTypeCategory = opCat; - } - - public static OperationType makeFromChunkType(ChunkType chunkType) { - - OperationType result = OperationType.UNDEF; - switch (chunkType.getIndex()) { - case 1: - result = OperationType.PTG; - break; - case 2: - result = OperationType.PTP; - break; - case 3: - result = OperationType.COPY; - break; - case 4: - result = OperationType.BOL; - break; - default: - result = OperationType.UNDEF; - break; - } - return result; - } - - public boolean isSynchronousOperation() { - - boolean result = false; - if (this.operationIndex > 4) { - result = true; - } - return result; - } - - public String getOperationDescription() { - - return this.operationDescription; - } - - public OperationTypeCategory getOperationTypeCategory() { - - return this.opTypeCategory; - } - - @Override - public String toString() { - - return this.operationName; - } - - @Override - public int hashCode() { - - return this.operationIndex; - } - - @Override - public boolean equals(Object obj) { - - boolean result = false; - if (obj instanceof OperationType) { - OperationType other = (OperationType) obj; - if (other.operationIndex == this.operationIndex) { - result = true; - } - } - return result; - } -} \ No newline at end of file + private int operationIndex = -1; + private String operationName; + private String operationDescription; + private OperationTypeCategory opTypeCategory; + + public static final OperationType UNDEF = + new OperationType(0, "UNDEF", "Undefined", OperationTypeCategory.UNKNOWN); + + public static final OperationType PTG = + new OperationType(1, "PTG", "srmPrepareToGet", OperationTypeCategory.ASYNCH); + public static final OperationType SPTG = + new OperationType(20, "SPTG", "srmPrepareToGetStatus", OperationTypeCategory.SYNCH_DB); + public static final OperationType PTP = + new OperationType(2, "PTP", "srmPrepareToPut", OperationTypeCategory.ASYNCH); + public static final OperationType SPTP = + new OperationType(21, "SPTP", "srmPrepareToPutStatus", OperationTypeCategory.SYNCH_DB); + public static final OperationType COPY = + new OperationType(3, "COPY", "srmCopy", OperationTypeCategory.ASYNCH); + public static final OperationType BOL = + new OperationType(4, "BOL", "srmBringOnLine", OperationTypeCategory.ASYNCH); + + public static final OperationType PNG = + new OperationType(13, "PNG", "srmPing", OperationTypeCategory.PURESYNCH); + + public static final OperationType MKD = + new OperationType(11, "MKD", "srmMkdir", OperationTypeCategory.SYNCH_FS); + public static final OperationType MV = + new OperationType(12, "MV", "srmMv", OperationTypeCategory.SYNCH_FS); + public static final OperationType RM = + new OperationType(18, "RM", "srmRm", OperationTypeCategory.SYNCH_FS); + public static final OperationType RMD = + new OperationType(19, "RMD", "srmRmdir", OperationTypeCategory.SYNCH_FS); + + public static final OperationType EFL = + new OperationType(7, "ELT", "srmExtendLifeTime", OperationTypeCategory.SYNCH_DB); + public 
static final OperationType GST = + new OperationType(9, "GST", "srmGetSpaceTokens", OperationTypeCategory.SYNCH_DB); + public static final OperationType RSP = + new OperationType(16, "RSP", "srmReleaseSpace", OperationTypeCategory.SYNCH_DB); + + public static final OperationType AF = + new OperationType(5, "AF", "srmAbortFile", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType AR = + new OperationType(6, "AR", "srmAbortRequest", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType GSM = + new OperationType(8, "GSM", "srmGetSpaceMetaData", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType LS = + new OperationType(10, "LS", "srmLs", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType PD = + new OperationType(14, "PD", "srmPutDone", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType RF = + new OperationType(15, "RF", "srmReleaseFile", OperationTypeCategory.SYNCH_FS_DB); + public static final OperationType RS = + new OperationType(17, "RS", "srmReserveSpace", OperationTypeCategory.SYNCH_FS_DB); + + public OperationType( + int operationIndex, + String operationName, + String operationDescription, + OperationTypeCategory opCat) { + + this.operationIndex = operationIndex; + this.operationName = operationName; + this.operationDescription = operationDescription; + this.opTypeCategory = opCat; + } + + public static OperationType makeFromChunkType(ChunkType chunkType) { + + OperationType result = OperationType.UNDEF; + switch (chunkType.getIndex()) { + case 1: + result = OperationType.PTG; + break; + case 2: + result = OperationType.PTP; + break; + case 3: + result = OperationType.COPY; + break; + case 4: + result = OperationType.BOL; + break; + default: + result = OperationType.UNDEF; + break; + } + return result; + } + + public boolean isSynchronousOperation() { + + boolean result = false; + if (this.operationIndex > 4) { + result = true; + } + return result; + } + + public String getOperationDescription() { + + return this.operationDescription; + } + + public OperationTypeCategory getOperationTypeCategory() { + + return this.opTypeCategory; + } + + @Override + public String toString() { + + return this.operationName; + } + + @Override + public int hashCode() { + + return this.operationIndex; + } + + @Override + public boolean equals(Object obj) { + + boolean result = false; + if (obj instanceof OperationType) { + OperationType other = (OperationType) obj; + if (other.operationIndex == this.operationIndex) { + result = true; + } + } + return result; + } +} diff --git a/src/main/java/it/grid/storm/health/OperationTypeCategory.java b/src/main/java/it/grid/storm/health/OperationTypeCategory.java index 50db1484..856b1c10 100644 --- a/src/main/java/it/grid/storm/health/OperationTypeCategory.java +++ b/src/main/java/it/grid/storm/health/OperationTypeCategory.java @@ -1,116 +1,131 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.ArrayList; -/** - * @author zappi - * - */ +/** @author zappi */ public enum OperationTypeCategory { - ASYNCH("ASYNCH", new ArrayList() { - - private static final long serialVersionUID = 3641598296676643733L; - { - add(OperationType.BOL); - add(OperationType.COPY); - add(OperationType.PTG); - add(OperationType.PTP); - } - }), PURESYNCH("PURESYNCH", new ArrayList() { - - private static final long serialVersionUID = -6608417863091343037L; - - { - add(OperationType.PNG); - } - }), SYNCH_DB("SYNCH_DB", new ArrayList() { - - private static final long serialVersionUID = 5028836664777062718L; - { - add(OperationType.EFL); - add(OperationType.GST); - add(OperationType.RSP); - add(OperationType.SPTG); - add(OperationType.SPTP); - } - }), SYNCH_FS("SYNCH_FS", new ArrayList() { - - private static final long serialVersionUID = -5750075706467406539L; - { - add(OperationType.MKD); - add(OperationType.MV); - add(OperationType.RM); - add(OperationType.RMD); - } - }), SYNCH_FS_DB("SYNCH_FS_DB", new ArrayList() { - - private static final long serialVersionUID = 6155834878615823037L; - { - add(OperationType.AF); - add(OperationType.AR); - add(OperationType.GSM); - add(OperationType.LS); - add(OperationType.PD); - add(OperationType.RF); - add(OperationType.RS); - } - }), UNKNOWN("UNKNOWN", new ArrayList() { - - private static final long serialVersionUID = -3529992869598284560L; - { - add(OperationType.UNDEF); - } - }); - - private ArrayList opTypeList; - private String acronym; - - private OperationTypeCategory(String acronym, - ArrayList opTypeList) { - - this.opTypeList = opTypeList; - this.acronym = acronym; - } - - public boolean contains(OperationType op) { - - boolean result = false; - if (this.opTypeList.contains(op)) { - result = true; - } - return result; - } - - public OperationTypeCategory getCategory(OperationType opType) { - - if (ASYNCH.contains(opType)) { - return ASYNCH; - } - if (PURESYNCH.contains(opType)) { - return PURESYNCH; - } - if (SYNCH_DB.contains(opType)) { - return SYNCH_DB; - } - if (SYNCH_FS.contains(opType)) { - return SYNCH_FS; - } - if (SYNCH_FS_DB.contains(opType)) { - return SYNCH_FS_DB; - } - return UNKNOWN; - } - - @Override - public String toString() { - - return this.acronym; - } -} \ No newline at end of file + ASYNCH( + "ASYNCH", + new ArrayList() { + + private static final long serialVersionUID = 3641598296676643733L; + + { + add(OperationType.BOL); + add(OperationType.COPY); + add(OperationType.PTG); + add(OperationType.PTP); + } + }), + PURESYNCH( + "PURESYNCH", + new ArrayList() { + + private static final long serialVersionUID = -6608417863091343037L; + + { + add(OperationType.PNG); + } + }), + SYNCH_DB( + "SYNCH_DB", + new ArrayList() { + + private static final long serialVersionUID = 5028836664777062718L; + + { + add(OperationType.EFL); + add(OperationType.GST); + add(OperationType.RSP); + add(OperationType.SPTG); + add(OperationType.SPTP); + } + }), + SYNCH_FS( + "SYNCH_FS", + new ArrayList() { + + private static final long serialVersionUID = -5750075706467406539L; + + { + add(OperationType.MKD); + add(OperationType.MV); + add(OperationType.RM); + add(OperationType.RMD); + } + }), + SYNCH_FS_DB( + "SYNCH_FS_DB", + new ArrayList() { + + private static final long serialVersionUID = 6155834878615823037L; + + { + add(OperationType.AF); + add(OperationType.AR); + add(OperationType.GSM); + add(OperationType.LS); + add(OperationType.PD); + add(OperationType.RF); + 
add(OperationType.RS); + } + }), + UNKNOWN( + "UNKNOWN", + new ArrayList() { + + private static final long serialVersionUID = -3529992869598284560L; + + { + add(OperationType.UNDEF); + } + }); + + private ArrayList opTypeList; + private String acronym; + + private OperationTypeCategory(String acronym, ArrayList opTypeList) { + + this.opTypeList = opTypeList; + this.acronym = acronym; + } + + public boolean contains(OperationType op) { + + boolean result = false; + if (this.opTypeList.contains(op)) { + result = true; + } + return result; + } + + public OperationTypeCategory getCategory(OperationType opType) { + + if (ASYNCH.contains(opType)) { + return ASYNCH; + } + if (PURESYNCH.contains(opType)) { + return PURESYNCH; + } + if (SYNCH_DB.contains(opType)) { + return SYNCH_DB; + } + if (SYNCH_FS.contains(opType)) { + return SYNCH_FS; + } + if (SYNCH_FS_DB.contains(opType)) { + return SYNCH_FS_DB; + } + return UNKNOWN; + } + + @Override + public String toString() { + + return this.acronym; + } +} diff --git a/src/main/java/it/grid/storm/health/PerformanceBookKeeper.java b/src/main/java/it/grid/storm/health/PerformanceBookKeeper.java index 9e68ac37..94bb8715 100644 --- a/src/main/java/it/grid/storm/health/PerformanceBookKeeper.java +++ b/src/main/java/it/grid/storm/health/PerformanceBookKeeper.java @@ -1,126 +1,117 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.ArrayList; import java.util.concurrent.DelayQueue; import java.util.concurrent.TimeUnit; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformanceBookKeeper extends BookKeeper { - private static final Logger log = LoggerFactory.getLogger(PerformanceBookKeeper.class); - - public static final String KEY = "PERF"; - - private static long THOUSAND = 1000L; - - private DelayQueue timedLogBook = new DelayQueue(); - private int lengthInSeconds = 0; - private long lengthInMSec = 0; - private long visibleToGlancerInMSec = 0; - - public PerformanceBookKeeper(int timeWindowInSecond, int glancerPeriodInSec) { - - this.lengthInSeconds = timeWindowInSecond; - this.lengthInMSec = timeWindowInSecond * THOUSAND; - this.visibleToGlancerInMSec = glancerPeriodInSec * THOUSAND; - } - - @Override - public void addLogEvent(LogEvent logEvent) { - - boolean result = timedLogBook.offer(logEvent); - HealthDirector.LOGGER.debug("TimedLOGBOOK (offering result) {}", result); - HealthDirector.LOGGER.debug("TimedLOGBOOK : {}", timedLogBook.size()); - } - - public long getGlanceWindowInMSec() { - - return this.visibleToGlancerInMSec; - } - - public int getTimeWindowInSecond() { - - return this.lengthInSeconds; - } - - /** - * getZombieEvents - * - * Remove from the queue LogBook the event with lifetime expired - * - * @return the arraylist of removed delayed Log Event - */ - public ArrayList removeZombieEvents() { - - ArrayList zombies = new ArrayList(); - int nZombies = timedLogBook.drainTo(zombies); - logDebug("Removed " + nZombies + "oldest event in Delayed log book."); - return zombies; - } - - /** - * getSnapshot - * - * create a purged copy of LogBook of all LogEvent yet alive. 
- * - * @return - */ - public ArrayList getCompleteSnapshot() { - - removeZombieEvents(); // discard the zombies - ArrayList snapshot = new ArrayList(timedLogBook); - return snapshot; - } - - /** - * getGlancedLogBook - * - * return the list cointaing only the LogEvents within the Glance time - * interval specified by the parameter 'timeToLiveGraterThan'. - * - * Note: When the event is inserted into the timedLogBook has a maximum delay - * and when the delay is negative the event is tagged as zombie - * - * @return - */ - public ArrayList getEventsGlanced(long timeToLiveGraterThan) { - - ArrayList eGlanced = new ArrayList(); - log.debug("time to live - glance: {}",timeToLiveGraterThan); - removeZombieEvents(); - for (LogEvent event : timedLogBook) { - log.debug("event: {}", event.getDelay(TimeUnit.MILLISECONDS)); - if ((event.getDelay(TimeUnit.MILLISECONDS)) < timeToLiveGraterThan) { - eGlanced.add(event); - } - } - log.debug("Nr. Events to analyze: {}", eGlanced.size()); - - return eGlanced; - } - - public PerformanceStatus getPerformanceStatus(long timeToLiveGraterThan) { - PerformanceStatus pStatus = new PerformanceStatus( - getEventsGlanced(timeToLiveGraterThan)); - return pStatus; - } - - public PerformanceStatus getPerformanceStatus() { - - return getPerformanceStatus(this.visibleToGlancerInMSec); - } - + private static final Logger log = LoggerFactory.getLogger(PerformanceBookKeeper.class); + + public static final String KEY = "PERF"; + + private static long THOUSAND = 1000L; + + private DelayQueue timedLogBook = new DelayQueue(); + private int lengthInSeconds = 0; + private long lengthInMSec = 0; + private long visibleToGlancerInMSec = 0; + + public PerformanceBookKeeper(int timeWindowInSecond, int glancerPeriodInSec) { + + this.lengthInSeconds = timeWindowInSecond; + this.lengthInMSec = timeWindowInSecond * THOUSAND; + this.visibleToGlancerInMSec = glancerPeriodInSec * THOUSAND; + } + + @Override + public void addLogEvent(LogEvent logEvent) { + + boolean result = timedLogBook.offer(logEvent); + HealthDirector.LOGGER.debug("TimedLOGBOOK (offering result) {}", result); + HealthDirector.LOGGER.debug("TimedLOGBOOK : {}", timedLogBook.size()); + } + + public long getGlanceWindowInMSec() { + + return this.visibleToGlancerInMSec; + } + + public int getTimeWindowInSecond() { + + return this.lengthInSeconds; + } + + /** + * getZombieEvents + * + *
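<p>Zombie events are the entries whose delay has gone negative; timedLogBook is a DelayQueue, so drainTo transfers only those already-expired entries. + * + *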
<p>
Remove from the LogBook queue the events whose lifetime has expired + + * + * @return the ArrayList of removed delayed LogEvents + */ + public ArrayList removeZombieEvents() { + + ArrayList zombies = new ArrayList(); + int nZombies = timedLogBook.drainTo(zombies); + logDebug("Removed " + nZombies + " oldest events in Delayed log book."); + return zombies; + } + + /** + * getSnapshot + * + *
<p>
create a purged copy of the LogBook containing all LogEvents still alive. + * + * @return the snapshot list of LogEvents still alive + */ + public ArrayList getCompleteSnapshot() { + + removeZombieEvents(); // discard the zombies + ArrayList snapshot = new ArrayList(timedLogBook); + return snapshot; + } + + /** + * getGlancedLogBook + * + *
<p>
return the list containing only the LogEvents within the Glance time interval specified by + * the parameter 'timeToLiveGraterThan'. + * + *
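<p>For illustration, assuming a populated PerformanceBookKeeper named pbk, the default glance performed by getPerformanceStatus() is equivalent to <pre>{@code PerformanceStatus status = pbk.getPerformanceStatus(pbk.getGlanceWindowInMSec()); }</pre> + * + *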
<p>
Note: When the event is inserted into the timedLogBook has a maximum delay and when the + * delay is negative the event is tagged as zombie + * + * @return + */ + public ArrayList getEventsGlanced(long timeToLiveGraterThan) { + + ArrayList eGlanced = new ArrayList(); + log.debug("time to live - glance: {}", timeToLiveGraterThan); + removeZombieEvents(); + for (LogEvent event : timedLogBook) { + log.debug("event: {}", event.getDelay(TimeUnit.MILLISECONDS)); + if ((event.getDelay(TimeUnit.MILLISECONDS)) < timeToLiveGraterThan) { + eGlanced.add(event); + } + } + log.debug("Nr. Events to analyze: {}", eGlanced.size()); + + return eGlanced; + } + + public PerformanceStatus getPerformanceStatus(long timeToLiveGraterThan) { + PerformanceStatus pStatus = new PerformanceStatus(getEventsGlanced(timeToLiveGraterThan)); + return pStatus; + } + + public PerformanceStatus getPerformanceStatus() { + + return getPerformanceStatus(this.visibleToGlancerInMSec); + } } diff --git a/src/main/java/it/grid/storm/health/PerformanceEvent.java b/src/main/java/it/grid/storm/health/PerformanceEvent.java index b2f26af0..728db3b4 100644 --- a/src/main/java/it/grid/storm/health/PerformanceEvent.java +++ b/src/main/java/it/grid/storm/health/PerformanceEvent.java @@ -1,62 +1,56 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformanceEvent { - private OperationType opType = OperationType.UNDEF; - private int numberOfOperation = 0; - private long minExecutionDuration = Long.MAX_VALUE; - private long maxExecutionDuration = Long.MIN_VALUE; - private long meanExecutionDuration = 0; - private long totExecutionDuration = 0; - - public PerformanceEvent(OperationType op) { - - opType = op; - } - - public void addLogEvent(LogEvent logEvent) { - - if (logEvent.getOperationType().equals(this.opType)) { - this.numberOfOperation++; - long executionDuration = logEvent.getDuration(); - if (executionDuration < minExecutionDuration) { - this.minExecutionDuration = executionDuration; - } - if (executionDuration > maxExecutionDuration) { - this.maxExecutionDuration = executionDuration; - } - this.totExecutionDuration = totExecutionDuration + executionDuration; - this.meanExecutionDuration = totExecutionDuration / numberOfOperation; - } - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append(opType.toString()); - sb.append(" [ "); - sb.append("#" + this.numberOfOperation); - sb.append(", "); - sb.append(" min:" + this.minExecutionDuration); - sb.append(", "); - sb.append(" Max:" + this.maxExecutionDuration); - sb.append(", "); - sb.append(" Mean:" + this.meanExecutionDuration); - sb.append(", "); - sb.append(" TOT:" + this.totExecutionDuration); - sb.append(" ]"); - return sb.toString(); - } + private OperationType opType = OperationType.UNDEF; + private int numberOfOperation = 0; + private long minExecutionDuration = Long.MAX_VALUE; + private long maxExecutionDuration = Long.MIN_VALUE; + private long meanExecutionDuration = 0; + private long totExecutionDuration = 0; + + public PerformanceEvent(OperationType op) { + + opType = op; + } + + public void addLogEvent(LogEvent logEvent) { + + if (logEvent.getOperationType().equals(this.opType)) { + this.numberOfOperation++; + long executionDuration = 
logEvent.getDuration(); + if (executionDuration < minExecutionDuration) { + this.minExecutionDuration = executionDuration; + } + if (executionDuration > maxExecutionDuration) { + this.maxExecutionDuration = executionDuration; + } + this.totExecutionDuration = totExecutionDuration + executionDuration; + this.meanExecutionDuration = totExecutionDuration / numberOfOperation; + } + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append(opType.toString()); + sb.append(" [ "); + sb.append("#" + this.numberOfOperation); + sb.append(", "); + sb.append(" min:" + this.minExecutionDuration); + sb.append(", "); + sb.append(" Max:" + this.maxExecutionDuration); + sb.append(", "); + sb.append(" Mean:" + this.meanExecutionDuration); + sb.append(", "); + sb.append(" TOT:" + this.totExecutionDuration); + sb.append(" ]"); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/health/PerformanceGlance.java b/src/main/java/it/grid/storm/health/PerformanceGlance.java index aca3dc37..af4e3044 100644 --- a/src/main/java/it/grid/storm/health/PerformanceGlance.java +++ b/src/main/java/it/grid/storm/health/PerformanceGlance.java @@ -1,47 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.ArrayList; - import org.slf4j.Logger; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformanceGlance { - private static Logger PERFLOG = HealthDirector.getPerformanceLogger(); - - /** - * - * @return StoRMStatus - */ - public PerformanceStatus haveaLook() { + private static Logger PERFLOG = HealthDirector.getPerformanceLogger(); - HealthDirector.LOGGER.debug("Having a look.."); - PerformanceStatus performanceStatus = null; + /** @return StoRMStatus */ + public PerformanceStatus haveaLook() { - PerformanceBookKeeper pbk = HealthDirector.getHealthMonitor() - .getPerformanceBookKeeper(); + HealthDirector.LOGGER.debug("Having a look.."); + PerformanceStatus performanceStatus = null; - if (pbk != null) { - performanceStatus = pbk.getPerformanceStatus(); - ArrayList zombies = pbk.removeZombieEvents(); - HealthDirector.LOGGER - .debug("Removed # <{}> zombies.", zombies.size()); + PerformanceBookKeeper pbk = HealthDirector.getHealthMonitor().getPerformanceBookKeeper(); - HealthDirector.LOGGER.debug("have a look : {}", performanceStatus); - } + if (pbk != null) { + performanceStatus = pbk.getPerformanceStatus(); + ArrayList zombies = pbk.removeZombieEvents(); + HealthDirector.LOGGER.debug("Removed # <{}> zombies.", zombies.size()); - HealthDirector.LOGGER.debug(".. glance ended."); - return performanceStatus; - } + HealthDirector.LOGGER.debug("have a look : {}", performanceStatus); + } + HealthDirector.LOGGER.debug(".. glance ended."); + return performanceStatus; + } } diff --git a/src/main/java/it/grid/storm/health/PerformancePulse.java b/src/main/java/it/grid/storm/health/PerformancePulse.java index 91b23f40..45da003a 100644 --- a/src/main/java/it/grid/storm/health/PerformancePulse.java +++ b/src/main/java/it/grid/storm/health/PerformancePulse.java @@ -1,39 +1,31 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.TimerTask; - import org.slf4j.Logger; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformancePulse extends TimerTask { - private PerformanceGlance perfMonitor; - private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); - private long progressivNumber = 0L; - - protected PerformancePulse() { + private PerformanceGlance perfMonitor; + private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); + private long progressivNumber = 0L; - perfMonitor = new PerformanceGlance(); - } + protected PerformancePulse() { - @Override - public void run() { + perfMonitor = new PerformanceGlance(); + } - HealthDirector.LOGGER.debug("PERFORMANCE PULSE"); - progressivNumber++; - PerformanceStatus status = perfMonitor.haveaLook(); - status.setPulseNumber(progressivNumber); - PERF_LOG.info(status.toString()); - } + @Override + public void run() { + HealthDirector.LOGGER.debug("PERFORMANCE PULSE"); + progressivNumber++; + PerformanceStatus status = perfMonitor.haveaLook(); + status.setPulseNumber(progressivNumber); + PERF_LOG.info(status.toString()); + } } diff --git a/src/main/java/it/grid/storm/health/PerformanceStatus.java b/src/main/java/it/grid/storm/health/PerformanceStatus.java index cb5cda3c..3d8f228f 100644 --- a/src/main/java/it/grid/storm/health/PerformanceStatus.java +++ b/src/main/java/it/grid/storm/health/PerformanceStatus.java @@ -1,83 +1,71 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.ArrayList; import java.util.Hashtable; - import org.slf4j.Logger; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformanceStatus { - private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); - - private String pulseNumberStr = ""; - private Hashtable perfStatus = new Hashtable(); - private static int timeWindows = HealthDirector.getHealthMonitor().perfGlanceTimeInterval; + private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); - public PerformanceStatus(ArrayList eventToAnalyze) { + private String pulseNumberStr = ""; + private Hashtable perfStatus = + new Hashtable(); + private static int timeWindows = HealthDirector.getHealthMonitor().perfGlanceTimeInterval; - PERF_LOG.debug("PERFORMANCE STATUS"); - PerformanceEvent pEvent; - OperationType ot; - if (eventToAnalyze != null) { - PERF_LOG.debug("PERFORMANCE STATUS : {}", - eventToAnalyze.size()); - for (LogEvent event : eventToAnalyze) { - ot = event.getOperationType(); - if (perfStatus.containsKey(ot)) { - pEvent = perfStatus.get(event.getOperationType()); - } else { - pEvent = new PerformanceEvent(ot); - } - pEvent.addLogEvent(event); - perfStatus.put(ot, pEvent); - } - } else { - PERF_LOG.debug("NO EVENTS TO ANALYZE!!!"); - } - } + public PerformanceStatus(ArrayList eventToAnalyze) { - /** - * - * @param number - * long - */ - public void setPulseNumber(long number) { + PERF_LOG.debug("PERFORMANCE STATUS"); + PerformanceEvent pEvent; + OperationType ot; + if (eventToAnalyze != null) { + PERF_LOG.debug("PERFORMANCE STATUS : {}", eventToAnalyze.size()); + for (LogEvent event : eventToAnalyze) { + ot = event.getOperationType(); + if (perfStatus.containsKey(ot)) { + pEvent = perfStatus.get(event.getOperationType()); + } else { + pEvent = new PerformanceEvent(ot); + } + 
pEvent.addLogEvent(event); + perfStatus.put(ot, pEvent); + } + } else { + PERF_LOG.debug("NO EVENTS TO ANALYZE!!!"); + } + } - this.pulseNumberStr = number + ""; - String prefix = ""; - for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { - prefix += "."; - } - this.pulseNumberStr = prefix + this.pulseNumberStr; - } + /** @param number long */ + public void setPulseNumber(long number) { - @Override - public String toString() { + this.pulseNumberStr = number + ""; + String prefix = ""; + for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { + prefix += "."; + } + this.pulseNumberStr = prefix + this.pulseNumberStr; + } - StringBuilder result = new StringBuilder(); - result.append("#" + this.pulseNumberStr + ": "); - if (perfStatus.isEmpty()) { - result.append("No activity in the last " + timeWindows + " seconds"); - } else { - result.append("\n=== last " + timeWindows + " seconds ===\n"); - for (PerformanceEvent pEvent : perfStatus.values()) { - result.append(pEvent); - result.append("\n"); - } - } - // result.append("\n"); - return result.toString(); - } + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append("#" + this.pulseNumberStr + ": "); + if (perfStatus.isEmpty()) { + result.append("No activity in the last " + timeWindows + " seconds"); + } else { + result.append("\n=== last " + timeWindows + " seconds ===\n"); + for (PerformanceEvent pEvent : perfStatus.values()) { + result.append(pEvent); + result.append("\n"); + } + } + // result.append("\n"); + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/health/PerformanceTask.java b/src/main/java/it/grid/storm/health/PerformanceTask.java index 571d4a1d..c88c8426 100644 --- a/src/main/java/it/grid/storm/health/PerformanceTask.java +++ b/src/main/java/it/grid/storm/health/PerformanceTask.java @@ -1,44 +1,35 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health; import java.util.TimerTask; - import org.slf4j.Logger; -/** - * @author zappi - * - */ +/** @author zappi */ public class PerformanceTask extends TimerTask { - private PerformanceGlance detective; - private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); - private long progressivNumber = 0L; - - protected PerformanceTask() { - - detective = new PerformanceGlance(); - } + private PerformanceGlance detective; + private Logger PERF_LOG = HealthDirector.getPerformanceLogger(); + private long progressivNumber = 0L; - /* - * (non-Javadoc) - * - * @see java.util.TimerTask#run() - */ - @Override - public void run() { + protected PerformanceTask() { - progressivNumber++; - PerformanceStatus status = detective.haveaLook(); - status.setPulseNumber(progressivNumber); - PERF_LOG.info(status.toString()); + detective = new PerformanceGlance(); + } - } + /* + * (non-Javadoc) + * + * @see java.util.TimerTask#run() + */ + @Override + public void run() { + progressivNumber++; + PerformanceStatus status = detective.haveaLook(); + status.setPulseNumber(progressivNumber); + PERF_LOG.info(status.toString()); + } } diff --git a/src/main/java/it/grid/storm/health/SimpleBookKeeper.java b/src/main/java/it/grid/storm/health/SimpleBookKeeper.java index 6f56a344..7892f8bb 100644 --- a/src/main/java/it/grid/storm/health/SimpleBookKeeper.java +++ b/src/main/java/it/grid/storm/health/SimpleBookKeeper.java @@ -1,121 +1,108 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; public class SimpleBookKeeper extends BookKeeper { - public static final String KEY = "BK"; - - public SimpleBookKeeper() { - - super(); - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.health.BookKeeper#addLogEvent(it.grid.storm.health.LogEvent) - */ - @Override - public synchronized void addLogEvent(LogEvent logEvent) { - - logbook.add(logEvent); - logDebug("Event is added to Log Book (item #" + (logbook.size() - 1) + ""); - logInfo(logEvent.toString()); - } - - /** - * - * @return int - */ - public synchronized int getNumberOfAsynchRequest() { - - int result = 0; - for (int i = 0; i < logbook.size(); i++) { - if (!(logbook.get(i).getOperationType().isSynchronousOperation())) { - result++; - } - } - return result; - } - - /** - * - * @return int - */ - public synchronized int getNumberOfSynchRequest() { - - int result = 0; - for (int i = 0; i < logbook.size(); i++) { - if (logbook.get(i).getOperationType().isSynchronousOperation()) { - result++; - } - } - return result; - } - - /** - * - * @param opType - * OperationType - * @return int - */ - public synchronized int getNumberOfRequest(OperationType opType) { - - int result = 0; - for (int i = 0; i < logbook.size(); i++) { - if (logbook.get(i).getOperationType().equals(opType)) { - result++; - } - } - return result; - } - - /** - * - * @param opType - * OperationType - * @return long - */ - public synchronized long getMeansDuration(OperationType opType) { - - long meanTime = 0L; - long sumTime = 0L; - int requestNumber = getNumberOfRequest(opType); - if (requestNumber > 0) { - for (int i = 0; i < logbook.size(); i++) { - if (logbook.get(i).getOperationType().equals(opType)) { - sumTime += logbook.get(i).getDuration(); - } - } - meanTime = sumTime / requestNumber; - } - return meanTime; - } - - /** - * - * @param 
opType - * OperationType - * @return int - */ - public synchronized int getNumberOfSuccess(OperationType opType) { - - int result = 0; - int requestNumber = getNumberOfRequest(opType); - if (requestNumber > 0) { - for (int i = 0; i < logbook.size(); i++) { - LogEvent logE = logbook.get(i); - if (logE.getOperationType().equals(opType)) { - if (logE.isSuccess()) { - result++; - } - } - } - } - return result; - } + public static final String KEY = "BK"; + + public SimpleBookKeeper() { + + super(); + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.health.BookKeeper#addLogEvent(it.grid.storm.health.LogEvent) + */ + @Override + public synchronized void addLogEvent(LogEvent logEvent) { + + logbook.add(logEvent); + logDebug("Event is added to Log Book (item #" + (logbook.size() - 1) + ""); + logInfo(logEvent.toString()); + } + + /** @return int */ + public synchronized int getNumberOfAsynchRequest() { + + int result = 0; + for (int i = 0; i < logbook.size(); i++) { + if (!(logbook.get(i).getOperationType().isSynchronousOperation())) { + result++; + } + } + return result; + } + + /** @return int */ + public synchronized int getNumberOfSynchRequest() { + + int result = 0; + for (int i = 0; i < logbook.size(); i++) { + if (logbook.get(i).getOperationType().isSynchronousOperation()) { + result++; + } + } + return result; + } + + /** + * @param opType OperationType + * @return int + */ + public synchronized int getNumberOfRequest(OperationType opType) { + + int result = 0; + for (int i = 0; i < logbook.size(); i++) { + if (logbook.get(i).getOperationType().equals(opType)) { + result++; + } + } + return result; + } + + /** + * @param opType OperationType + * @return long + */ + public synchronized long getMeansDuration(OperationType opType) { + + long meanTime = 0L; + long sumTime = 0L; + int requestNumber = getNumberOfRequest(opType); + if (requestNumber > 0) { + for (int i = 0; i < logbook.size(); i++) { + if (logbook.get(i).getOperationType().equals(opType)) { + sumTime += logbook.get(i).getDuration(); + } + } + meanTime = sumTime / requestNumber; + } + return meanTime; + } + + /** + * @param opType OperationType + * @return int + */ + public synchronized int getNumberOfSuccess(OperationType opType) { + + int result = 0; + int requestNumber = getNumberOfRequest(opType); + if (requestNumber > 0) { + for (int i = 0; i < logbook.size(); i++) { + LogEvent logE = logbook.get(i); + if (logE.getOperationType().equals(opType)) { + if (logE.isSuccess()) { + result++; + } + } + } + } + return result; + } } diff --git a/src/main/java/it/grid/storm/health/StoRMStatus.java b/src/main/java/it/grid/storm/health/StoRMStatus.java index 323fbb67..6bdca2aa 100644 --- a/src/main/java/it/grid/storm/health/StoRMStatus.java +++ b/src/main/java/it/grid/storm/health/StoRMStatus.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.health; @@ -9,203 +8,149 @@ public class StoRMStatus { - private long heapSize = -1L; - private long heapMaxSize = -1L; - private long heapFreeSize = -1L; - private String pulseNumberStr = ""; - - private int ptgRequests = 0; - private int ptgSuccess = 0; - private long ptgMeansTime = -1L; - - private int ptpRequests = 0; - private int ptpSuccess = 0; - private long ptpMeansTime = -1L; - - private long lifetime = -1L; - private String lifetimeStr = ""; - - private long totPtGRequest = 0L; - private long totPtPRequest = 0L; - - private int synchRequest = 0; - - public StoRMStatus() { - - } - - /** - * - * @param heapSize - * long - */ - public void setHeapSize(long heapSize) { - - this.heapSize = heapSize; - } - - /** - * - * @param maxHeapSize - * long - */ - public void setMAXHeapSize(long maxHeapSize) { - - this.heapMaxSize = maxHeapSize; - } - - /** - * - * @param heapFreeSize - * long - */ - public void setHeapFreeSize(long heapFreeSize) { - - this.heapFreeSize = heapFreeSize; - } - - /** - * - * @return int - */ - public int getHeapFreePercentile() { - - int result = 100; - if (this.heapMaxSize > 0) { - double average = this.heapFreeSize / this.heapMaxSize * 100; - result = (int) average; - } - return result; - } - - /** - * - * @param number - * long - */ - public void setPulseNumber(long number) { - - this.pulseNumberStr = number + ""; - String prefix = ""; - for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { - prefix += "."; - } - this.pulseNumberStr = prefix + this.pulseNumberStr; - } - - /** - * - * @param synchRequest - * int - */ - public void setSynchRequest(int synchRequest) { - - this.synchRequest = synchRequest; - } - - /** - * - * @param ptgNumber - * int - */ - public void setPtGNumberRequests(int ptgNumber) { - - this.ptgRequests = ptgNumber; - } - - /** - * - * @param ptpSuccess - * int - */ - public void setPtGSuccessRequests(int ptgSuccess) { - - this.ptgSuccess = ptgSuccess; - } - - public void setTotalPtGRequest(long totPtG) { - - this.totPtGRequest = totPtG; - } - - public void setTotalPtPRequest(long totPtP) { - - this.totPtPRequest = totPtP; - } - - /** - * - * @param meanTime - * long - */ - public void setPtGMeanDuration(long meanTime) { - - this.ptgMeansTime = meanTime; - } - - /** - * - * @param ptpNumber - * int - */ - public void setPtPNumberRequests(int ptpNumber) { - - this.ptpRequests = ptpNumber; - } - - /** - * - * @param ptpSuccess - * int - */ - public void setPtPSuccessRequests(int ptpSuccess) { - - this.ptpSuccess = ptpSuccess; - } - - /** - * - * @param meanTime - * long - */ - public void setPtPMeanDuration(long meanTime) { - - this.ptpMeansTime = meanTime; - } - - public void calculateLifeTime() { - - long bornTime = HealthDirector.getBornInstant(); - long now = System.currentTimeMillis(); - this.lifetime = now - bornTime; - - Date date = new Date(this.lifetime); - SimpleDateFormat formatter = new SimpleDateFormat("mm.ss"); - String minsec = formatter.format(date); - long hours = this.lifetime / 3600000; - this.lifetimeStr = hours + ":" + minsec; - } - - /** - * - * @return String - */ - public String toString() { - - StringBuilder result = new StringBuilder(); - result.append(" [#" + this.pulseNumberStr + " lifetime=" + this.lifetimeStr - + "]"); - result.append(" Heap Free:" + this.heapFreeSize); - result.append(" SYNCH [" + this.synchRequest + "]"); - result.append(" ASynch [PTG:" + this.totPtGRequest); - result.append(" PTP:" + this.totPtPRequest + "]"); - result.append(" 
Last:( [#PTG=" + this.ptgRequests); - result.append(" OK=" + this.ptgSuccess); - result.append(" M.Dur.=" + this.ptgMeansTime + "]"); - result.append(" [#PTP=" + this.ptpRequests); - result.append(" OK=" + this.ptpSuccess); - result.append(" M.Dur.=" + this.ptpMeansTime + "] )"); - return result.toString(); - } + private long heapSize = -1L; + private long heapMaxSize = -1L; + private long heapFreeSize = -1L; + private String pulseNumberStr = ""; + private int ptgRequests = 0; + private int ptgSuccess = 0; + private long ptgMeansTime = -1L; + + private int ptpRequests = 0; + private int ptpSuccess = 0; + private long ptpMeansTime = -1L; + + private long lifetime = -1L; + private String lifetimeStr = ""; + + private long totPtGRequest = 0L; + private long totPtPRequest = 0L; + + private int synchRequest = 0; + + public StoRMStatus() {} + + /** @param heapSize long */ + public void setHeapSize(long heapSize) { + + this.heapSize = heapSize; + } + + /** @param maxHeapSize long */ + public void setMAXHeapSize(long maxHeapSize) { + + this.heapMaxSize = maxHeapSize; + } + + /** @param heapFreeSize long */ + public void setHeapFreeSize(long heapFreeSize) { + + this.heapFreeSize = heapFreeSize; + } + + /** @return int */ + public int getHeapFreePercentile() { + + int result = 100; + if (this.heapMaxSize > 0) { + double average = this.heapFreeSize / this.heapMaxSize * 100; + result = (int) average; + } + return result; + } + + /** @param number long */ + public void setPulseNumber(long number) { + + this.pulseNumberStr = number + ""; + String prefix = ""; + for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { + prefix += "."; + } + this.pulseNumberStr = prefix + this.pulseNumberStr; + } + + /** @param synchRequest int */ + public void setSynchRequest(int synchRequest) { + + this.synchRequest = synchRequest; + } + + /** @param ptgNumber int */ + public void setPtGNumberRequests(int ptgNumber) { + + this.ptgRequests = ptgNumber; + } + + /** @param ptpSuccess int */ + public void setPtGSuccessRequests(int ptgSuccess) { + + this.ptgSuccess = ptgSuccess; + } + + public void setTotalPtGRequest(long totPtG) { + + this.totPtGRequest = totPtG; + } + + public void setTotalPtPRequest(long totPtP) { + + this.totPtPRequest = totPtP; + } + + /** @param meanTime long */ + public void setPtGMeanDuration(long meanTime) { + + this.ptgMeansTime = meanTime; + } + + /** @param ptpNumber int */ + public void setPtPNumberRequests(int ptpNumber) { + + this.ptpRequests = ptpNumber; + } + + /** @param ptpSuccess int */ + public void setPtPSuccessRequests(int ptpSuccess) { + + this.ptpSuccess = ptpSuccess; + } + + /** @param meanTime long */ + public void setPtPMeanDuration(long meanTime) { + + this.ptpMeansTime = meanTime; + } + + public void calculateLifeTime() { + + long bornTime = HealthDirector.getBornInstant(); + long now = System.currentTimeMillis(); + this.lifetime = now - bornTime; + + Date date = new Date(this.lifetime); + SimpleDateFormat formatter = new SimpleDateFormat("mm.ss"); + String minsec = formatter.format(date); + long hours = this.lifetime / 3600000; + this.lifetimeStr = hours + ":" + minsec; + } + + /** @return String */ + public String toString() { + + StringBuilder result = new StringBuilder(); + result.append(" [#" + this.pulseNumberStr + " lifetime=" + this.lifetimeStr + "]"); + result.append(" Heap Free:" + this.heapFreeSize); + result.append(" SYNCH [" + this.synchRequest + "]"); + result.append(" ASynch [PTG:" + this.totPtGRequest); + result.append(" PTP:" + this.totPtPRequest + "]"); + 
result.append(" Last:( [#PTG=" + this.ptgRequests); + result.append(" OK=" + this.ptgSuccess); + result.append(" M.Dur.=" + this.ptgMeansTime + "]"); + result.append(" [#PTP=" + this.ptpRequests); + result.append(" OK=" + this.ptpSuccess); + result.append(" M.Dur.=" + this.ptpMeansTime + "] )"); + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/health/external/FSMetadataStatus.java b/src/main/java/it/grid/storm/health/external/FSMetadataStatus.java index f2b39d1b..ad41c011 100644 --- a/src/main/java/it/grid/storm/health/external/FSMetadataStatus.java +++ b/src/main/java/it/grid/storm/health/external/FSMetadataStatus.java @@ -1,89 +1,71 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.health.external; import it.grid.storm.health.HealthDirector; - import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.Enumeration; import java.util.Hashtable; -/** - * @author zappi - * - */ +/** @author zappi */ public class FSMetadataStatus { - private String pulseNumberStr = ""; - private long lifetime = -1L; - private String lifetimeStr = ""; - private final int benchmarkCount = -1; - private final Hashtable pathName = new Hashtable(); - - /** - * - */ - public FSMetadataStatus(ArrayList storageAreasName) { + private String pulseNumberStr = ""; + private long lifetime = -1L; + private String lifetimeStr = ""; + private final int benchmarkCount = -1; + private final Hashtable pathName = new Hashtable(); - super(); - pathName.put("Local", -1L); - for (Object element : storageAreasName) { - pathName.put((String) element, -1L); - } - } + /** */ + public FSMetadataStatus(ArrayList storageAreasName) { - /** - * - * @param number - * long - */ - public void setPulseNumber(long number) { + super(); + pathName.put("Local", -1L); + for (Object element : storageAreasName) { + pathName.put((String) element, -1L); + } + } - this.pulseNumberStr = number + ""; - String prefix = ""; - for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { - prefix += "."; - } - this.pulseNumberStr = prefix + this.pulseNumberStr; - } + /** @param number long */ + public void setPulseNumber(long number) { - public void calculateLifeTime() { + this.pulseNumberStr = number + ""; + String prefix = ""; + for (int i = 0; i < (6 - pulseNumberStr.length()); i++) { + prefix += "."; + } + this.pulseNumberStr = prefix + this.pulseNumberStr; + } - long bornTime = HealthDirector.getBornInstant(); - long now = System.currentTimeMillis(); - this.lifetime = now - bornTime; + public void calculateLifeTime() { - Date date = new Date(this.lifetime); - SimpleDateFormat formatter = new SimpleDateFormat("mm.ss"); - String minsec = formatter.format(date); - long hours = this.lifetime / 3600000; - this.lifetimeStr = hours + ":" + minsec; - } + long bornTime = HealthDirector.getBornInstant(); + long now = System.currentTimeMillis(); + this.lifetime = now - bornTime; - /** - * - * @return String - */ - @Override - public String toString() { + Date date = new Date(this.lifetime); + SimpleDateFormat formatter = new SimpleDateFormat("mm.ss"); + String minsec = formatter.format(date); + long hours = this.lifetime / 3600000; + this.lifetimeStr = hours + ":" + minsec; + } - StringBuilder result = new StringBuilder(); - result.append(" [#" + this.pulseNumberStr + " lifetime=" + 
this.lifetimeStr - + "]"); - Enumeration sas = pathName.keys(); - while (sas.hasMoreElements()) { - String sa = sas.nextElement(); - Long average = pathName.get(sa); - result.append("SA('" + sa + "')=" + average); - } - return result.toString(); - } + /** @return String */ + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append(" [#" + this.pulseNumberStr + " lifetime=" + this.lifetimeStr + "]"); + Enumeration sas = pathName.keys(); + while (sas.hasMoreElements()) { + String sa = sas.nextElement(); + Long average = pathName.get(sa); + result.append("SA('" + sa + "')=" + average); + } + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/info/InfoService.java b/src/main/java/it/grid/storm/info/InfoService.java index e40e04ba..df816ca8 100644 --- a/src/main/java/it/grid/storm/info/InfoService.java +++ b/src/main/java/it/grid/storm/info/InfoService.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info; public class InfoService { - public static String getResourcePackage() { - return "it.grid.storm.info.remote.resources"; - } - + public static String getResourcePackage() { + return "it.grid.storm.info.remote.resources"; + } } diff --git a/src/main/java/it/grid/storm/info/SAInfoException.java b/src/main/java/it/grid/storm/info/SAInfoException.java index e266d552..e316902c 100644 --- a/src/main/java/it/grid/storm/info/SAInfoException.java +++ b/src/main/java/it/grid/storm/info/SAInfoException.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info; public class SAInfoException extends Exception { - private static final long serialVersionUID = 1L; - - public SAInfoException(String message) { - super(message); - } + private static final long serialVersionUID = 1L; + public SAInfoException(String message) { + super(message); + } } diff --git a/src/main/java/it/grid/storm/info/SpaceInfoManager.java b/src/main/java/it/grid/storm/info/SpaceInfoManager.java index e23ad360..aa8be342 100644 --- a/src/main/java/it/grid/storm/info/SpaceInfoManager.java +++ b/src/main/java/it/grid/storm/info/SpaceInfoManager.java @@ -1,20 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info; import static it.grid.storm.config.Configuration.DISKUSAGE_SERVICE_ENABLED; -import java.io.FileNotFoundException; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.base.Preconditions; import com.google.common.collect.Lists; - import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.common.types.SizeUnit; import it.grid.storm.config.Configuration; @@ -28,6 +20,10 @@ import it.grid.storm.space.init.UsedSpaceFile.SaUsedSize; import it.grid.storm.srm.types.InvalidTSizeAttributesException; import it.grid.storm.srm.types.TSizeInBytes; +import java.io.FileNotFoundException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SpaceInfoManager { @@ -72,13 +68,14 @@ public void initializeUsedSpace() { } if (Configuration.getInstance().getDiskUsageServiceEnabled()) { - log.info("The remaining {} storage spaces will be initialized by DiskUsage service", - ssni.size()); + log.info( + "The remaining {} storage spaces will be initialized by DiskUsage service", ssni.size()); } else { log.warn( "The remaining {} storage spaces WON'T be initialized with DUs. " + "Please enable DiskUsage service by setting '{}' as true.", - ssni.size(), DISKUSAGE_SERVICE_ENABLED); + ssni.size(), + DISKUSAGE_SERVICE_ENABLED); } } @@ -89,7 +86,7 @@ public final int getQuotasDefined() { /** * @return a list of StorageSpaceData related to SA with quota enabled to be initialized. Can be - * empty. + * empty. */ public List retrieveSSDtoInitializeWithQuota() { @@ -144,8 +141,10 @@ private void updateUsedSpaceOnPersistence(SaUsedSize usedSize) { try { ssd.setUsedSpaceSize(TSizeInBytes.make(usedSize.getUsedSize(), SizeUnit.BYTES)); spaceCatalog.updateStorageSpace(ssd); - log.debug("StorageSpace table updated for SA: '{}' with used size = {}", - usedSize.getSaName(), usedSize.getUsedSize()); + log.debug( + "StorageSpace table updated for SA: '{}' with used size = {}", + usedSize.getSaName(), + usedSize.getUsedSize()); } catch (InvalidTSizeAttributesException | DataAccessException e) { failPersistence(usedSize.getSaName(), e.getMessage()); } diff --git a/src/main/java/it/grid/storm/info/du/DiskUsageExecCommand.java b/src/main/java/it/grid/storm/info/du/DiskUsageExecCommand.java index 375b1bba..b0de385d 100644 --- a/src/main/java/it/grid/storm/info/du/DiskUsageExecCommand.java +++ b/src/main/java/it/grid/storm/info/du/DiskUsageExecCommand.java @@ -1,23 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.du; +import com.google.common.collect.Lists; +import it.grid.storm.space.DUResult; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.time.Instant; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - -import it.grid.storm.space.DUResult; - public class DiskUsageExecCommand { private static final Logger LOG = LoggerFactory.getLogger(DiskUsageExecCommand.class); diff --git a/src/main/java/it/grid/storm/info/du/DiskUsageService.java b/src/main/java/it/grid/storm/info/du/DiskUsageService.java index 6bd51621..fdde6416 100644 --- a/src/main/java/it/grid/storm/info/du/DiskUsageService.java +++ b/src/main/java/it/grid/storm/info/du/DiskUsageService.java @@ -1,22 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.du; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import it.grid.storm.namespace.model.VirtualFS; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; - -import it.grid.storm.namespace.model.VirtualFS; - public class DiskUsageService { public static final int DEFAULT_INITIAL_DELAY = 0; @@ -32,8 +28,8 @@ public class DiskUsageService { private int delay; private int period; - private DiskUsageService(List vfss, ScheduledExecutorService executor, - int delay, int period) { + private DiskUsageService( + List vfss, ScheduledExecutorService executor, int delay, int period) { Preconditions.checkNotNull(vfss, "Invalid null list of Virtual FS"); Preconditions.checkNotNull(executor, "Invalid null scheduled executor service"); @@ -80,8 +76,7 @@ public static DiskUsageService getSingleThreadScheduledService() { return getSingleThreadScheduledService(Lists.newArrayList()); } - public static DiskUsageService getScheduledThreadPoolService(List vfss, - int poolSize) { + public static DiskUsageService getScheduledThreadPoolService(List vfss, int poolSize) { return new DiskUsageService(vfss, Executors.newScheduledThreadPool(poolSize)); } @@ -114,11 +109,12 @@ public synchronized int start() { } log.debug("Starting DiskUsageService ..."); - monitoredSAs.forEach(vfs -> { - DiskUsageTask task = new DiskUsageTask(vfs); - log.debug("Schedule task {} with delay {}s and period {}s", task, delay, period); - executor.scheduleAtFixedRate(task, delay, period, TimeUnit.SECONDS); - }); + monitoredSAs.forEach( + vfs -> { + DiskUsageTask task = new DiskUsageTask(vfs); + log.debug("Schedule task {} with delay {}s and period {}s", task, delay, period); + executor.scheduleAtFixedRate(task, delay, period, TimeUnit.SECONDS); + }); log.debug("Scheduled {} tasks", monitoredSAs.size()); running = true; return monitoredSAs.size(); diff --git a/src/main/java/it/grid/storm/info/du/DiskUsageTask.java b/src/main/java/it/grid/storm/info/du/DiskUsageTask.java index 3f55223d..f5809aed 100644 --- a/src/main/java/it/grid/storm/info/du/DiskUsageTask.java +++ b/src/main/java/it/grid/storm/info/du/DiskUsageTask.java @@ -1,17 +1,9 @@ /** - * 
Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.du; -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.base.Preconditions; - import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.common.types.SizeUnit; import it.grid.storm.namespace.model.VirtualFS; @@ -20,6 +12,10 @@ import it.grid.storm.space.StorageSpaceData; import it.grid.storm.srm.types.InvalidTSizeAttributesException; import it.grid.storm.srm.types.TSizeInBytes; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class DiskUsageTask implements Runnable { @@ -47,8 +43,11 @@ public void run() { log.debug("du-result: {}", result); updateUsedSpaceOnPersistence(spaceToken, result); long seconds = TimeUnit.MILLISECONDS.toSeconds(result.getDurationInMillis()); - log.info("DiskUsageTask for {} successfully ended in {}s with used-size = {} bytes", - spaceToken, seconds, result.getSizeInBytes()); + log.info( + "DiskUsageTask for {} successfully ended in {}s with used-size = {} bytes", + spaceToken, + seconds, + result.getSizeInBytes()); } catch (IOException e) { @@ -72,7 +71,9 @@ private void updateUsedSpaceOnPersistence(String spaceToken, DUResult duResult) ssd.setUsedSpaceSize(TSizeInBytes.make(duResult.getSizeInBytes(), SizeUnit.BYTES)); spaceCatalog.updateStorageSpace(ssd); - log.debug("StorageSpace table updated for SA: '{}' with used size = {}", spaceToken, + log.debug( + "StorageSpace table updated for SA: '{}' with used size = {}", + spaceToken, duResult.getSizeInBytes()); } catch (InvalidTSizeAttributesException | DataAccessException e) { @@ -95,5 +96,4 @@ public String toString() { builder.append("]"); return builder.toString(); } - } diff --git a/src/main/java/it/grid/storm/info/du/DiskUsageUtils.java b/src/main/java/it/grid/storm/info/du/DiskUsageUtils.java index f4f39b1c..8cdc0638 100644 --- a/src/main/java/it/grid/storm/info/du/DiskUsageUtils.java +++ b/src/main/java/it/grid/storm/info/du/DiskUsageUtils.java @@ -1,15 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.du; -import java.time.Instant; -import java.util.List; - import com.google.common.base.Preconditions; - import it.grid.storm.space.DUResult; +import java.time.Instant; +import java.util.List; public class DiskUsageUtils { @@ -26,8 +23,8 @@ public static long parseSize(List output) { return Long.parseLong(sizeStr); } - public static DUResult getResult(String absPath, Instant start, Instant end, - List output) { + public static DUResult getResult( + String absPath, Instant start, Instant end, List output) { Preconditions.checkNotNull(output, "Null output received"); @@ -45,5 +42,4 @@ public static DUResult getResult(String absPath, Instant start, Instant end, return DUResult.failure(absPath, start, end, "NumberFormatException on parsing du output"); } } - } diff --git a/src/main/java/it/grid/storm/info/model/SATree.java b/src/main/java/it/grid/storm/info/model/SATree.java index 778b48c7..d8815818 100644 --- a/src/main/java/it/grid/storm/info/model/SATree.java +++ b/src/main/java/it/grid/storm/info/model/SATree.java @@ -1,9 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; -public class SATree { - -} +public class SATree {} diff --git a/src/main/java/it/grid/storm/info/model/SpaceStatusDetailed.java b/src/main/java/it/grid/storm/info/model/SpaceStatusDetailed.java index 05c99193..1ae23091 100644 --- a/src/main/java/it/grid/storm/info/model/SpaceStatusDetailed.java +++ b/src/main/java/it/grid/storm/info/model/SpaceStatusDetailed.java @@ -1,142 +1,104 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; public class SpaceStatusDetailed extends SpaceStatusSummary { - private long nrDir; - private long nrFiles; - private int levelMax; - private int levelMedian; - private long nrMaxFilesPerDir; - private long nrMedFilesPerDir; - private long oldestAccessedFile; - - public SpaceStatusDetailed(String saAlias, long spaceTotal) - throws IllegalArgumentException { - - super(saAlias, spaceTotal); - } - - /** - * @return the nrDir - */ - public long getNrDir() { - - return nrDir; - } - - /** - * @return the nrFiles - */ - public long getNrFiles() { - - return nrFiles; - } - - /** - * @return the levelMax - */ - public int getLevelMax() { - - return levelMax; - } - - /** - * @return the levelMedian - */ - public int getLevelMedian() { - - return levelMedian; - } - - /** - * @return the nrMaxFilesPerDir - */ - public long getNrMaxFilesPerDir() { - - return nrMaxFilesPerDir; - } - - /** - * @return the nrMedFilesPerDir - */ - public long getNrMedFilesPerDir() { - - return nrMedFilesPerDir; - } - - /** - * @return the oldestAccessedFile - */ - public long getOldestAccessedFile() { - - return oldestAccessedFile; - } - - /** - * @param nrDir - * the nrDir to set - */ - public void setNrDir(long nrDir) { - - this.nrDir = nrDir; - } - - /** - * @param nrFiles - * the nrFiles to set - */ - public void setNrFiles(long nrFiles) { - - this.nrFiles = nrFiles; - } - - /** - * @param levelMax - * the levelMax to set - */ - public void setLevelMax(int levelMax) { - - this.levelMax = levelMax; - } - - /** - * @param levelMedian - * the levelMedian to set - */ - public void setLevelMedian(int levelMedian) { - - this.levelMedian = levelMedian; - } - - /** - * @param nrMaxFilesPerDir - * the nrMaxFilesPerDir to set - */ - public void setNrMaxFilesPerDir(long nrMaxFilesPerDir) { - - this.nrMaxFilesPerDir = nrMaxFilesPerDir; - } - - /** - * @param nrMedFilesPerDir - * the nrMedFilesPerDir to set - */ - public void setNrMedFilesPerDir(long nrMedFilesPerDir) { - - this.nrMedFilesPerDir = nrMedFilesPerDir; - } - - /** - * @param oldestAccessedFile - * the oldestAccessedFile to set - */ - public void setOldestAccessedFile(long oldestAccessedFile) { - - this.oldestAccessedFile = oldestAccessedFile; - } + private long nrDir; + private long nrFiles; + private int levelMax; + private int levelMedian; + private long nrMaxFilesPerDir; + private long nrMedFilesPerDir; + private long oldestAccessedFile; + public SpaceStatusDetailed(String saAlias, long spaceTotal) throws IllegalArgumentException { + + super(saAlias, spaceTotal); + } + + /** @return the nrDir */ + public long getNrDir() { + + return nrDir; + } + + /** @return the nrFiles */ + public long getNrFiles() { + + return nrFiles; + } + + /** @return the levelMax */ + public int getLevelMax() { + + return levelMax; + } + + /** @return the levelMedian */ + public int getLevelMedian() { + + return levelMedian; + } + + /** @return the nrMaxFilesPerDir */ + public long getNrMaxFilesPerDir() { + + return nrMaxFilesPerDir; + } + + /** @return the nrMedFilesPerDir */ + public long getNrMedFilesPerDir() { + + return nrMedFilesPerDir; + } + + /** @return the oldestAccessedFile */ + public long getOldestAccessedFile() { + + return oldestAccessedFile; + } + + /** @param nrDir the nrDir to set */ + public void setNrDir(long nrDir) { + + this.nrDir = nrDir; + } + + /** @param nrFiles the nrFiles to set */ + public void setNrFiles(long nrFiles) { + + this.nrFiles = nrFiles; + } + + /** @param levelMax the levelMax to 
set */ + public void setLevelMax(int levelMax) { + + this.levelMax = levelMax; + } + + /** @param levelMedian the levelMedian to set */ + public void setLevelMedian(int levelMedian) { + + this.levelMedian = levelMedian; + } + + /** @param nrMaxFilesPerDir the nrMaxFilesPerDir to set */ + public void setNrMaxFilesPerDir(long nrMaxFilesPerDir) { + + this.nrMaxFilesPerDir = nrMaxFilesPerDir; + } + + /** @param nrMedFilesPerDir the nrMedFilesPerDir to set */ + public void setNrMedFilesPerDir(long nrMedFilesPerDir) { + + this.nrMedFilesPerDir = nrMedFilesPerDir; + } + + /** @param oldestAccessedFile the oldestAccessedFile to set */ + public void setOldestAccessedFile(long oldestAccessedFile) { + + this.oldestAccessedFile = oldestAccessedFile; + } } diff --git a/src/main/java/it/grid/storm/info/model/SpaceStatusSummary.java b/src/main/java/it/grid/storm/info/model/SpaceStatusSummary.java index 9c73c85d..1913bc3f 100644 --- a/src/main/java/it/grid/storm/info/model/SpaceStatusSummary.java +++ b/src/main/java/it/grid/storm/info/model/SpaceStatusSummary.java @@ -1,17 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.space.StorageSpaceData; - import java.io.IOException; import java.io.StringWriter; - import javax.xml.stream.XMLStreamException; - import org.codehaus.jettison.AbstractXMLStreamWriter; import org.codehaus.jettison.mapped.Configuration; import org.codehaus.jettison.mapped.MappedNamespaceConvention; @@ -21,287 +17,269 @@ public class SpaceStatusSummary { - protected final String saAlias; - /** defined in config/db (static value) **/ - protected final long totalSpace; - /** defined in config/db (static value) **/ - // published by DIP - - protected long usedSpace = -1; - /** info retrieved by sensors **/ - // published by DIP - protected long unavailableSpace = -1; - /** info retrieved by sensors **/ - protected long reservedSpace = -1; - /** info retrieved from DB **/ - // published by DIP SETTED TO ZERO BECAUSE CURRENTLY RETURN FAKE VALUES - // For now do not consider the reserved space, a better management is needed - - private static final ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); - - private static final Logger log = LoggerFactory - .getLogger(SpaceStatusSummary.class); - - /***************************** - * Constructors - */ - - /** - * @param saAlias - * @param totalSpace - * @throws IllegalArgumentException - */ - public SpaceStatusSummary(String saAlias, long totalSpace) - throws IllegalArgumentException { - - if (totalSpace < 0 || saAlias == null) { - log - .error("Unable to create SpaceStatusSummary. Received illegal parameter: saAlias: " - + saAlias + " totalSpace: " + totalSpace); - throw new IllegalArgumentException( - "Unable to create SpaceStatusSummary. 
Received illegal parameter"); - } - this.saAlias = saAlias; - this.totalSpace = totalSpace; - } - - private SpaceStatusSummary(String saAlias, long usedSpace, - long unavailableSpace, long reservedSpace, long totalSpace) { - - this.saAlias = saAlias; - this.usedSpace = usedSpace; - this.unavailableSpace = unavailableSpace; - this.reservedSpace = reservedSpace; - this.totalSpace = totalSpace; - } - - /** - * Produce a SpaceStatusSummary with fields matching exactly the ones - * available on the database - * - * @param saAlias - * @return - * @throws IllegalArgumentException - */ - public static SpaceStatusSummary createFromDB(String saAlias) - throws IllegalArgumentException { - - StorageSpaceData storageSpaceData = catalog.getStorageSpaceByAlias(saAlias); - if (storageSpaceData == null) { - throw new IllegalArgumentException( - "Unable to find a storage space row for alias \'" + saAlias - + "\' from storm Database"); - } else { - if (!storageSpaceData.isInitialized()) { - log - .warn("Building the SpaceStatusSummary from non initialized space with alias \'" - + saAlias + "\'"); - } - SpaceStatusSummary summary = new SpaceStatusSummary(saAlias, - storageSpaceData.getUsedSpaceSize().value(), storageSpaceData - .getUnavailableSpaceSize().value(), storageSpaceData - .getReservedSpaceSize().value(), storageSpaceData.getTotalSpaceSize() - .value()); - return summary; - } - } - - /***************************** - * GETTER methods - ****************************/ - - /** - * @return the saAlias - */ - public String getSaAlias() { - - return saAlias; - } - - /** - * busySpace = used + unavailable + reserved - * - * @return the busySpace - */ - public long getBusySpace() { - - return this.usedSpace + this.reservedSpace + this.unavailableSpace; - } - - /** - * availableSpace = totalSpace - busySpace - * - * @return - */ - public long getAvailableSpace() { - - return this.totalSpace - this.getBusySpace(); - } - - /** - * @return the usedSpace - */ - public long getUsedSpace() { - - return usedSpace; - } - - /** - * @return the unavailableSpace - */ - public long getUnavailableSpace() { - - return unavailableSpace; - } - - /** - * @return the reservedSpace - */ - public long getReservedSpace() { - - return reservedSpace; - } - - /** - * @return the totalSpace - */ - public long getTotalSpace() { - - return totalSpace; - } - - /** - * Real One freeSpace = totalSpace - used - reserved For now... 
freeSpace = - * totalSpace - used - * - * @return the freeSpace - */ - public long getFreeSpace() { - - if (this.totalSpace >= 0) { - // For now do not consider the reserved space, a better management is - // needed - // this.freeSpace = this.totalSpace - this.usedSpace - this.reservedSpace; - return this.totalSpace - this.usedSpace; - } else { - return -1; - } - } - - /***************************** - * SETTER methods - ****************************/ - - /** - * @param usedSpace - * the usedSpace to set - */ - public void setUsedSpace(long usedSpace) { - - this.usedSpace = usedSpace; - } - - /** - * @param unavailableSpace - * the unavailableSpace to set - */ - public void setUnavailableSpace(long unavailableSpace) { - - this.unavailableSpace = unavailableSpace; - } - - /** - * @param reservedSpace - * the reservedSpace to set - */ - public void setReservedSpace(long reservedSpace) { - - this.reservedSpace = reservedSpace; - } - - /******************************* - * JSON Building - */ - - /** - * String saAlias; long busySpace; // busySpace = used + unavailable + - * reserved long usedSpace; //info retrieved by sensors long unavailableSpace; - * // info retrieved by sensors long reservedSpace; // info retrieved from DB - * long totalSpace; // defined in config/db (static value) long freeSpace; // - * freeSpace = totalSpace - used - reserved; - */ - public String getJsonFormat() { - - String result = ""; - StringWriter strWriter = new StringWriter(); - Configuration config = new Configuration(); - MappedNamespaceConvention con = new MappedNamespaceConvention(config); - - try { - AbstractXMLStreamWriter w = new MappedXMLStreamWriter(con, strWriter); - w.writeStartDocument(); - // start main element - w.writeStartElement("sa-status"); - // Alias - w.writeStartElement("alias"); - w.writeCharacters(this.getSaAlias()); - w.writeEndElement(); - // busy space - w.writeStartElement("busy-space"); - w.writeCharacters("" + this.getBusySpace()); - w.writeEndElement(); - // used space - w.writeStartElement("used-space"); - w.writeCharacters("" + this.getUsedSpace()); - w.writeEndElement(); - // unavailable space - w.writeStartElement("unavailable-space"); - w.writeCharacters("" + this.getUnavailableSpace()); - w.writeEndElement(); - // reserved space - w.writeStartElement("reserved-space"); - w.writeCharacters("" + this.getReservedSpace()); - w.writeEndElement(); - // total space - w.writeStartElement("total-space"); - w.writeCharacters("" + this.getTotalSpace()); - w.writeEndElement(); - // free space - w.writeStartElement("free-space"); - w.writeCharacters("" + this.getFreeSpace()); - w.writeEndElement(); - // available space - w.writeStartElement("available-space"); - w.writeCharacters("" + this.getAvailableSpace()); - w.writeEndElement(); - // end main element - w.writeEndElement(); - w.writeEndDocument(); - w.close(); - } catch (XMLStreamException e) { - log - .error("Unable to produce Json representation of the object. XMLStreamException: " - + e.getMessage()); - } - try { - strWriter.close(); - } catch (IOException e) { - log - .error("Unable to close the StringWriter for Json representation of the object. 
IOException: " - + e.getMessage()); - } - result = strWriter.toString(); - return result; - } - - @Override - public String toString() { - - return "SpaceStatusSummary [getSaAlias()=" + getSaAlias() - + ", getBusySpace()=" + getBusySpace() + ", getAvailableSpace()=" - + getAvailableSpace() + ", getUsedSpace()=" + getUsedSpace() - + ", getUnavailableSpace()=" + getUnavailableSpace() - + ", getReservedSpace()=" + getReservedSpace() + ", getTotalSpace()=" - + getTotalSpace() + ", getFreeSpace()=" + getFreeSpace() + "]"; - } + protected final String saAlias; + /** defined in config/db (static value) * */ + protected final long totalSpace; + /** defined in config/db (static value) * */ + // published by DIP + + protected long usedSpace = -1; + /** info retrieved by sensors * */ + // published by DIP + protected long unavailableSpace = -1; + /** info retrieved by sensors * */ + protected long reservedSpace = -1; + /** info retrieved from DB * */ + // published by DIP SETTED TO ZERO BECAUSE CURRENTLY RETURN FAKE VALUES + // For now do not consider the reserved space, a better management is needed + + private static final ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); + + private static final Logger log = LoggerFactory.getLogger(SpaceStatusSummary.class); + + /** *************************** Constructors */ + + /** + * @param saAlias + * @param totalSpace + * @throws IllegalArgumentException + */ + public SpaceStatusSummary(String saAlias, long totalSpace) throws IllegalArgumentException { + + if (totalSpace < 0 || saAlias == null) { + log.error( + "Unable to create SpaceStatusSummary. Received illegal parameter: saAlias: " + + saAlias + + " totalSpace: " + + totalSpace); + throw new IllegalArgumentException( + "Unable to create SpaceStatusSummary. 
Received illegal parameter"); + } + this.saAlias = saAlias; + this.totalSpace = totalSpace; + } + + private SpaceStatusSummary( + String saAlias, long usedSpace, long unavailableSpace, long reservedSpace, long totalSpace) { + + this.saAlias = saAlias; + this.usedSpace = usedSpace; + this.unavailableSpace = unavailableSpace; + this.reservedSpace = reservedSpace; + this.totalSpace = totalSpace; + } + + /** + * Produce a SpaceStatusSummary with fields matching exactly the ones available on the database + * + * @param saAlias + * @return + * @throws IllegalArgumentException + */ + public static SpaceStatusSummary createFromDB(String saAlias) throws IllegalArgumentException { + + StorageSpaceData storageSpaceData = catalog.getStorageSpaceByAlias(saAlias); + if (storageSpaceData == null) { + throw new IllegalArgumentException( + "Unable to find a storage space row for alias \'" + saAlias + "\' from storm Database"); + } else { + if (!storageSpaceData.isInitialized()) { + log.warn( + "Building the SpaceStatusSummary from non initialized space with alias \'" + + saAlias + + "\'"); + } + SpaceStatusSummary summary = + new SpaceStatusSummary( + saAlias, + storageSpaceData.getUsedSpaceSize().value(), + storageSpaceData.getUnavailableSpaceSize().value(), + storageSpaceData.getReservedSpaceSize().value(), + storageSpaceData.getTotalSpaceSize().value()); + return summary; + } + } + + /** *************************** GETTER methods ************************** */ + + /** @return the saAlias */ + public String getSaAlias() { + + return saAlias; + } + + /** + * busySpace = used + unavailable + reserved + * + * @return the busySpace + */ + public long getBusySpace() { + + return this.usedSpace + this.reservedSpace + this.unavailableSpace; + } + + /** + * availableSpace = totalSpace - busySpace + * + * @return + */ + public long getAvailableSpace() { + + return this.totalSpace - this.getBusySpace(); + } + + /** @return the usedSpace */ + public long getUsedSpace() { + + return usedSpace; + } + + /** @return the unavailableSpace */ + public long getUnavailableSpace() { + + return unavailableSpace; + } + + /** @return the reservedSpace */ + public long getReservedSpace() { + + return reservedSpace; + } + + /** @return the totalSpace */ + public long getTotalSpace() { + + return totalSpace; + } + + /** + * Real One freeSpace = totalSpace - used - reserved For now... 
freeSpace = totalSpace - used + * + * @return the freeSpace + */ + public long getFreeSpace() { + + if (this.totalSpace >= 0) { + // For now do not consider the reserved space, a better management is + // needed + // this.freeSpace = this.totalSpace - this.usedSpace - this.reservedSpace; + return this.totalSpace - this.usedSpace; + } else { + return -1; + } + } + + /** *************************** SETTER methods ************************** */ + + /** @param usedSpace the usedSpace to set */ + public void setUsedSpace(long usedSpace) { + + this.usedSpace = usedSpace; + } + + /** @param unavailableSpace the unavailableSpace to set */ + public void setUnavailableSpace(long unavailableSpace) { + + this.unavailableSpace = unavailableSpace; + } + + /** @param reservedSpace the reservedSpace to set */ + public void setReservedSpace(long reservedSpace) { + + this.reservedSpace = reservedSpace; + } + + /** ***************************** JSON Building */ + + /** + * String saAlias; long busySpace; // busySpace = used + unavailable + reserved long usedSpace; + * //info retrieved by sensors long unavailableSpace; // info retrieved by sensors long + * reservedSpace; // info retrieved from DB long totalSpace; // defined in config/db (static + * value) long freeSpace; // freeSpace = totalSpace - used - reserved; + */ + public String getJsonFormat() { + + String result = ""; + StringWriter strWriter = new StringWriter(); + Configuration config = new Configuration(); + MappedNamespaceConvention con = new MappedNamespaceConvention(config); + + try { + AbstractXMLStreamWriter w = new MappedXMLStreamWriter(con, strWriter); + w.writeStartDocument(); + // start main element + w.writeStartElement("sa-status"); + // Alias + w.writeStartElement("alias"); + w.writeCharacters(this.getSaAlias()); + w.writeEndElement(); + // busy space + w.writeStartElement("busy-space"); + w.writeCharacters("" + this.getBusySpace()); + w.writeEndElement(); + // used space + w.writeStartElement("used-space"); + w.writeCharacters("" + this.getUsedSpace()); + w.writeEndElement(); + // unavailable space + w.writeStartElement("unavailable-space"); + w.writeCharacters("" + this.getUnavailableSpace()); + w.writeEndElement(); + // reserved space + w.writeStartElement("reserved-space"); + w.writeCharacters("" + this.getReservedSpace()); + w.writeEndElement(); + // total space + w.writeStartElement("total-space"); + w.writeCharacters("" + this.getTotalSpace()); + w.writeEndElement(); + // free space + w.writeStartElement("free-space"); + w.writeCharacters("" + this.getFreeSpace()); + w.writeEndElement(); + // available space + w.writeStartElement("available-space"); + w.writeCharacters("" + this.getAvailableSpace()); + w.writeEndElement(); + // end main element + w.writeEndElement(); + w.writeEndDocument(); + w.close(); + } catch (XMLStreamException e) { + log.error( + "Unable to produce Json representation of the object. XMLStreamException: " + + e.getMessage()); + } + try { + strWriter.close(); + } catch (IOException e) { + log.error( + "Unable to close the StringWriter for Json representation of the object. 
IOException: " + + e.getMessage()); + } + result = strWriter.toString(); + return result; + } + + @Override + public String toString() { + + return "SpaceStatusSummary [getSaAlias()=" + + getSaAlias() + + ", getBusySpace()=" + + getBusySpace() + + ", getAvailableSpace()=" + + getAvailableSpace() + + ", getUsedSpace()=" + + getUsedSpace() + + ", getUnavailableSpace()=" + + getUnavailableSpace() + + ", getReservedSpace()=" + + getReservedSpace() + + ", getTotalSpace()=" + + getTotalSpace() + + ", getFreeSpace()=" + + getFreeSpace() + + "]"; + } } diff --git a/src/main/java/it/grid/storm/info/model/TreeNode.java b/src/main/java/it/grid/storm/info/model/TreeNode.java index 7352b889..3f7c1e2e 100644 --- a/src/main/java/it/grid/storm/info/model/TreeNode.java +++ b/src/main/java/it/grid/storm/info/model/TreeNode.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; @@ -9,476 +8,405 @@ public class TreeNode { - // ******************* - // Parameters referring this node - private String path = null; - private int nrChildren = -1; - private long nrFiles = 0; - private long sumSize = 0; - private long minSize = 0; - private long maxSize = 0; - - // ******************** - // Time properties - // - private boolean toCompute = true; - // Time retrieved from FS - private long modificationTime = -1L; - // Time set when a check of values between FS-path and Node occurs - private long checkTime = -1L; - // Time set when the Service see a incoherence between FS-path and Node - private long toComputeTime = -1L; - // Time set when the Service performs a synch between FS and Node - private long nodeUpdateTime = -1L; - - // ******************* - // Parameters referring the tree starting from this node - // - // Sum of sizes of all files under the tree with this node as root. - private long totSize = 0; - // Sum of sizes of all files in term of BLOCK sizes under the tree with this - // node as root. 
- private long totApparentSize = 0; - - // ******************* - // Parameters to navigate the Tree, and summarizing the underlying tree - // - // Position - private int depthFromRoot = -1; - private TreeStructureSummary tsSummary; - private TreeNode father = null; - private TreeNode firstChild = null; - private TreeNode lastChild = null; - private TreeNode brother = null; - - // **************************** - // Constructors - // - - /** - * Simplest constructor - */ - public TreeNode(String path) { - - super(); - this.path = path; - } - - // **************************** - // Setter and Getter methods - // - - public final String getPath() { - - return path; - } - - public final int getDepthFromRoot() { - - return depthFromRoot; - } - - public final TreeStructureSummary getTsSummary() { - - return tsSummary; - } - - public final TreeNode getFather() { - - return father; - } - - public final void setFather(TreeNode father) { - - this.father = father; - } - - /** - * @param brother - * the brother to set - */ - public final void setBrother(TreeNode brother) { - - this.brother = brother; - } - - /** - * @return the brother - */ - public final TreeNode getBrother() { - - return brother; - } - - /** - * @param nrChildren - * the nrChildren to set - */ - public final void setNrChildren(int nrChildren) { - - this.nrChildren = nrChildren; - } - - /** - * @return the nrChildren - */ - public final int getNrChildren() { - - return nrChildren; - } - - /** - * @param nrFiles - * the nrFiles to set - */ - public final void setNrFiles(long nrFiles) { - - this.nrFiles = nrFiles; - } - - /** - * @return the nrFiles - */ - public final long getNrFiles() { - - return nrFiles; - } + // ******************* + // Parameters referring this node + private String path = null; + private int nrChildren = -1; + private long nrFiles = 0; + private long sumSize = 0; + private long minSize = 0; + private long maxSize = 0; - /** - * @param sumSize - * the sumSize to set - */ - public final void setSumSize(long sumSize) { + // ******************** + // Time properties + // + private boolean toCompute = true; + // Time retrieved from FS + private long modificationTime = -1L; + // Time set when a check of values between FS-path and Node occurs + private long checkTime = -1L; + // Time set when the Service see a incoherence between FS-path and Node + private long toComputeTime = -1L; + // Time set when the Service performs a synch between FS and Node + private long nodeUpdateTime = -1L; - this.sumSize = sumSize; - } + // ******************* + // Parameters referring the tree starting from this node + // + // Sum of sizes of all files under the tree with this node as root. + private long totSize = 0; + // Sum of sizes of all files in term of BLOCK sizes under the tree with this + // node as root. 
+ private long totApparentSize = 0; - /** - * @return the sumSize - */ - public final long getSumSize() { + // ******************* + // Parameters to navigate the Tree, and summarizing the underlying tree + // + // Position + private int depthFromRoot = -1; + private TreeStructureSummary tsSummary; + private TreeNode father = null; + private TreeNode firstChild = null; + private TreeNode lastChild = null; + private TreeNode brother = null; - return sumSize; - } + // **************************** + // Constructors + // - /** - * @param minSize - * the minSize to set - */ - public final void setMinSize(long minSize) { + /** Simplest constructor */ + public TreeNode(String path) { - this.minSize = minSize; - } + super(); + this.path = path; + } - /** - * @return the minSize - */ - public final long getMinSize() { + // **************************** + // Setter and Getter methods + // - return minSize; - } + public final String getPath() { - /** - * @param maxSize - * the maxSize to set - */ - public final void setMaxSize(long maxSize) { - - this.maxSize = maxSize; - } - - /** - * @return the maxSize - */ - public final long getMaxSize() { - - return maxSize; - } - - /** - * @param totSize - * the totSize to set - */ - public final void setTotSize(long totSize) { - - this.totSize = totSize; - } - - /** - * @return the totSize - */ - public final long getTotSize() { - - return totSize; - } - - /** - * @param totApparentSize - * the totApparentSize to set - */ - public final void setTotApparentSize(long totApparentSize) { - - this.totApparentSize = totApparentSize; - } - - /** - * @return the totApparentSize - */ - public final long getTotApparentSize() { + return path; + } - return totApparentSize; - } + public final int getDepthFromRoot() { - /** - * @param toCompute - * the toCompute to set - */ - public final void setToCompute(boolean toCompute) { + return depthFromRoot; + } - this.toCompute = toCompute; - } + public final TreeStructureSummary getTsSummary() { - /** - * @return the toCompute - */ - public final boolean isToCompute() { + return tsSummary; + } - return toCompute; - } + public final TreeNode getFather() { - /** - * @param dateModification - * the dateModification to set - */ - public final void setModificationTime(long dateModification) { + return father; + } - this.modificationTime = dateModification; - } + public final void setFather(TreeNode father) { - /** - * @return the dateModification - */ - public final long getModificationTime() { + this.father = father; + } - return modificationTime; - } + /** @param brother the brother to set */ + public final void setBrother(TreeNode brother) { - /** - * @param checkTime - * the checkTime to set - */ - public void setCheckTime(long checkTime) { + this.brother = brother; + } - this.checkTime = checkTime; - } + /** @return the brother */ + public final TreeNode getBrother() { - /** - * @return the checkTime - */ - public long getCheckTime() { + return brother; + } - return checkTime; - } + /** @param nrChildren the nrChildren to set */ + public final void setNrChildren(int nrChildren) { - /** - * @param toComputeTime - * the toComputeTime to set - */ - public void setToComputeTime(long toComputeTime) { + this.nrChildren = nrChildren; + } - this.toComputeTime = toComputeTime; - } + /** @return the nrChildren */ + public final int getNrChildren() { - /** - * @return the toComputeTime - */ - public long getToComputeTime() { + return nrChildren; + } - return toComputeTime; - } + /** @param nrFiles the nrFiles to set */ + public final void 
setNrFiles(long nrFiles) { - /** - * @param nodeUpdateTime - * the nodeUpdateTime to set - */ - public void setNodeUpdateTime(long nodeUpdateTime) { + this.nrFiles = nrFiles; + } - this.nodeUpdateTime = nodeUpdateTime; - } + /** @return the nrFiles */ + public final long getNrFiles() { - /** - * @return the nodeUpdateTime - */ - public long getNodeUpdateTime() { + return nrFiles; + } - return nodeUpdateTime; - } + /** @param sumSize the sumSize to set */ + public final void setSumSize(long sumSize) { - public final TreeNode getFirstChild() { + this.sumSize = sumSize; + } - return firstChild; - } + /** @return the sumSize */ + public final long getSumSize() { - public final void setFirstChild(TreeNode firstChild) { + return sumSize; + } - this.firstChild = firstChild; - } - - public final TreeNode getLastChild() { - - return lastChild; - } - - public final void setLastChild(TreeNode lastChild) { - - this.lastChild = lastChild; - } - - public final void setDepthFromRoot(int depthFromRoot) { - - this.depthFromRoot = depthFromRoot; - } - - // **************************** - // Business Methods - // - - private void setToComputeUntilRoot() { - - TreeNode cursor = this.father; - while (cursor != null) { - if (!(cursor.toCompute)) { - cursor.toCompute = true; - cursor.setToComputeTime(System.currentTimeMillis()); - } - } - } - - /** - * Modification of path tree: ADDING a child node - * - * @param child - */ - public final void addChildNode(TreeNode child) { - - // Set the modification Time - - setModificationTime(System.currentTimeMillis()); - - if (firstChild == null) { - firstChild = child; - } - // If children exist, the brother of the last is the last node added - if (lastChild != null) { - lastChild.setBrother(child); - } - - // The new last child is the last node added - this.lastChild = child; - - // Set the father relationship - child.father = this; - - // Set the depth of the child - child.depthFromRoot = this.depthFromRoot + 1; - - // Increase the number of children - this.nrChildren++; - - boolean toSetComp = false; - - // Summary: Max branches check - if (this.nrChildren > this.tsSummary.getMaxBranches()) { - // new max - this.tsSummary.setMaxBranches(this.nrChildren); - toSetComp = true; - } - - // Summary: Max distance from leaves - if (this.tsSummary.getMaxDepthToLeaves() == 0) { - this.tsSummary.setMaxDepthToLeaves(1); - toSetComp = true; - } - - // Summary: Max files per node - if (this.tsSummary.getMaxFilePerNode() < child.getNrFiles()) { - this.tsSummary.setMaxFilePerNode(child.getNrFiles()); - toSetComp = true; - } - - if (toSetComp) { - setToComputeUntilRoot(); - } - } - - /** - * Building a list containing all the children - * - * @return - */ - public final List getChildren() { - - ArrayList children = new ArrayList(); - if (firstChild != null) { - children.add(firstChild); - TreeNode cursor = firstChild; - while (cursor.brother != null) { - cursor = cursor.brother; - children.add(cursor); - } - } - return children; - } - - /** - * Update node parameters from children info - * - */ - public final void compute() { - - List children = getChildren(); - if (!(children.isEmpty())) { - int maxDistanceLeaves = -1; - int maxBranches = -1; - long maxNodes = -1; - - // Search for maxes - for (TreeNode treeNode : children) { - TreeStructureSummary ts = treeNode.getTsSummary(); - if (ts.getMaxBranches() > maxBranches) { - maxBranches = ts.getMaxBranches(); - } - if (ts.getMaxDepthToLeaves() > maxDistanceLeaves) { - maxDistanceLeaves = ts.getMaxDepthToLeaves(); - } - if 
(ts.getMaxFilePerNode() > maxNodes) { - maxNodes = ts.getMaxFilePerNode(); - } - } - - boolean toSetComp = false; - - // Check and update - if (maxDistanceLeaves > this.tsSummary.getMaxDepthToLeaves()) { - this.tsSummary.setMaxDepthToLeaves(maxDistanceLeaves); - toSetComp = true; - } - if (maxBranches > this.tsSummary.getMaxBranches()) { - this.tsSummary.setMaxBranches(maxBranches); - toSetComp = true; - } - if (maxNodes > this.tsSummary.getMaxFilePerNode()) { - this.tsSummary.setMaxFilePerNode(maxNodes); - toSetComp = true; - } - - if (toSetComp) { - setToComputeUntilRoot(); - } - } - - this.toCompute = false; - setNodeUpdateTime(System.currentTimeMillis()); - } + /** @param minSize the minSize to set */ + public final void setMinSize(long minSize) { + this.minSize = minSize; + } + + /** @return the minSize */ + public final long getMinSize() { + + return minSize; + } + + /** @param maxSize the maxSize to set */ + public final void setMaxSize(long maxSize) { + + this.maxSize = maxSize; + } + + /** @return the maxSize */ + public final long getMaxSize() { + + return maxSize; + } + + /** @param totSize the totSize to set */ + public final void setTotSize(long totSize) { + + this.totSize = totSize; + } + + /** @return the totSize */ + public final long getTotSize() { + + return totSize; + } + + /** @param totApparentSize the totApparentSize to set */ + public final void setTotApparentSize(long totApparentSize) { + + this.totApparentSize = totApparentSize; + } + + /** @return the totApparentSize */ + public final long getTotApparentSize() { + + return totApparentSize; + } + + /** @param toCompute the toCompute to set */ + public final void setToCompute(boolean toCompute) { + + this.toCompute = toCompute; + } + + /** @return the toCompute */ + public final boolean isToCompute() { + + return toCompute; + } + + /** @param dateModification the dateModification to set */ + public final void setModificationTime(long dateModification) { + + this.modificationTime = dateModification; + } + + /** @return the dateModification */ + public final long getModificationTime() { + + return modificationTime; + } + + /** @param checkTime the checkTime to set */ + public void setCheckTime(long checkTime) { + + this.checkTime = checkTime; + } + + /** @return the checkTime */ + public long getCheckTime() { + + return checkTime; + } + + /** @param toComputeTime the toComputeTime to set */ + public void setToComputeTime(long toComputeTime) { + + this.toComputeTime = toComputeTime; + } + + /** @return the toComputeTime */ + public long getToComputeTime() { + + return toComputeTime; + } + + /** @param nodeUpdateTime the nodeUpdateTime to set */ + public void setNodeUpdateTime(long nodeUpdateTime) { + + this.nodeUpdateTime = nodeUpdateTime; + } + + /** @return the nodeUpdateTime */ + public long getNodeUpdateTime() { + + return nodeUpdateTime; + } + + public final TreeNode getFirstChild() { + + return firstChild; + } + + public final void setFirstChild(TreeNode firstChild) { + + this.firstChild = firstChild; + } + + public final TreeNode getLastChild() { + + return lastChild; + } + + public final void setLastChild(TreeNode lastChild) { + + this.lastChild = lastChild; + } + + public final void setDepthFromRoot(int depthFromRoot) { + + this.depthFromRoot = depthFromRoot; + } + + // **************************** + // Business Methods + // + + private void setToComputeUntilRoot() { + + TreeNode cursor = this.father; + while (cursor != null) { + if (!(cursor.toCompute)) { + cursor.toCompute = true; + 
cursor.setToComputeTime(System.currentTimeMillis()); + } + } + } + + /** + * Modification of path tree: ADDING a child node + * + * @param child + */ + public final void addChildNode(TreeNode child) { + + // Set the modification Time + + setModificationTime(System.currentTimeMillis()); + + if (firstChild == null) { + firstChild = child; + } + // If children exist, the brother of the last is the last node added + if (lastChild != null) { + lastChild.setBrother(child); + } + + // The new last child is the last node added + this.lastChild = child; + + // Set the father relationship + child.father = this; + + // Set the depth of the child + child.depthFromRoot = this.depthFromRoot + 1; + + // Increase the number of children + this.nrChildren++; + + boolean toSetComp = false; + + // Summary: Max branches check + if (this.nrChildren > this.tsSummary.getMaxBranches()) { + // new max + this.tsSummary.setMaxBranches(this.nrChildren); + toSetComp = true; + } + + // Summary: Max distance from leaves + if (this.tsSummary.getMaxDepthToLeaves() == 0) { + this.tsSummary.setMaxDepthToLeaves(1); + toSetComp = true; + } + + // Summary: Max files per node + if (this.tsSummary.getMaxFilePerNode() < child.getNrFiles()) { + this.tsSummary.setMaxFilePerNode(child.getNrFiles()); + toSetComp = true; + } + + if (toSetComp) { + setToComputeUntilRoot(); + } + } + + /** + * Building a list containing all the children + * + * @return + */ + public final List getChildren() { + + ArrayList children = new ArrayList(); + if (firstChild != null) { + children.add(firstChild); + TreeNode cursor = firstChild; + while (cursor.brother != null) { + cursor = cursor.brother; + children.add(cursor); + } + } + return children; + } + + /** Update node parameters from children info */ + public final void compute() { + + List children = getChildren(); + if (!(children.isEmpty())) { + int maxDistanceLeaves = -1; + int maxBranches = -1; + long maxNodes = -1; + + // Search for maxes + for (TreeNode treeNode : children) { + TreeStructureSummary ts = treeNode.getTsSummary(); + if (ts.getMaxBranches() > maxBranches) { + maxBranches = ts.getMaxBranches(); + } + if (ts.getMaxDepthToLeaves() > maxDistanceLeaves) { + maxDistanceLeaves = ts.getMaxDepthToLeaves(); + } + if (ts.getMaxFilePerNode() > maxNodes) { + maxNodes = ts.getMaxFilePerNode(); + } + } + + boolean toSetComp = false; + + // Check and update + if (maxDistanceLeaves > this.tsSummary.getMaxDepthToLeaves()) { + this.tsSummary.setMaxDepthToLeaves(maxDistanceLeaves); + toSetComp = true; + } + if (maxBranches > this.tsSummary.getMaxBranches()) { + this.tsSummary.setMaxBranches(maxBranches); + toSetComp = true; + } + if (maxNodes > this.tsSummary.getMaxFilePerNode()) { + this.tsSummary.setMaxFilePerNode(maxNodes); + toSetComp = true; + } + + if (toSetComp) { + setToComputeUntilRoot(); + } + } + + this.toCompute = false; + setNodeUpdateTime(System.currentTimeMillis()); + } } diff --git a/src/main/java/it/grid/storm/info/model/TreeStructureSummary.java b/src/main/java/it/grid/storm/info/model/TreeStructureSummary.java index 3c4597bc..f24a7e20 100644 --- a/src/main/java/it/grid/storm/info/model/TreeStructureSummary.java +++ b/src/main/java/it/grid/storm/info/model/TreeStructureSummary.java @@ -1,86 +1,93 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; public class TreeStructureSummary { - private String path; - private long nodes; - private long files; - private int maxDepthToLeaves; - private int maxBranches; - private long maxFilePerNode; + private String path; + private long nodes; + private long files; + private int maxDepthToLeaves; + private int maxBranches; + private long maxFilePerNode; - public TreeStructureSummary(String path) { + public TreeStructureSummary(String path) { - super(); - this.path = path; - } + super(); + this.path = path; + } - public final String getPath() { + public final String getPath() { - return path; - } + return path; + } - public final long getNodes() { + public final long getNodes() { - return nodes; - } + return nodes; + } - public final void setNodes(long nodes) { + public final void setNodes(long nodes) { - this.nodes = nodes; - } + this.nodes = nodes; + } - public final long getFiles() { + public final long getFiles() { - return files; - } + return files; + } - public final void setFiles(long files) { + public final void setFiles(long files) { - this.files = files; - } + this.files = files; + } - public final int getMaxDepthToLeaves() { + public final int getMaxDepthToLeaves() { - return maxDepthToLeaves; - } + return maxDepthToLeaves; + } - public final void setMaxDepthToLeaves(int maxDepthToLeaves) { + public final void setMaxDepthToLeaves(int maxDepthToLeaves) { - this.maxDepthToLeaves = maxDepthToLeaves; - } + this.maxDepthToLeaves = maxDepthToLeaves; + } - public final int getMaxBranches() { + public final int getMaxBranches() { - return maxBranches; - } + return maxBranches; + } - public final void setMaxBranches(int maxBranches) { + public final void setMaxBranches(int maxBranches) { - this.maxBranches = maxBranches; - } + this.maxBranches = maxBranches; + } - public final long getMaxFilePerNode() { + public final long getMaxFilePerNode() { - return maxFilePerNode; - } + return maxFilePerNode; + } - public final void setMaxFilePerNode(long maxFilePerNode) { + public final void setMaxFilePerNode(long maxFilePerNode) { - this.maxFilePerNode = maxFilePerNode; - } + this.maxFilePerNode = maxFilePerNode; + } - @Override - public String toString() { - - return "TreeStructureSummary [nodes=" + nodes + ", files=" + files - + ", maxDepthToLeaves=" + maxDepthToLeaves + ", maxBranches=" - + maxBranches + ", maxFilePerNode=" + maxFilePerNode + ", path=" + path - + "]"; - } + @Override + public String toString() { + return "TreeStructureSummary [nodes=" + + nodes + + ", files=" + + files + + ", maxDepthToLeaves=" + + maxDepthToLeaves + + ", maxBranches=" + + maxBranches + + ", maxFilePerNode=" + + maxFilePerNode + + ", path=" + + path + + "]"; + } } diff --git a/src/main/java/it/grid/storm/info/model/VOInfoStatusData.java b/src/main/java/it/grid/storm/info/model/VOInfoStatusData.java index 2e544c46..f50c400e 100644 --- a/src/main/java/it/grid/storm/info/model/VOInfoStatusData.java +++ b/src/main/java/it/grid/storm/info/model/VOInfoStatusData.java @@ -1,53 +1,41 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.model; import it.grid.storm.srm.types.TSizeInBytes; /** - *
 * Title:
- *
- * Description:
- *
- * Copyright: Copyright (c) 2006
- *
- * Company: INFN-CNAF
- *
+ *
+ * Description:
+ *
+ * Copyright: Copyright (c) 2006
+ *
+ *
Company: INFN-CNAF + * * @author R.Zappi * @version 1.0 */ public class VOInfoStatusData { - private String voInfoLocalIdentifier = null; - private TSizeInBytes usedSpaceNearLine = TSizeInBytes.makeEmpty(); - private TSizeInBytes availableSpaceNearLine = TSizeInBytes.makeEmpty(); - private TSizeInBytes ReservedSpaceNearLine = TSizeInBytes.makeEmpty(); - private TSizeInBytes usedSpaceOnLine = TSizeInBytes.makeEmpty(); - private TSizeInBytes availableSpaceOnLine = TSizeInBytes.makeEmpty(); - private TSizeInBytes ReservedSpaceOnLine = TSizeInBytes.makeEmpty(); + private String voInfoLocalIdentifier = null; + private TSizeInBytes usedSpaceNearLine = TSizeInBytes.makeEmpty(); + private TSizeInBytes availableSpaceNearLine = TSizeInBytes.makeEmpty(); + private TSizeInBytes ReservedSpaceNearLine = TSizeInBytes.makeEmpty(); + private TSizeInBytes usedSpaceOnLine = TSizeInBytes.makeEmpty(); + private TSizeInBytes availableSpaceOnLine = TSizeInBytes.makeEmpty(); + private TSizeInBytes ReservedSpaceOnLine = TSizeInBytes.makeEmpty(); - public VOInfoStatusData() { + public VOInfoStatusData() {} - } + public void setVOInfoLocalID(String voInfoLocalID) { - public void setVOInfoLocalID(String voInfoLocalID) { + this.voInfoLocalIdentifier = voInfoLocalID; + } - this.voInfoLocalIdentifier = voInfoLocalID; - } - - public String getVOInfoLocalID() { - - return this.voInfoLocalIdentifier; - } + public String getVOInfoLocalID() { + return this.voInfoLocalIdentifier; + } } diff --git a/src/main/java/it/grid/storm/info/remote/Constants.java b/src/main/java/it/grid/storm/info/remote/Constants.java index 60fd5f81..4ac5f90e 100644 --- a/src/main/java/it/grid/storm/info/remote/Constants.java +++ b/src/main/java/it/grid/storm/info/remote/Constants.java @@ -1,47 +1,43 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.remote; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class Constants { - public static final String ENCODING_SCHEME = "UTF-8"; + public static final String ENCODING_SCHEME = "UTF-8"; - public static final String RESOURCE = "info/status"; + public static final String RESOURCE = "info/status"; - // public static final String VERSION = "1.0"; + // public static final String VERSION = "1.0"; - public static final String UPDATE_OPERATION = "update"; + public static final String UPDATE_OPERATION = "update"; - public static final String TOTAL_SPACE_KEY = "total"; + public static final String TOTAL_SPACE_KEY = "total"; - public static final String USED_SPACE_KEY = "used"; + public static final String USED_SPACE_KEY = "used"; - public static final String RESERVED_SPACE_KEY = "reserved"; + public static final String RESERVED_SPACE_KEY = "reserved"; - public static final String UNAVALILABLE_SPACE_KEY = "unavailable"; + public static final String UNAVALILABLE_SPACE_KEY = "unavailable"; - /* - * get: /RESOURCE/alias put: - * /RESOURCE/alias/UPDATE_OPERATION?TOTAL_SPACE_KEY=total - * &USED_SPACE_KEY=used&RESERVED_SPACE_KEY - * =reserved&UNAVALILABLE_SPACE_KEY=unavailable put: - * /RESOURCE/alias/UPDATE_OPERATION - * ?USED_SPACE_KEY=used&RESERVED_SPACE_KEY=reserved - * &UNAVALILABLE_SPACE_KEY=unavailable put: - * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY - * =used&RESERVED_SPACE_KEY=reserved put: - * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY - * =used&UNAVALILABLE_SPACE_KEY=unavailable put: - * /RESOURCE/alias/UPDATE_OPERATION - * ?RESERVED_SPACE_KEY=reserved&UNAVALILABLE_SPACE_KEY=unavailable put: - * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY=used put: - * /RESOURCE/alias/UPDATE_OPERATION?RESERVED_SPACE_KEY=reserved put: - * /RESOURCE/alias/UPDATE_OPERATION?UNAVALILABLE_SPACE_KEY=unavailable - */ + /* + * get: /RESOURCE/alias put: + * /RESOURCE/alias/UPDATE_OPERATION?TOTAL_SPACE_KEY=total + * &USED_SPACE_KEY=used&RESERVED_SPACE_KEY + * =reserved&UNAVALILABLE_SPACE_KEY=unavailable put: + * /RESOURCE/alias/UPDATE_OPERATION + * ?USED_SPACE_KEY=used&RESERVED_SPACE_KEY=reserved + * &UNAVALILABLE_SPACE_KEY=unavailable put: + * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY + * =used&RESERVED_SPACE_KEY=reserved put: + * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY + * =used&UNAVALILABLE_SPACE_KEY=unavailable put: + * /RESOURCE/alias/UPDATE_OPERATION + * ?RESERVED_SPACE_KEY=reserved&UNAVALILABLE_SPACE_KEY=unavailable put: + * /RESOURCE/alias/UPDATE_OPERATION?USED_SPACE_KEY=used put: + * /RESOURCE/alias/UPDATE_OPERATION?RESERVED_SPACE_KEY=reserved put: + * /RESOURCE/alias/UPDATE_OPERATION?UNAVALILABLE_SPACE_KEY=unavailable + */ } diff --git a/src/main/java/it/grid/storm/info/remote/resources/Ping.java b/src/main/java/it/grid/storm/info/remote/resources/Ping.java index dcac82d1..8757f55e 100644 --- a/src/main/java/it/grid/storm/info/remote/resources/Ping.java +++ b/src/main/java/it/grid/storm/info/remote/resources/Ping.java @@ -1,14 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.remote.resources; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import it.grid.storm.info.remote.Constants; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; - import javax.ws.rs.GET; import javax.ws.rs.PUT; import javax.ws.rs.Path; @@ -17,68 +16,69 @@ import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; -import it.grid.storm.info.remote.Constants; - @Path("/info/ping") public class Ping { - // The Java method will process HTTP GET requests - @GET - // The Java method will produce content identified by the MIME Media - // type "text/plain" - @Produces("text/plain") - public String getClichedMessage() { - - // Return some cliched textual content - return "Hello World"; - } + // The Java method will process HTTP GET requests + @GET + // The Java method will produce content identified by the MIME Media + // type "text/plain" + @Produces("text/plain") + public String getClichedMessage() { - @GET - // The Java method will produce content identified by the MIME Media - // type "text/plain" - @Produces("text/plain") - @Path("/queryMeGet") - public String getParameterizedMessage(@QueryParam("uno") String uno, - @QueryParam("due") String due) { + // Return some cliched textual content + return "Hello World"; + } - String unoDecoded, dueDecoded; - try { - unoDecoded = URLDecoder.decode(uno.trim(), Constants.ENCODING_SCHEME); - dueDecoded = URLDecoder.decode(due.trim(), Constants.ENCODING_SCHEME); - } catch (UnsupportedEncodingException e) { - System.err - .println("Unable to decode parameters. UnsupportedEncodingException : " - + e.getMessage()); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode parameters, unsupported encoding \'" + Constants.ENCODING_SCHEME - + "\'") - .build()); - } - return "Hello by GET my friend " + unoDecoded + " from " + dueDecoded; - } + @GET + // The Java method will produce content identified by the MIME Media + // type "text/plain" + @Produces("text/plain") + @Path("/queryMeGet") + public String getParameterizedMessage( + @QueryParam("uno") String uno, @QueryParam("due") String due) { - @PUT - // The Java method will produce content identified by the MIME Media - // type "text/plain" - @Produces("text/plain") - @Path("/queryMePut") - public String putParameterizedMessage(@QueryParam("uno") String uno, - @QueryParam("due") String due) { + String unoDecoded, dueDecoded; + try { + unoDecoded = URLDecoder.decode(uno.trim(), Constants.ENCODING_SCHEME); + dueDecoded = URLDecoder.decode(due.trim(), Constants.ENCODING_SCHEME); + } catch (UnsupportedEncodingException e) { + System.err.println( + "Unable to decode parameters. UnsupportedEncodingException : " + e.getMessage()); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode parameters, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); + } + return "Hello by GET my friend " + unoDecoded + " from " + dueDecoded; + } - String unoDecoded, dueDecoded; - try { - unoDecoded = URLDecoder.decode(uno.trim(), Constants.ENCODING_SCHEME); - dueDecoded = URLDecoder.decode(due.trim(), Constants.ENCODING_SCHEME); - } catch (UnsupportedEncodingException e) { - System.err - .println("Unable to decode parameters. 
UnsupportedEncodingException : " - + e.getMessage()); - throw new WebApplicationException(Response.status(BAD_REQUEST) - .entity("Unable to decode parameters, unsupported encoding \'" + Constants.ENCODING_SCHEME - + "\'") - .build()); - } - return "Hello by PUT my friend " + unoDecoded + " from " + dueDecoded; - } + @PUT + // The Java method will produce content identified by the MIME Media + // type "text/plain" + @Produces("text/plain") + @Path("/queryMePut") + public String putParameterizedMessage( + @QueryParam("uno") String uno, @QueryParam("due") String due) { + String unoDecoded, dueDecoded; + try { + unoDecoded = URLDecoder.decode(uno.trim(), Constants.ENCODING_SCHEME); + dueDecoded = URLDecoder.decode(due.trim(), Constants.ENCODING_SCHEME); + } catch (UnsupportedEncodingException e) { + System.err.println( + "Unable to decode parameters. UnsupportedEncodingException : " + e.getMessage()); + throw new WebApplicationException( + Response.status(BAD_REQUEST) + .entity( + "Unable to decode parameters, unsupported encoding \'" + + Constants.ENCODING_SCHEME + + "\'") + .build()); + } + return "Hello by PUT my friend " + unoDecoded + " from " + dueDecoded; + } } diff --git a/src/main/java/it/grid/storm/info/remote/resources/SpaceStatusResource.java b/src/main/java/it/grid/storm/info/remote/resources/SpaceStatusResource.java index dd6c840f..68735588 100644 --- a/src/main/java/it/grid/storm/info/remote/resources/SpaceStatusResource.java +++ b/src/main/java/it/grid/storm/info/remote/resources/SpaceStatusResource.java @@ -1,26 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.remote.resources; import static javax.ws.rs.core.Response.Status.NOT_FOUND; +import it.grid.storm.info.SpaceInfoManager; +import it.grid.storm.info.model.SpaceStatusSummary; +import it.grid.storm.info.remote.Constants; +import it.grid.storm.space.gpfsquota.GPFSQuotaManager; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.info.SpaceInfoManager; -import it.grid.storm.info.model.SpaceStatusSummary; -import it.grid.storm.info.remote.Constants; -import it.grid.storm.space.gpfsquota.GPFSQuotaManager; - @Path("/" + Constants.RESOURCE) public class SpaceStatusResource { @@ -48,9 +45,10 @@ public String getStatusSummary(@PathParam("alias") String saAlias) { log.info( "Unable to load requested space status summary from database. IllegalArgumentException: " + e.getMessage()); - throw new WebApplicationException(Response.status(NOT_FOUND) - .entity("Unable to load requested space status info from database") - .build()); + throw new WebApplicationException( + Response.status(NOT_FOUND) + .entity("Unable to load requested space status info from database") + .build()); } result = saSum.getJsonFormat(); return result; diff --git a/src/main/java/it/grid/storm/jna/Errno.java b/src/main/java/it/grid/storm/jna/Errno.java index bc9149f4..5092d60a 100644 --- a/src/main/java/it/grid/storm/jna/Errno.java +++ b/src/main/java/it/grid/storm/jna/Errno.java @@ -1,23 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna; public class Errno { - public static final int ENOENT = 2; /* No such file or directory */ - public static final int EEXIST = 17; /* File exists */ - public static final int ENOTDIR = 20; /* Not a directory */ - public static final int ENOSPC = 28; /* No space left on device */ - public static final int ERANGE = 34; /* Math result not representable */ - public static final int ENODATA = 61; /* No data available */ - public static final int ENOATTR = ENODATA; /* No such attribute */ - public static final int EOPNOTSUPP = 95; /* + public static final int ENOENT = 2; /* No such file or directory */ + public static final int EEXIST = 17; /* File exists */ + public static final int ENOTDIR = 20; /* Not a directory */ + public static final int ENOSPC = 28; /* No space left on device */ + public static final int ERANGE = 34; /* Math result not representable */ + public static final int ENODATA = 61; /* No data available */ + public static final int ENOATTR = ENODATA; /* No such attribute */ + public static final int EOPNOTSUPP = 95; /* * Operation not supported on * transport endpoint */ - public static final int ENOTSUP = EOPNOTSUPP; - public static final int EDQUOT = 122; /* Quota exceeded */ - + public static final int ENOTSUP = EOPNOTSUPP; + public static final int EDQUOT = 122; /* Quota exceeded */ } diff --git a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsAccountInterface.java b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsAccountInterface.java index a2199bed..0dab259e 100644 --- a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsAccountInterface.java +++ b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsAccountInterface.java @@ -1,38 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna.lcmaps; -/** - * @author dibenedetto_m - * - */ - +/** @author dibenedetto_m */ import com.sun.jna.Library; import com.sun.jna.Native; public interface LcmapsAccountInterface extends Library { - public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps_without_gsi"; - public static final LcmapsAccountInterface INSTANCE = (LcmapsAccountInterface) Native - .loadLibrary( - it.grid.storm.jna.lcmaps.LcmapsAccountInterface.JNA_LIBRARY_NAME, - LcmapsAccountInterface.class); + public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps_without_gsi"; + public static final LcmapsAccountInterface INSTANCE = + (LcmapsAccountInterface) + Native.loadLibrary( + it.grid.storm.jna.lcmaps.LcmapsAccountInterface.JNA_LIBRARY_NAME, + LcmapsAccountInterface.class); - /** - * Original signature : - * int lcmaps_account_info_init(lcmaps_account_info_t*)
- * native declaration : line 20 - */ - int lcmaps_account_info_init( - it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); + /** + * Original signature : int lcmaps_account_info_init(lcmaps_account_info_t*)
+ * native declaration : line 20 + */ + int lcmaps_account_info_init(it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); - /** - * Original signature : - * int lcmaps_account_info_clean(lcmaps_account_info_t*)
- * native declaration : line 25 - */ - int lcmaps_account_info_clean( - it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); -} \ No newline at end of file + /** + * Original signature : int lcmaps_account_info_clean(lcmaps_account_info_t*)
+ * native declaration : line 25 + */ + int lcmaps_account_info_clean(it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); +} diff --git a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsInterface.java b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsInterface.java index f3979b49..c83a7c7e 100644 --- a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsInterface.java +++ b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsInterface.java @@ -1,47 +1,39 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna.lcmaps; -/** - * @author dibenedetto_m - * - */ - +/** @author dibenedetto_m */ import com.sun.jna.Library; import com.sun.jna.Native; public interface LcmapsInterface extends Library { - public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps"; - public static final LcmapsInterface INSTANCE = (LcmapsInterface) Native - .loadLibrary(it.grid.storm.jna.lcmaps.LcmapsInterface.JNA_LIBRARY_NAME, - LcmapsInterface.class); + public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps"; + public static final LcmapsInterface INSTANCE = + (LcmapsInterface) + Native.loadLibrary( + it.grid.storm.jna.lcmaps.LcmapsInterface.JNA_LIBRARY_NAME, LcmapsInterface.class); - /** - * \fn lcmaps_init_and_logfile(
- * char * logfile,
- * FILE* fp,
- * unsigned short logtype
- * )
- * \brief Initialize the LCMAPS module, select logging type and set logfile
- * The function does the following:
- * - initialize LCMAPS module.
- * - Setup logging by providing a file handle or file name, error handling
- * (not yet).
- * - start PluginManager
- * \param logfile name of logfile
- * \param fp file handle for logging (from gatekeeper or other previously
- * opened file handle)
- * If the file handle is zero, assume that only syslogging is requested
- * \param logtype type of logging (usrlog and/or syslog)
- * \retval 0 initialization succeeded.
- * \retval 1 initialization failed.
- * Original signature :
- * int lcmaps_init_and_logfile(char*, FILE*, unsigned short)
- * native declaration : line 217
- */
- int lcmaps_init_and_logfile(String logfile, com.sun.jna.Pointer fp,
- short logtype);
-}
\ No newline at end of file
+ /**
+ * \fn lcmaps_init_and_logfile(
+ * char * logfile,
+ * FILE* fp,
+ * unsigned short logtype
+ * )
+ * \brief Initialize the LCMAPS module, select logging type and set logfile
+ * The function does the following:
+ * - initialize LCMAPS module.
+ * - Setup logging by providing a file handle or file name, error handling (not yet).
+ * - start PluginManager
+ * \param logfile name of logfile
+ * \param fp file handle for logging (from gatekeeper or other previously opened file handle)
+ * If the file handle is zero, assume that only syslogging is requested
+ * \param logtype type of logging (usrlog and/or syslog)
+ * \retval 0 initialization succeeded.
+ * \retval 1 initialization failed.
+ * Original signature : int lcmaps_init_and_logfile(char*, FILE*, unsigned short)
+ * native declaration : line 217 + */ + int lcmaps_init_and_logfile(String logfile, com.sun.jna.Pointer fp, short logtype); +} diff --git a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsPoolindexInterface.java b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsPoolindexInterface.java index af0707d8..481bb71b 100644 --- a/src/main/java/it/grid/storm/jna/lcmaps/LcmapsPoolindexInterface.java +++ b/src/main/java/it/grid/storm/jna/lcmaps/LcmapsPoolindexInterface.java @@ -1,14 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna.lcmaps; -/** - * @author dibenedetto_m - * - */ - +/** @author dibenedetto_m */ import com.sun.jna.Library; import com.sun.jna.Native; import com.sun.jna.Pointer; @@ -16,36 +11,43 @@ public interface LcmapsPoolindexInterface extends Library { - public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps_return_poolindex_without_gsi"; - public static final LcmapsPoolindexInterface INSTANCE = (LcmapsPoolindexInterface) Native - .loadLibrary( - it.grid.storm.jna.lcmaps.LcmapsPoolindexInterface.JNA_LIBRARY_NAME, - LcmapsPoolindexInterface.class); + public static final java.lang.String JNA_LIBRARY_NAME = "lcmaps_return_poolindex_without_gsi"; + public static final LcmapsPoolindexInterface INSTANCE = + (LcmapsPoolindexInterface) + Native.loadLibrary( + it.grid.storm.jna.lcmaps.LcmapsPoolindexInterface.JNA_LIBRARY_NAME, + LcmapsPoolindexInterface.class); - /** - * Original signature : - * int lcmaps_return_account_without_gsi(char*, char**, int, int, lcmaps_account_info_t*) - *
- * native declaration : line 30
- * - * @deprecated use the safer methods - * {@link #lcmaps_return_account_without_gsi(java.nio.ByteBuffer, com.sun.jna.ptr.PointerByReference, int, int, test.lcmaps_account_info_t)} - * and - * {@link #lcmaps_return_account_without_gsi(com.sun.jna.Pointer, com.sun.jna.ptr.PointerByReference, int, int, test.lcmaps_account_info_t)} - * instead - */ - @java.lang.Deprecated - int lcmaps_return_account_without_gsi(Pointer user_dn, - PointerByReference fqan_list, int nfqan, int mapcounter, - it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); + /** + * Original signature : + * int lcmaps_return_account_without_gsi(char*, char**, int, int, lcmaps_account_info_t*) + *
+ * native declaration : line 30
+ * + * @deprecated use the safer methods {@link + * #lcmaps_return_account_without_gsi(java.nio.ByteBuffer, com.sun.jna.ptr.PointerByReference, + * int, int, test.lcmaps_account_info_t)} and {@link + * #lcmaps_return_account_without_gsi(com.sun.jna.Pointer, com.sun.jna.ptr.PointerByReference, + * int, int, test.lcmaps_account_info_t)} instead + */ + @java.lang.Deprecated + int lcmaps_return_account_without_gsi( + Pointer user_dn, + PointerByReference fqan_list, + int nfqan, + int mapcounter, + it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); - /** - * Original signature : - * int lcmaps_return_account_without_gsi(char*, char**, int, int, lcmaps_account_info_t*) - *
- * native declaration : line 30 - */ - int lcmaps_return_account_without_gsi(String user_dn, String[] fqan_list, - int nfqan, int mapcounter, - it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); -} \ No newline at end of file + /** + * Original signature : + * int lcmaps_return_account_without_gsi(char*, char**, int, int, lcmaps_account_info_t*) + *
+ * native declaration : line 30 + */ + int lcmaps_return_account_without_gsi( + String user_dn, + String[] fqan_list, + int nfqan, + int mapcounter, + it.grid.storm.jna.lcmaps.lcmaps_account_info_t plcmaps_account); +} diff --git a/src/main/java/it/grid/storm/jna/lcmaps/StormLcmapsLibrary.java b/src/main/java/it/grid/storm/jna/lcmaps/StormLcmapsLibrary.java index e03d1cdb..21dd0f6b 100644 --- a/src/main/java/it/grid/storm/jna/lcmaps/StormLcmapsLibrary.java +++ b/src/main/java/it/grid/storm/jna/lcmaps/StormLcmapsLibrary.java @@ -1,93 +1,86 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna.lcmaps; -import java.nio.IntBuffer; import com.sun.jna.Library; import com.sun.jna.Native; +import java.nio.IntBuffer; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public interface StormLcmapsLibrary extends Library { - public static final java.lang.String JNA_LIBRARY_NAME = "storm_lcmaps"; - public static final StormLcmapsLibrary INSTANCE = (StormLcmapsLibrary) Native - .loadLibrary(StormLcmapsLibrary.JNA_LIBRARY_NAME, StormLcmapsLibrary.class); + public static final java.lang.String JNA_LIBRARY_NAME = "storm_lcmaps"; + public static final StormLcmapsLibrary INSTANCE = + (StormLcmapsLibrary) + Native.loadLibrary(StormLcmapsLibrary.JNA_LIBRARY_NAME, StormLcmapsLibrary.class); - /** - * @param logfile - * the lcmaps log file - * @param user_dn - * the user DN - * @param fqan_list - * the array of FQAN - * @param nfqan - * the number of FQAN in fqan_list - * @param uid - * the mapped user id - * @param gid - * the mapped group id - * @return 0 if mapping is performed correctly, an int greater than 0 - * otherwise - */ - int map_user(String logfile, String user_dn, String[] fqan_list, int nfqan, - IntBuffer uid, IntBuffer gid); + /** + * @param logfile the lcmaps log file + * @param user_dn the user DN + * @param fqan_list the array of FQAN + * @param nfqan the number of FQAN in fqan_list + * @param uid the mapped user id + * @param gid the mapped group id + * @return 0 if mapping is performed correctly, an int greater than 0 otherwise + */ + int map_user( + String logfile, String user_dn, String[] fqan_list, int nfqan, IntBuffer uid, IntBuffer gid); - public enum Errors { - INIT_FAILURE(1, "lcmaps initialization failed"), ACCOUNT_INITIALIZATION_FAILURE( - 2, "lcmaps_account object creation failed"), RETURN_ACCOUNT_FAILED(3, - "lcmaps_return_account_without_gsi call failed"), NO_GIDS_RETURNED(4, - "no gids provided by the lcmaps_return_account_without_gsi call"), UNREACHIBLE_CODE( - 5, "unexpected condition, this code should be nor reachable"), UNKNOW_ERROR( - -1, "error unknown"); + public enum Errors { + INIT_FAILURE(1, "lcmaps initialization failed"), + ACCOUNT_INITIALIZATION_FAILURE(2, "lcmaps_account object creation failed"), + RETURN_ACCOUNT_FAILED(3, "lcmaps_return_account_without_gsi call failed"), + NO_GIDS_RETURNED(4, "no gids provided by the lcmaps_return_account_without_gsi call"), + UNREACHIBLE_CODE(5, "unexpected condition, this code should be nor reachable"), + UNKNOW_ERROR(-1, "error unknown"); - private final int errorCode; - private final String errorMessage; + private final int errorCode; + private final String errorMessage; - /** - * @param errorCode - * @param errorMessage - */ - private Errors(int errorCode, String errorMessage) { + /** + * @param errorCode + * @param 
errorMessage + */ + private Errors(int errorCode, String errorMessage) { - this.errorCode = errorCode; - this.errorMessage = errorMessage; - } + this.errorCode = errorCode; + this.errorMessage = errorMessage; + } - /** - * @param errorCode - * @return - */ - public static Errors getError(int errorCode) { + /** + * @param errorCode + * @return + */ + public static Errors getError(int errorCode) { - for (Errors error : Errors.values()) { - if (error.errorCode == errorCode) { - return error; - } - } - return UNKNOW_ERROR; - } + for (Errors error : Errors.values()) { + if (error.errorCode == errorCode) { + return error; + } + } + return UNKNOW_ERROR; + } - /** - * @return - */ - public String getMessage() { + /** @return */ + public String getMessage() { - return errorMessage; - } + return errorMessage; + } - /* - * (non-Javadoc) - * - * @see java.lang.Enum#toString() - */ - public String toString() { + /* + * (non-Javadoc) + * + * @see java.lang.Enum#toString() + */ + public String toString() { - return super.toString() + ": "; - } - } + return super.toString() + + ": "; + } + } } diff --git a/src/main/java/it/grid/storm/jna/lcmaps/lcmaps_account_info_t.java b/src/main/java/it/grid/storm/jna/lcmaps/lcmaps_account_info_t.java index 7d112437..d1afb9ee 100644 --- a/src/main/java/it/grid/storm/jna/lcmaps/lcmaps_account_info_t.java +++ b/src/main/java/it/grid/storm/jna/lcmaps/lcmaps_account_info_t.java @@ -1,82 +1,70 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.jna.lcmaps; -/** - * @author dibenedetto_m - * - */ - +/** @author dibenedetto_m */ import com.sun.jna.Structure; import com.sun.jna.ptr.IntByReference; public class lcmaps_account_info_t extends Structure { - // / < the uid of the local account - public int uid; - /** - * < the list of primary gids
- * C type : int* - */ - public IntByReference pgid_list; - // / < the number of primary gids found - public int npgid; - /** - * < the list of secondary gids
- * C type : int* - */ - public IntByReference sgid_list; - // / < the number of secondary gids found - public int nsgid; - /** - * < the pool index
- * C type : char* - */ - public String poolindex; - - public lcmaps_account_info_t() { - - super(); - } + // / < the uid of the local account + public int uid; + /** + * < the list of primary gids
+ * C type : int* + */ + public IntByReference pgid_list; + // / < the number of primary gids found + public int npgid; + /** + * < the list of secondary gids
+ * C type : int* + */ + public IntByReference sgid_list; + // / < the number of secondary gids found + public int nsgid; + /** + * < the pool index
+ * C type : char* + */ + public String poolindex; - /** - * @param uid - * < the uid of the local account
- * @param pgid_list
- * < the list of primary gids
- * C type : int*
- * @param npgid
- * < the number of primary gids found
- * @param sgid_list
- * < the list of secondary gids
- * C type : int*
- * @param nsgid
- * < the number of secondary gids found
- * @param poolindex
- * < the pool index
- * C type : char* - */ - public lcmaps_account_info_t(int uid, IntByReference pgid_list, int npgid, - IntByReference sgid_list, int nsgid, String poolindex) { + public lcmaps_account_info_t() { - super(); - this.uid = uid; - this.pgid_list = pgid_list; - this.npgid = npgid; - this.sgid_list = sgid_list; - this.nsgid = nsgid; - this.poolindex = poolindex; - } + super(); + } - public static class ByReference extends lcmaps_account_info_t implements - Structure.ByReference { + /** + * @param uid < the uid of the local account
+ * @param pgid_list < the list of primary gids
+ * C type : int*
+ * @param npgid < the number of primary gids found
+ * @param sgid_list < the list of secondary gids
+ * C type : int*
+ * @param nsgid < the number of secondary gids found
+ * @param poolindex < the pool index
+ * C type : char* + */ + public lcmaps_account_info_t( + int uid, + IntByReference pgid_list, + int npgid, + IntByReference sgid_list, + int nsgid, + String poolindex) { - }; + super(); + this.uid = uid; + this.pgid_list = pgid_list; + this.npgid = npgid; + this.sgid_list = sgid_list; + this.nsgid = nsgid; + this.poolindex = poolindex; + } - public static class ByValue extends lcmaps_account_info_t implements - Structure.ByValue { + public static class ByReference extends lcmaps_account_info_t implements Structure.ByReference {}; - }; + public static class ByValue extends lcmaps_account_info_t implements Structure.ByValue {}; } diff --git a/src/main/java/it/grid/storm/logging/Files.java b/src/main/java/it/grid/storm/logging/Files.java index 787b7989..7ff1d889 100644 --- a/src/main/java/it/grid/storm/logging/Files.java +++ b/src/main/java/it/grid/storm/logging/Files.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.logging; @@ -10,110 +9,94 @@ public final class Files { - private Files() {} - - /** - * A convenience method for getting a file and requiring it to be a readable - * file. This is equivalent to calling - * getFile(filePath, true, true, true, false). - * - * @param filePath - * the path to the file - * - * @return the file - * - * @throws IOException - * thrown if the file is a directory, does not exist, or can not be - * read - */ - public static File getReadableFile(String filePath) throws IOException { - return getFile(filePath, true, true, true, false); - } - - /** - * Gets the file object associated with the path. - * - * @param filePath - * the file path - * @param requireFile - * whether the given path is required to be a file instead of a - * directory - * @param requireExistance - * whether the given file/directory must exist already - * @param requireReadable - * whether the given file/directory must be readable - * @param requireWritable - * whether the given file/directory must be writable - * - * @return the created file - * - * @throws IOException - * thrown if existance, reabability, or writability is required but - * not met - */ - public static File getFile(String filePath, boolean requireFile, - boolean requireExistance, boolean requireReadable, boolean requireWritable) - throws IOException { - - String path = Strings.safeTrimOrNullString(filePath); - if (path == null) { - throw new IOException("The file path may not be empty"); - } - - File file = new File(filePath); - - if (requireExistance && !file.exists()) { - throw new IOException("The file '" + filePath + "' does not exist."); - } - - if (requireFile && !file.isFile()) { - throw new IOException("The path '" + filePath - + "' is a directory not a file"); - } - - if (requireReadable && !file.canRead()) { - throw new IOException("The file '" + filePath + "' is not readable."); - } - - if (requireWritable && !file.canWrite()) { - throw new IOException("The file '" + filePath + "' is not writable."); - } - - return file; - } - - /** - * Reads the contents of a file in to a byte array. 
- * - * @param file - * file to read - * @return the byte contents of the file - * - * @throws IOException - * throw if there is a problem reading the file in to the byte array - */ - public static byte[] fileToByteArray(File file) throws IOException { - - long numOfBytes = file.length(); - - if (numOfBytes > Integer.MAX_VALUE) { - throw new IOException("File is to large to be read in to a byte array"); - } - - byte[] bytes = new byte[(int) numOfBytes]; - FileInputStream ins = new FileInputStream(file); - int offset = 0; - int numRead = 0; - do { - numRead = ins.read(bytes, offset, bytes.length - offset); - offset += numRead; - } while (offset < bytes.length && numRead >= 0); - - if (offset < bytes.length) { - throw new IOException("Could not completely read file " + file.getName()); - } - - ins.close(); - return bytes; - } -} \ No newline at end of file + private Files() {} + + /** + * A convenience method for getting a file and requiring it to be a readable file. This is + * equivalent to calling getFile(filePath, true, true, true, false). + * + * @param filePath the path to the file + * @return the file + * @throws IOException thrown if the file is a directory, does not exist, or can not be read + */ + public static File getReadableFile(String filePath) throws IOException { + return getFile(filePath, true, true, true, false); + } + + /** + * Gets the file object associated with the path. + * + * @param filePath the file path + * @param requireFile whether the given path is required to be a file instead of a directory + * @param requireExistance whether the given file/directory must exist already + * @param requireReadable whether the given file/directory must be readable + * @param requireWritable whether the given file/directory must be writable + * @return the created file + * @throws IOException thrown if existance, reabability, or writability is required but not met + */ + public static File getFile( + String filePath, + boolean requireFile, + boolean requireExistance, + boolean requireReadable, + boolean requireWritable) + throws IOException { + + String path = Strings.safeTrimOrNullString(filePath); + if (path == null) { + throw new IOException("The file path may not be empty"); + } + + File file = new File(filePath); + + if (requireExistance && !file.exists()) { + throw new IOException("The file '" + filePath + "' does not exist."); + } + + if (requireFile && !file.isFile()) { + throw new IOException("The path '" + filePath + "' is a directory not a file"); + } + + if (requireReadable && !file.canRead()) { + throw new IOException("The file '" + filePath + "' is not readable."); + } + + if (requireWritable && !file.canWrite()) { + throw new IOException("The file '" + filePath + "' is not writable."); + } + + return file; + } + + /** + * Reads the contents of a file in to a byte array. 
+ * + * @param file file to read + * @return the byte contents of the file + * @throws IOException throw if there is a problem reading the file in to the byte array + */ + public static byte[] fileToByteArray(File file) throws IOException { + + long numOfBytes = file.length(); + + if (numOfBytes > Integer.MAX_VALUE) { + throw new IOException("File is to large to be read in to a byte array"); + } + + byte[] bytes = new byte[(int) numOfBytes]; + FileInputStream ins = new FileInputStream(file); + int offset = 0; + int numRead = 0; + do { + numRead = ins.read(bytes, offset, bytes.length - offset); + offset += numRead; + } while (offset < bytes.length && numRead >= 0); + + if (offset < bytes.length) { + throw new IOException("Could not completely read file " + file.getName()); + } + + ins.close(); + return bytes; + } +} diff --git a/src/main/java/it/grid/storm/logging/StoRMLoggers.java b/src/main/java/it/grid/storm/logging/StoRMLoggers.java index df9c711f..84d88459 100644 --- a/src/main/java/it/grid/storm/logging/StoRMLoggers.java +++ b/src/main/java/it/grid/storm/logging/StoRMLoggers.java @@ -1,79 +1,70 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.logging; +import ch.qos.logback.classic.LoggerContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import ch.qos.logback.classic.LoggerContext; - -/** - * @author zappi - * - */ +/** @author zappi */ public class StoRMLoggers { - private static Logger bookkeepingLogger; - private static Logger heartbeatLogger; - private static Logger performanceLogger; - private static Logger stdoutLogger; - private static Logger stderrLogger; - private static boolean initDone = false; - - private static void initLoggers() { - - LoggerContext loggerContext = (LoggerContext) LoggerFactory - .getILoggerFactory(); - bookkeepingLogger = loggerContext.getLogger("bookkeeping"); - heartbeatLogger = loggerContext.getLogger("health"); - performanceLogger = loggerContext.getLogger("performance"); - stdoutLogger = loggerContext.getLogger("system.out"); - stderrLogger = loggerContext.getLogger("system.err"); - } - - public static Logger getBKLogger() { - - if (!initDone) { - initLoggers(); - } - return bookkeepingLogger; - } - - public static Logger getHBLogger() { - - if (!initDone) { - initLoggers(); - } - return heartbeatLogger; - } - - public static Logger getPerfLogger() { - - if (!initDone) { - initLoggers(); - } - return performanceLogger; - } - - public static Logger getStdoutLogger() { - - if (!initDone) { - initLoggers(); - } - return stdoutLogger; - } - - public static Logger getStderrLogger() { - - if (!initDone) { - initLoggers(); - } - return stderrLogger; - } - + private static Logger bookkeepingLogger; + private static Logger heartbeatLogger; + private static Logger performanceLogger; + private static Logger stdoutLogger; + private static Logger stderrLogger; + private static boolean initDone = false; + + private static void initLoggers() { + + LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory(); + bookkeepingLogger = loggerContext.getLogger("bookkeeping"); + heartbeatLogger = loggerContext.getLogger("health"); + performanceLogger = loggerContext.getLogger("performance"); + stdoutLogger = loggerContext.getLogger("system.out"); + stderrLogger = loggerContext.getLogger("system.err"); + } + + public static Logger 
getBKLogger() { + + if (!initDone) { + initLoggers(); + } + return bookkeepingLogger; + } + + public static Logger getHBLogger() { + + if (!initDone) { + initLoggers(); + } + return heartbeatLogger; + } + + public static Logger getPerfLogger() { + + if (!initDone) { + initLoggers(); + } + return performanceLogger; + } + + public static Logger getStdoutLogger() { + + if (!initDone) { + initLoggers(); + } + return stdoutLogger; + } + + public static Logger getStderrLogger() { + + if (!initDone) { + initLoggers(); + } + return stderrLogger; + } } diff --git a/src/main/java/it/grid/storm/logging/Strings.java b/src/main/java/it/grid/storm/logging/Strings.java index 32c89c9b..38fd7e93 100644 --- a/src/main/java/it/grid/storm/logging/Strings.java +++ b/src/main/java/it/grid/storm/logging/Strings.java @@ -1,95 +1,80 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.logging; /** Helper class for working with Strings. */ public final class Strings { - /** Constructor. */ - private Strings() { + /** Constructor. */ + private Strings() {} - } + /** + * A "safe" null/empty check for strings. + * + * @param s The string to check + * @return true if the string is null or the trimmed string is length zero + */ + public static boolean isEmpty(String s) { - /** - * A "safe" null/empty check for strings. - * - * @param s - * The string to check - * - * @return true if the string is null or the trimmed string is length zero - */ - public static boolean isEmpty(String s) { + if (s != null) { + String sTrimmed = s.trim(); + if (sTrimmed.length() > 0) { + return false; + } + } - if (s != null) { - String sTrimmed = s.trim(); - if (sTrimmed.length() > 0) { - return false; - } - } + return true; + } - return true; - } + /** + * Compares two strings for equality, allowing for nulls. + * + * @param type of object to compare + * @param s1 The first operand + * @param s2 The second operand + * @return true if both are null or both are non-null and the same string value + */ + public static boolean safeEquals(T s1, T s2) { - /** - * Compares two strings for equality, allowing for nulls. - * - * @param - * type of object to compare - * @param s1 - * The first operand - * @param s2 - * The second operand - * - * @return true if both are null or both are non-null and the same string - * value - */ - public static boolean safeEquals(T s1, T s2) { + if (s1 == null || s2 == null) { + return s1 == s2; + } - if (s1 == null || s2 == null) { - return s1 == s2; - } + return s1.equals(s2); + } - return s1.equals(s2); - } + /** + * A safe string trim that handles nulls. + * + * @param s the string to trim + * @return the trimmed string or null if the given string was null + */ + public static String safeTrim(String s) { - /** - * A safe string trim that handles nulls. - * - * @param s - * the string to trim - * - * @return the trimmed string or null if the given string was null - */ - public static String safeTrim(String s) { + if (s != null) { + return s.trim(); + } - if (s != null) { - return s.trim(); - } + return null; + } - return null; - } + /** + * Removes preceding or proceeding whitespace from a string or return null if the string is null + * or of zero length after trimming (i.e. if the string only contained whitespace). 
+ * + * @param s the string to trim + * @return the trimmed string or null + */ + public static String safeTrimOrNullString(String s) { - /** - * Removes preceding or proceeding whitespace from a string or return null if - * the string is null or of zero length after trimming (i.e. if the string - * only contained whitespace). - * - * @param s - * the string to trim - * - * @return the trimmed string or null - */ - public static String safeTrimOrNullString(String s) { + if (s != null) { + String sTrimmed = s.trim(); + if (sTrimmed.length() > 0) { + return sTrimmed; + } + } - if (s != null) { - String sTrimmed = s.trim(); - if (sTrimmed.length() > 0) { - return sTrimmed; - } - } - - return null; - } -} \ No newline at end of file + return null; + } +} diff --git a/src/main/java/it/grid/storm/metrics/NamedInstrumentedSelectChannelConnector.java b/src/main/java/it/grid/storm/metrics/NamedInstrumentedSelectChannelConnector.java index d40a0283..551431ad 100644 --- a/src/main/java/it/grid/storm/metrics/NamedInstrumentedSelectChannelConnector.java +++ b/src/main/java/it/grid/storm/metrics/NamedInstrumentedSelectChannelConnector.java @@ -1,22 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.metrics; import static com.codahale.metrics.MetricRegistry.name; -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -import org.eclipse.jetty.io.Connection; -import org.eclipse.jetty.server.nio.SelectChannelConnector; - import com.codahale.metrics.Clock; import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.eclipse.jetty.io.Connection; +import org.eclipse.jetty.server.nio.SelectChannelConnector; public class NamedInstrumentedSelectChannelConnector extends SelectChannelConnector { diff --git a/src/main/java/it/grid/storm/metrics/NamedInstrumentedThreadPool.java b/src/main/java/it/grid/storm/metrics/NamedInstrumentedThreadPool.java index 6c2821e8..22e104b8 100644 --- a/src/main/java/it/grid/storm/metrics/NamedInstrumentedThreadPool.java +++ b/src/main/java/it/grid/storm/metrics/NamedInstrumentedThreadPool.java @@ -1,56 +1,62 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.metrics; import static com.codahale.metrics.MetricRegistry.name; -import org.eclipse.jetty.util.thread.QueuedThreadPool; - import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.RatioGauge; +import org.eclipse.jetty.util.thread.QueuedThreadPool; -public class NamedInstrumentedThreadPool extends QueuedThreadPool{ +public class NamedInstrumentedThreadPool extends QueuedThreadPool { public NamedInstrumentedThreadPool(String name, MetricRegistry registry) { super(); setName(name); - String tpName = name+"-tp"; - registry.register(name(tpName, "percent-idle"), new RatioGauge() { - @Override - protected Ratio getRatio() { - return Ratio.of(getIdleThreads(), - getThreads()); - } - }); - registry.register(name(tpName, "active-threads"), new Gauge() { - @Override - public Integer getValue() { - return getThreads(); - } - }); - registry.register(name(tpName, "idle-threads"), new Gauge() { - @Override - public Integer getValue() { - return getIdleThreads(); - } - }); - registry.register(name(tpName, "jobs"), new Gauge() { - @Override - public Integer getValue() { - // This assumes the QueuedThreadPool is using a BlockingArrayQueue or - // ArrayBlockingQueue for its queue, and is therefore a constant-time operation. - return getQueue() != null ? getQueue().size() : 0; - } - }); - registry.register(name(tpName, "utilization-max"), new RatioGauge() { - @Override - protected Ratio getRatio() { - return Ratio.of(getThreads() - getIdleThreads(), getMaxThreads()); - } - }); + String tpName = name + "-tp"; + registry.register( + name(tpName, "percent-idle"), + new RatioGauge() { + @Override + protected Ratio getRatio() { + return Ratio.of(getIdleThreads(), getThreads()); + } + }); + registry.register( + name(tpName, "active-threads"), + new Gauge() { + @Override + public Integer getValue() { + return getThreads(); + } + }); + registry.register( + name(tpName, "idle-threads"), + new Gauge() { + @Override + public Integer getValue() { + return getIdleThreads(); + } + }); + registry.register( + name(tpName, "jobs"), + new Gauge() { + @Override + public Integer getValue() { + // This assumes the QueuedThreadPool is using a BlockingArrayQueue or + // ArrayBlockingQueue for its queue, and is therefore a constant-time operation. + return getQueue() != null ? getQueue().size() : 0; + } + }); + registry.register( + name(tpName, "utilization-max"), + new RatioGauge() { + @Override + protected Ratio getRatio() { + return Ratio.of(getThreads() - getIdleThreads(), getMaxThreads()); + } + }); } - } diff --git a/src/main/java/it/grid/storm/metrics/StormMetricRegistry.java b/src/main/java/it/grid/storm/metrics/StormMetricRegistry.java index 615024fa..a922a062 100644 --- a/src/main/java/it/grid/storm/metrics/StormMetricRegistry.java +++ b/src/main/java/it/grid/storm/metrics/StormMetricRegistry.java @@ -1,18 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.metrics; import com.codahale.metrics.MetricRegistry; -/** - * Singleton wrapper for the dropwizard metrics registry. - * - * - */ +/** Singleton wrapper for the dropwizard metrics registry. 
*/ public enum StormMetricRegistry { - METRIC_REGISTRY; private StormMetricRegistry() { @@ -25,5 +19,4 @@ public MetricRegistry getRegistry() { return registry; } - } diff --git a/src/main/java/it/grid/storm/metrics/StormMetricsReporter.java b/src/main/java/it/grid/storm/metrics/StormMetricsReporter.java index 27543cf4..10f8f473 100644 --- a/src/main/java/it/grid/storm/metrics/StormMetricsReporter.java +++ b/src/main/java/it/grid/storm/metrics/StormMetricsReporter.java @@ -1,15 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.metrics; -import java.util.SortedMap; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; import com.codahale.metrics.Histogram; @@ -19,26 +12,41 @@ import com.codahale.metrics.ScheduledReporter; import com.codahale.metrics.Snapshot; import com.codahale.metrics.Timer; - import it.grid.storm.common.OperationType; import it.grid.storm.filesystem.MetricsFilesystemAdapter.FilesystemMetric; +import java.util.SortedMap; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class StormMetricsReporter extends ScheduledReporter { public static final String METRICS_LOGGER_NAME = StormMetricsReporter.class.getName(); - protected static final String[] REPORTED_TIMERS = {"synch", OperationType.AF.getOpName(), - OperationType.AR.getOpName(), OperationType.EFL.getOpName(), OperationType.GSM.getOpName(), - OperationType.GST.getOpName(), OperationType.LS.getOpName(), OperationType.MKD.getOpName(), - OperationType.MV.getOpName(), OperationType.PD.getOpName(), OperationType.PNG.getOpName(), - OperationType.RF.getOpName(), OperationType.RM.getOpName(), OperationType.RMD.getOpName(), - FilesystemMetric.FILE_ACL_OP.getOpName(), FilesystemMetric.FILE_ATTRIBUTE_OP.getOpName(), - "ea"}; + protected static final String[] REPORTED_TIMERS = { + "synch", + OperationType.AF.getOpName(), + OperationType.AR.getOpName(), + OperationType.EFL.getOpName(), + OperationType.GSM.getOpName(), + OperationType.GST.getOpName(), + OperationType.LS.getOpName(), + OperationType.MKD.getOpName(), + OperationType.MV.getOpName(), + OperationType.PD.getOpName(), + OperationType.PNG.getOpName(), + OperationType.RF.getOpName(), + OperationType.RM.getOpName(), + OperationType.RMD.getOpName(), + FilesystemMetric.FILE_ACL_OP.getOpName(), + FilesystemMetric.FILE_ATTRIBUTE_OP.getOpName(), + "ea" + }; private static final Logger LOG = LoggerFactory.getLogger(METRICS_LOGGER_NAME); - private StormMetricsReporter(MetricRegistry registry, MetricFilter filter, TimeUnit rateUnit, - TimeUnit durationUnit) { + private StormMetricsReporter( + MetricRegistry registry, MetricFilter filter, TimeUnit rateUnit, TimeUnit durationUnit) { super(registry, "storm", filter, rateUnit, durationUnit); } @@ -87,17 +95,23 @@ public StormMetricsReporter build() { } } - public StormMetricsReporter(MetricRegistry registry, String name, MetricFilter filter, - TimeUnit rateUnit, TimeUnit durationUnit) { + public StormMetricsReporter( + MetricRegistry registry, + String name, + MetricFilter filter, + TimeUnit rateUnit, + TimeUnit durationUnit) { super(registry, name, filter, rateUnit, durationUnit); - } @SuppressWarnings({"rawtypes"}) @Override - public void report(SortedMap gauges, SortedMap 
counters, - SortedMap histograms, SortedMap meters, + public void report( + SortedMap gauges, + SortedMap counters, + SortedMap histograms, + SortedMap meters, SortedMap timers) { for (String metricName : REPORTED_TIMERS) { @@ -122,11 +136,18 @@ private void reportMetric(String name, Timer timer) { LOG.info( "{} [(count={}, m1_rate={}, m5_rate={}, m15_rate={}) (max={}, min={}, mean={}, p95={}, p99={})] duration_units={}, rate_units={}", - name, timer.getCount(), convertRate(timer.getOneMinuteRate()), - convertRate(timer.getFiveMinuteRate()), convertRate(timer.getFifteenMinuteRate()), - convertDuration(snapshot.getMax()), convertDuration(snapshot.getMin()), - convertDuration(snapshot.getMean()), convertDuration(snapshot.get95thPercentile()), - convertDuration(snapshot.get99thPercentile()), getDurationUnit(), getRateUnit()); + name, + timer.getCount(), + convertRate(timer.getOneMinuteRate()), + convertRate(timer.getFiveMinuteRate()), + convertRate(timer.getFifteenMinuteRate()), + convertDuration(snapshot.getMax()), + convertDuration(snapshot.getMin()), + convertDuration(snapshot.getMean()), + convertDuration(snapshot.get95thPercentile()), + convertDuration(snapshot.get99thPercentile()), + getDurationUnit(), + getRateUnit()); } @SuppressWarnings({"rawtypes"}) @@ -138,8 +159,14 @@ private void reportThreadPoolMetrics(String tpName, SortedMap gau int jobs = getIntValue(gauges.get(tpName + ".jobs")); double utilizationMax = round2dec(getDoubleValue(gauges.get(tpName + ".utilization-max"))); - LOG.info("{} [active-threads={}, idle-threads={}, jobs={}, utilization-max={}, percent-idle={}]", - tpName, activeThreads, idleThreads, jobs, utilizationMax, percentIdle); + LOG.info( + "{} [active-threads={}, idle-threads={}, jobs={}, utilization-max={}, percent-idle={}]", + tpName, + activeThreads, + idleThreads, + jobs, + utilizationMax, + percentIdle); } private void reportJettyHandlerMetrics(String handlerName, SortedMap meters) { @@ -150,16 +177,19 @@ private void reportJettyHandlerMetrics(String handlerName, SortedMap pathlist) { + public InvalidDescendantsEmptyRequestException(File fh, Collection pathlist) { - isEmptyDir = (pathlist != null && (pathlist.size() == 0)); - } + isEmptyDir = (pathlist != null && (pathlist.size() == 0)); + } - public String toString() { + public String toString() { - return ("Path Specified is EMPTY Directory = " + isEmptyDir); - } + return ("Path Specified is EMPTY Directory = " + isEmptyDir); + } } diff --git a/src/main/java/it/grid/storm/namespace/InvalidDescendantsFileRequestException.java b/src/main/java/it/grid/storm/namespace/InvalidDescendantsFileRequestException.java index 927e9366..ff3c265f 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidDescendantsFileRequestException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidDescendantsFileRequestException.java @@ -1,34 +1,31 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; import java.io.*; /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidDescendantsFileRequestException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - private boolean isNotDirectory = false; + private boolean isNotDirectory = false; - public InvalidDescendantsFileRequestException(File fh) { + public InvalidDescendantsFileRequestException(File fh) { - isNotDirectory = fh.isDirectory(); - } + isNotDirectory = fh.isDirectory(); + } - public String toString() { + public String toString() { - return ("Path Specified is NOT a directory = " + isNotDirectory); - } + return ("Path Specified is NOT a directory = " + isNotDirectory); + } } diff --git a/src/main/java/it/grid/storm/namespace/InvalidDescendantsPathRequestException.java b/src/main/java/it/grid/storm/namespace/InvalidDescendantsPathRequestException.java index 96d2931d..876c8df0 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidDescendantsPathRequestException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidDescendantsPathRequestException.java @@ -1,34 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; import java.io.*; /** - * This class represents an Exception throws if getChildren is request on PATH - * that does not exist. * - * + * This class represents an Exception throws if getChildren is request on PATH that does not exist. + * * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidDescendantsPathRequestException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - private boolean pathNotExists = false; + private boolean pathNotExists = false; - public InvalidDescendantsPathRequestException(File fh) { + public InvalidDescendantsPathRequestException(File fh) { - this.pathNotExists = !fh.isFile(); - } + this.pathNotExists = !fh.isFile(); + } - public String toString() { + public String toString() { - return ("PATH specified NOT EXISTS = " + pathNotExists); - } + return ("PATH specified NOT EXISTS = " + pathNotExists); + } } diff --git a/src/main/java/it/grid/storm/namespace/InvalidDescendantsTDirOptionRequestException.java b/src/main/java/it/grid/storm/namespace/InvalidDescendantsTDirOptionRequestException.java index d8d63664..6c8990ce 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidDescendantsTDirOptionRequestException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidDescendantsTDirOptionRequestException.java @@ -1,42 +1,42 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; import it.grid.storm.srm.types.TDirOption; - import java.io.*; import java.util.*; /** - * This class represents an Exception throws if TDirOptionData is not valid to - * explore directory content. * - * + * This class represents an Exception throws if TDirOptionData is not valid to explore directory + * content. 
* + * * @author Michele Dibenedetto * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - @SuppressWarnings("serial") public class InvalidDescendantsTDirOptionRequestException extends Exception { - private String filePath = ""; - private final boolean allRecursive; - private final int levels; + private String filePath = ""; + private final boolean allRecursive; + private final int levels; - public InvalidDescendantsTDirOptionRequestException(File fh, - TDirOption dirOption) { + public InvalidDescendantsTDirOptionRequestException(File fh, TDirOption dirOption) { - filePath = fh.getAbsolutePath(); - allRecursive = dirOption.isAllLevelRecursive(); - levels = dirOption.getNumLevel(); - } + filePath = fh.getAbsolutePath(); + allRecursive = dirOption.isAllLevelRecursive(); + levels = dirOption.getNumLevel(); + } - public String toString() { + public String toString() { - return ("Unable to explore folder " + filePath + " allRecursive = " - + allRecursive + " allowed recursion levels = " + levels); - } + return ("Unable to explore folder " + + filePath + + " allRecursive = " + + allRecursive + + " allowed recursion levels = " + + levels); + } } diff --git a/src/main/java/it/grid/storm/namespace/InvalidGetTURLProtocolException.java b/src/main/java/it/grid/storm/namespace/InvalidGetTURLProtocolException.java index 063115c9..6b7ebf78 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidGetTURLProtocolException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidGetTURLProtocolException.java @@ -1,24 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidGetTURLProtocolException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; public InvalidGetTURLProtocolException(String message) { diff --git a/src/main/java/it/grid/storm/namespace/InvalidProtocolForTURLException.java b/src/main/java/it/grid/storm/namespace/InvalidProtocolForTURLException.java index b62d3d1b..dada2f9a 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidProtocolForTURLException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidProtocolForTURLException.java @@ -1,32 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; import it.grid.storm.srm.types.InvalidTTURLAttributesException; -public class InvalidProtocolForTURLException extends - InvalidTTURLAttributesException { +public class InvalidProtocolForTURLException extends InvalidTTURLAttributesException { - private String protocolSchema; + private String protocolSchema; - public InvalidProtocolForTURLException(String protocolSchema) { + public InvalidProtocolForTURLException(String protocolSchema) { - super(); - this.protocolSchema = protocolSchema; - } + super(); + this.protocolSchema = protocolSchema; + } - public InvalidProtocolForTURLException(Throwable cause, String protocolSchema) { + public InvalidProtocolForTURLException(Throwable cause, String protocolSchema) { - super(cause); - this.protocolSchema = protocolSchema; - } + super(cause); + this.protocolSchema = protocolSchema; + } - public String toString() { - - return ("Impossible to build TURL with the protocol schema '" - + protocolSchema + "'"); - } + public String toString() { + return ("Impossible to build TURL with the protocol schema '" + protocolSchema + "'"); + } } diff --git a/src/main/java/it/grid/storm/namespace/InvalidSURLException.java b/src/main/java/it/grid/storm/namespace/InvalidSURLException.java index 7fdd69c5..87777bd9 100644 --- a/src/main/java/it/grid/storm/namespace/InvalidSURLException.java +++ b/src/main/java/it/grid/storm/namespace/InvalidSURLException.java @@ -1,39 +1,33 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.namespace; import it.grid.storm.srm.types.*; public class InvalidSURLException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1L; - private TSURL surl = null; + /** */ + private static final long serialVersionUID = 1L; - public InvalidSURLException(TSURL surl, String message) { + private TSURL surl = null; - super(message); - this.surl = surl; - } + public InvalidSURLException(TSURL surl, String message) { - public String toString() { + super(message); + this.surl = surl; + } - return String.format("InvalidSURLException for SURL='%s': %s", this.surl, - this.getMessage()); - } + public String toString() { + return String.format("InvalidSURLException for SURL='%s': %s", this.surl, this.getMessage()); + } } diff --git a/src/main/java/it/grid/storm/namespace/MalformedSURLException.java b/src/main/java/it/grid/storm/namespace/MalformedSURLException.java index 2e59c2dc..5fdb411d 100644 --- a/src/main/java/it/grid/storm/namespace/MalformedSURLException.java +++ b/src/main/java/it/grid/storm/namespace/MalformedSURLException.java @@ -1,39 +1,33 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.namespace; import it.grid.storm.srm.types.*; public class MalformedSURLException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1L; - private TSURL surl = null; + /** */ + private static final long serialVersionUID = 1L; - public MalformedSURLException(TSURL surl, String message) { + private TSURL surl = null; - super(message); - this.surl = surl; - } + public MalformedSURLException(TSURL surl, String message) { - public String toString() { + super(message); + this.surl = surl; + } - return String.format("MalformedSURLException for SURL='%s': %s", this.surl, - this.getMessage()); - } + public String toString() { + return String.format("MalformedSURLException for SURL='%s': %s", this.surl, this.getMessage()); + } } diff --git a/src/main/java/it/grid/storm/namespace/Namespace.java b/src/main/java/it/grid/storm/namespace/Namespace.java index a6ae3869..2427f0da 100644 --- a/src/main/java/it/grid/storm/namespace/Namespace.java +++ b/src/main/java/it/grid/storm/namespace/Namespace.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; @@ -8,21 +7,10 @@ import static it.grid.storm.namespace.naming.NamespaceUtil.getWinnerRule; import static it.grid.storm.namespace.naming.NamespaceUtil.getWinnerVFS; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.SortedSet; -import java.util.UUID; -import java.util.stream.Collectors; - -import org.slf4j.Logger; - import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - import it.grid.storm.common.GUID; import it.grid.storm.common.types.PFN; import it.grid.storm.filesystem.LocalFile; @@ -43,6 +31,14 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.SortedSet; +import java.util.UUID; +import java.util.stream.Collectors; +import org.slf4j.Logger; public class Namespace implements NamespaceInterface { @@ -122,8 +118,7 @@ public List getReadableByAnonymousVFS() throws NamespaceException { } @Override - public List getReadableOrApproachableByAnonymousVFS() - throws NamespaceException { + public List getReadableOrApproachableByAnonymousVFS() throws NamespaceException { List rowVFS = Lists.newLinkedList(); List allVFS = Lists.newLinkedList(getAllDefinedVFS()); @@ -144,13 +139,17 @@ public VirtualFS getDefaultVFS(GridUserInterface user) throws NamespaceException if (appRules.isEmpty()) { if (user instanceof AbstractGridUser) { String msg = - String.format("No approachable rules found for user with DN='%s' and VO = '%s'", + String.format( + "No approachable rules found for user with DN='%s' and VO = '%s'", user.getDn(), ((AbstractGridUser) user).getVO()); log.error(msg); throw new NamespaceException(msg); } else { - String msg = String.format("No approachable rules found for user with " - + "DN='%s' User certificate has not VOMS extension", user.getDn()); + String msg = + String.format( + "No approachable rules found for user with " + + "DN='%s' User 
certificate has not VOMS extension", + user.getDn()); log.error(msg); throw new NamespaceException(msg); } @@ -170,12 +169,11 @@ public boolean isApproachable(StoRI stori, GridUserInterface user) throws Namesp } /** - * * The resolution is based on the retrieving of the Winner Rule 1) First attempt is based on * StFN-Path 2) Second attempt is based on all StFN. That because is possible that SURL is * expressed without File Name so StFN is a directory. ( Special case is when the SFN does not * contain the File Name and ALL the StFN is considerable as StFN-Path. ) - * + * * @param surl TSURL * @return StoRI * @throws NamespaceException @@ -237,12 +235,16 @@ private StoRI resolveStoRI(TSURL surl, GridUserInterface user) // verify if StoRI canonical path is enclosed into the winner VFS if (isStoRIEnclosed(stori, winnerRule.getMappedFS())) { - log.debug("Resource '{}' belongs to '{}'", stori.getLocalFile(), + log.debug( + "Resource '{}' belongs to '{}'", + stori.getLocalFile(), winnerRule.getMappedFS().getAliasName()); return stori; } - log.debug("Resource '{}' doesn't belong to {}", stori.getLocalFile(), + log.debug( + "Resource '{}' doesn't belong to {}", + stori.getLocalFile(), winnerRule.getMappedFS().getAliasName()); if (isAnonymous(user)) { @@ -255,8 +257,8 @@ private StoRI resolveStoRI(TSURL surl, GridUserInterface user) VirtualFS targetVFS = getWinnerVFS(realPath, parser.getMapVFS_Root()); if (targetVFS == null) { log.debug("Unable to find a valid VFS from path '{}'", realPath); - throw new InvalidSURLException(surl, - "The requested SURL is not managed by this instance of StoRM"); + throw new InvalidSURLException( + surl, "The requested SURL is not managed by this instance of StoRM"); } log.debug("{} belongs to {}", realPath, targetVFS.getAliasName()); @@ -269,7 +271,6 @@ private StoRI resolveStoRI(TSURL surl, GridUserInterface user) log.debug("{} is approachable by the user", targetVFS.getAliasName()); return stori; - } private boolean isSolvable(TSURL surl) { @@ -356,15 +357,13 @@ public StoRI resolveStoRIbyAbsolutePath(String absolutePath, VirtualFS vfs) public VirtualFS resolveVFSbyAbsolutePath(String absolutePath, GridUserInterface user) throws NamespaceException { - /** - * @todo Check the approachable rules - */ + /** @todo Check the approachable rules */ return getWinnerVFS(absolutePath, parser.getMapVFS_Root()); } /** * Method used by srmGetSpaceMetadata - * + * * @param absolutePath String * @return VirtualFS * @throws NamespaceException @@ -374,8 +373,7 @@ public VirtualFS resolveVFSbyRoot(String absolutePath) throws NamespaceException return getWinnerVFS(absolutePath, parser.getMapVFS_Root()); } - public VirtualFS resolveVFSbyAbsolutePath(String absolutePath) - throws NamespaceException { + public VirtualFS resolveVFSbyAbsolutePath(String absolutePath) throws NamespaceException { return getWinnerVFS(absolutePath, parser.getMapVFS_Root()); } @@ -391,9 +389,7 @@ public VirtualFS resolveVFSbyLocalFile(LocalFile file) throws NamespaceException public StoRI resolveStoRIbyPFN(PFN pfn) throws NamespaceException { - /** - * @todo Check the approachable rules - */ + /** @todo Check the approachable rules */ VirtualFS vfs = resolveVFSbyPFN(pfn); String vfsRoot = vfs.getRootPath(); String relativePath = NamespaceUtil.extractRelativePath(vfsRoot, pfn.getValue()); @@ -402,7 +398,7 @@ public StoRI resolveStoRIbyPFN(PFN pfn) throws NamespaceException { /** * method used by GetSpaceMetaData Executor to retrieve the VFS and Quota Parameters. 
- * + * * @param pfn PFN * @return VirtualFS * @throws NamespaceException @@ -422,10 +418,10 @@ public Space retrieveSpaceByToken(TSizeInBytes totSize, TSpaceToken token) { return null; } - /*********************************************** - * UTILITY METHODS - **********************************************/ - + /** + * ********************************************* UTILITY METHODS + * ******************************************** + */ public String makeSpaceFileURI(GridUserInterface user) throws NamespaceException { String result = null; @@ -436,18 +432,26 @@ public String makeSpaceFileURI(GridUserInterface user) throws NamespaceException if (appRules.isEmpty()) { if (user instanceof AbstractGridUser) { - log.error("No approachable rules found for user with DN='{}' " + "and VO='{}'", - user.getDn(), ((AbstractGridUser) user).getVO()); - - throw new NamespaceException("No approachable rules found for user with DN='" + user.getDn() - + "' and VO = '" + ((AbstractGridUser) user).getVO() + "'"); + log.error( + "No approachable rules found for user with DN='{}' " + "and VO='{}'", + user.getDn(), + ((AbstractGridUser) user).getVO()); + + throw new NamespaceException( + "No approachable rules found for user with DN='" + + user.getDn() + + "' and VO = '" + + ((AbstractGridUser) user).getVO() + + "'"); } else { log.error( "No approachable rules found for user with DN='{}'. " + "No VOMS extensions found.", user.getDn()); - throw new NamespaceException("No approachable rules found for user with DN='" + user.getDn() - + "' User certificate has not VOMS extension"); + throw new NamespaceException( + "No approachable rules found for user with DN='" + + user.getDn() + + "' User certificate has not VOMS extension"); } } ApproachableRule firstAppRule = appRules.first(); @@ -492,15 +496,14 @@ public boolean isSpaceFile(String fileName) { Preconditions.checkNotNull(fileName, "Unable to check space file name. 
Invalid null fileName"); - if (!fileName.endsWith(SPACE_FILE_NAME_SUFFIX)) - return false; - if (fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) <= 0) - return false; - if (fileName.substring(fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) + 1) - .length() <= SPACE_FILE_NAME_SUFFIX.length()) - return false; - String uuidString = fileName.substring(fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) + 1, - fileName.lastIndexOf(SPACE_FILE_NAME_SUFFIX)); + if (!fileName.endsWith(SPACE_FILE_NAME_SUFFIX)) return false; + if (fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) <= 0) return false; + if (fileName.substring(fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) + 1).length() + <= SPACE_FILE_NAME_SUFFIX.length()) return false; + String uuidString = + fileName.substring( + fileName.indexOf(SPACE_FILE_NAME_SEPARATOR) + 1, + fileName.lastIndexOf(SPACE_FILE_NAME_SUFFIX)); try { UUID.fromString(uuidString); } catch (Exception e) { @@ -510,7 +513,6 @@ public boolean isSpaceFile(String fileName) { } /** - * * @param user GridUserInterface * @return SortedSet */ @@ -524,24 +526,25 @@ public SortedSet getApproachableRules(GridUserInterface user) Map appRulesUnorderd = Maps.newHashMap(parser.getApproachableRules()); // List the entries - appRulesUnorderd.keySet().forEach(key -> { - ApproachableRule appRule = appRulesUnorderd.get(key); - if (matchSubject(appRule, user)) { - // Insert into the result (that is an ordered set) - appRules.add(appRule); - } - }); + appRulesUnorderd + .keySet() + .forEach( + key -> { + ApproachableRule appRule = appRulesUnorderd.get(key); + if (matchSubject(appRule, user)) { + // Insert into the result (that is an ordered set) + appRules.add(appRule); + } + }); } return appRules; } /** - * * @param appRule ApproachableRule * @return VirtualFS */ - public VirtualFS getApproachableDefaultVFS(ApproachableRule appRule) - throws NamespaceException { + public VirtualFS getApproachableDefaultVFS(ApproachableRule appRule) throws NamespaceException { VirtualFS defaultVFS = null; String defaultVFSName = null; @@ -579,8 +582,7 @@ private static boolean matchSubject(ApproachableRule approachableRule, GridUserI return result; } - public VirtualFS resolveVFSbySpaceToken(TSpaceToken spaceToken) - throws NamespaceException { + public VirtualFS resolveVFSbySpaceToken(TSpaceToken spaceToken) throws NamespaceException { Optional vfs = getAllDefinedVFS().stream().filter(v -> spaceToken.equals(v.getSpaceToken())).findFirst(); @@ -658,7 +660,5 @@ private boolean isGPFSQuotaEnabled(VirtualFS vfs) { return false; } return (quota.getDefined() && quota.getEnabled()); - } - } diff --git a/src/main/java/it/grid/storm/namespace/NamespaceDirector.java b/src/main/java/it/grid/storm/namespace/NamespaceDirector.java index 85d06370..c2b5546b 100644 --- a/src/main/java/it/grid/storm/namespace/NamespaceDirector.java +++ b/src/main/java/it/grid/storm/namespace/NamespaceDirector.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; @@ -9,99 +8,86 @@ import it.grid.storm.namespace.config.NamespaceParser; import it.grid.storm.namespace.config.xml.XMLNamespaceLoader; import it.grid.storm.namespace.config.xml.XMLNamespaceParser; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class NamespaceDirector { - private static final Logger log = LoggerFactory - .getLogger(NamespaceDirector.class);; - private static NamespaceInterface namespaceIstance = null; - - private static NamespaceLoader loader; - private static NamespaceParser parser; - - private static boolean initialized = false; + private static final Logger log = LoggerFactory.getLogger(NamespaceDirector.class);; + private static NamespaceInterface namespaceIstance = null; - private NamespaceDirector() {} + private static NamespaceLoader loader; + private static NamespaceParser parser; - public static void initializeDirector() { + private static boolean initialized = false; - log.info("NAMESPACE : Initializing ..."); - Configuration config = Configuration.getInstance(); + private NamespaceDirector() {} - log.info(" +++++++++++++++++++++++ "); - log.info(" Production Mode "); - log.info(" +++++++++++++++++++++++ "); + public static void initializeDirector() { - String configurationPATH = config.namespaceConfigPath(); - String namespaceConfigFileName = config.getNamespaceConfigFilename(); - int refreshInSeconds = config.getNamespaceConfigRefreshRateInSeconds(); - loader = new XMLNamespaceLoader(configurationPATH, namespaceConfigFileName, refreshInSeconds); + log.info("NAMESPACE : Initializing ..."); + Configuration config = Configuration.getInstance(); - // Check the validity of namespace. - if (loader instanceof XMLNamespaceLoader) { - XMLNamespaceLoader xmlLoader = (XMLNamespaceLoader) loader; - if (!(xmlLoader.schemaValidity)) { - // Error into the validity ckeck of namespace - log.error("Namespace configuration is not conformant with namespae grammar."); - log.error("Please validate namespace configuration file."); - System.exit(0); - } - } + log.info(" +++++++++++++++++++++++ "); + log.info(" Production Mode "); + log.info(" +++++++++++++++++++++++ "); - log.debug("Namespace Configuration PATH : {}" , configurationPATH); - log.debug("Namespace Configuration FILENAME : {}" , namespaceConfigFileName); - log.debug("Namespace Configuration GLANCE RATE : {}" , refreshInSeconds); + String configurationPATH = config.namespaceConfigPath(); + String namespaceConfigFileName = config.getNamespaceConfigFilename(); + int refreshInSeconds = config.getNamespaceConfigRefreshRateInSeconds(); + loader = new XMLNamespaceLoader(configurationPATH, namespaceConfigFileName, refreshInSeconds); - parser = new XMLNamespaceParser(loader); - namespaceIstance = new Namespace(parser); + // Check the validity of namespace. + if (loader instanceof XMLNamespaceLoader) { + XMLNamespaceLoader xmlLoader = (XMLNamespaceLoader) loader; + if (!(xmlLoader.schemaValidity)) { + // Error into the validity ckeck of namespace + log.error("Namespace configuration is not conformant with namespae grammar."); + log.error("Please validate namespace configuration file."); + System.exit(0); + } + } - log.debug("NAMESPACE INITIALIZATION : ... 
done!"); - initialized = true; + log.debug("Namespace Configuration PATH : {}", configurationPATH); + log.debug("Namespace Configuration FILENAME : {}", namespaceConfigFileName); + log.debug("Namespace Configuration GLANCE RATE : {}", refreshInSeconds); - } + parser = new XMLNamespaceParser(loader); + namespaceIstance = new Namespace(parser); - /** - * - * @return Namespace - */ - public static NamespaceInterface getNamespace() { + log.debug("NAMESPACE INITIALIZATION : ... done!"); + initialized = true; + } - if (!(initialized)) { - initializeDirector(); - } - return namespaceIstance; - } + /** @return Namespace */ + public static NamespaceInterface getNamespace() { - /** - * - * @return Namespace - */ - public static NamespaceParser getNamespaceParser() { + if (!(initialized)) { + initializeDirector(); + } + return namespaceIstance; + } - if (!(initialized)) { - initializeDirector(); - } - return parser; - } + /** @return Namespace */ + public static NamespaceParser getNamespaceParser() { - /** - * - * @return Namespace - */ - public static NamespaceLoader getNamespaceLoader() { + if (!(initialized)) { + initializeDirector(); + } + return parser; + } - if (!(initialized)) { - initializeDirector(); - } - return loader; - } + /** @return Namespace */ + public static NamespaceLoader getNamespaceLoader() { - public static Logger getLogger() { + if (!(initialized)) { + initializeDirector(); + } + return loader; + } - return log; - } + public static Logger getLogger() { + return log; + } } diff --git a/src/main/java/it/grid/storm/namespace/NamespaceException.java b/src/main/java/it/grid/storm/namespace/NamespaceException.java index 17648039..cfbfc7e0 100644 --- a/src/main/java/it/grid/storm/namespace/NamespaceException.java +++ b/src/main/java/it/grid/storm/namespace/NamespaceException.java @@ -1,28 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; public class NamespaceException extends Exception { - public NamespaceException() { + public NamespaceException() { - super(); - } + super(); + } - public NamespaceException(String message) { + public NamespaceException(String message) { - super(message); - } + super(message); + } - public NamespaceException(String message, Throwable cause) { + public NamespaceException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public NamespaceException(Throwable cause) { + public NamespaceException(Throwable cause) { - super(cause); - } + super(cause); + } } diff --git a/src/main/java/it/grid/storm/namespace/NamespaceInterface.java b/src/main/java/it/grid/storm/namespace/NamespaceInterface.java index fded9e91..cf24927f 100644 --- a/src/main/java/it/grid/storm/namespace/NamespaceInterface.java +++ b/src/main/java/it/grid/storm/namespace/NamespaceInterface.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; -import java.util.List; -import java.util.Map; - import it.grid.storm.common.types.PFN; import it.grid.storm.filesystem.Space; import it.grid.storm.griduser.GridUserInterface; @@ -15,14 +11,16 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; +import java.util.List; +import java.util.Map; public interface NamespaceInterface { /** * getAllDefinedVFS - * - * @return List : Return a List of VirtualFS containing all the instances - * defined within Namespace + * + * @return List : Return a List of VirtualFS containing all the instances defined + * within Namespace * @throws NamespaceException */ public List getAllDefinedVFS(); @@ -30,17 +28,17 @@ public interface NamespaceInterface { /** * getAllDefinedVFSAsDictionary * - * @return Map : Return a Map of all VirtualFS defined within - * Namespace, indexed by their root-paths + * @return Map : Return a Map of all VirtualFS defined within Namespace, + * indexed by their root-paths * @throws NamespaceException */ public Map getAllDefinedVFSAsDictionary(); /** * getVFSWithQuotaEnabled - * - * @return Collection: Return a collection of VirtualFS with fs type GPFS and - * quota enabled + * + * @return Collection: Return a collection of VirtualFS with fs type GPFS and quota + * enabled * @throws NamespaceException */ public List getVFSWithQuotaEnabled(); @@ -49,46 +47,37 @@ public interface NamespaceInterface { * getAllDefinedMappingRules * * @return List : Return a List of mapping rules containing all the instances defined - * within Namespace + * within Namespace * @throws NamespaceException */ public List getAllDefinedMappingRules(); /** - * - * - * * @param user GridUserInterface : Represents the principal * @return List : Return a List of VirtualFS instances * @throws NamespaceException : Occur when */ - public List getApproachableVFS(GridUserInterface user) - throws NamespaceException; + public List getApproachableVFS(GridUserInterface user) throws NamespaceException; /** - * * @return List : Return a List of readable and writable by anonymous users VirtualFS instances * @throws NamespaceException */ public List getApproachableByAnonymousVFS() throws NamespaceException; /** - * * @return List : Return a List of readable by anonymous users VirtualFS instances * @throws NamespaceException */ public List getReadableByAnonymousVFS() throws NamespaceException; /** - * * @return List : Return a List of readable or writable by anonymous users VirtualFS instances * @throws NamespaceException */ - public List getReadableOrApproachableByAnonymousVFS() - throws NamespaceException; + public List getReadableOrApproachableByAnonymousVFS() throws NamespaceException; /** - * * @param user GridUserInterface * @return VirtualFS * @throws NamespaceException @@ -96,7 +85,6 @@ public List getReadableOrApproachableByAnonymousVFS() public VirtualFS getDefaultVFS(GridUserInterface user) throws NamespaceException; /** - * * @param storageResource StoRI * @param gridUser GridUserInterface * @return boolean @@ -106,7 +94,6 @@ public boolean isApproachable(StoRI storageResource, GridUserInterface gridUser) throws NamespaceException; /** - * * @param surl TSURL * @param user GridUserInterface * @return StoRI @@ -118,7 +105,6 @@ public StoRI resolveStoRIbySURL(TSURL surl, GridUserInterface user) throws UnapprochableSurlException, NamespaceException, InvalidSURLException; /** - * * @param surl TSURL * @return StoRI * @throws 
IllegalArgumentException @@ -129,7 +115,6 @@ public StoRI resolveStoRIbySURL(TSURL surl) throws UnapprochableSurlException, NamespaceException, InvalidSURLException; /** - * * @param absolutePath String * @param user GridUserInterface * @return StoRI @@ -139,7 +124,6 @@ public StoRI resolveStoRIbyAbsolutePath(String absolutePath, GridUserInterface u throws NamespaceException; /** - * * @param absolutePath String * @param vfs VirtualFS * @return StoRI @@ -149,7 +133,6 @@ public StoRI resolveStoRIbyAbsolutePath(String absolutePath, VirtualFS vfs) throws NamespaceException; /** - * * @param absolutePath String * @return StoRI * @throws NamespaceException @@ -157,7 +140,6 @@ public StoRI resolveStoRIbyAbsolutePath(String absolutePath, VirtualFS vfs) public StoRI resolveStoRIbyAbsolutePath(String absolutePath) throws NamespaceException; /** - * * @param absolutePath String * @param user GridUserInterface * @return VirtualFS @@ -167,7 +149,6 @@ public VirtualFS resolveVFSbyAbsolutePath(String absolutePath, GridUserInterface throws NamespaceException; /** - * * @param absolutePath String * @return VirtualFS * @throws NamespaceException @@ -175,7 +156,6 @@ public VirtualFS resolveVFSbyAbsolutePath(String absolutePath, GridUserInterface public VirtualFS resolveVFSbyAbsolutePath(String absolutePath) throws NamespaceException; /** - * * @param pfn PFN * @return StoRI * @throws NamespaceException @@ -183,7 +163,6 @@ public VirtualFS resolveVFSbyAbsolutePath(String absolutePath, GridUserInterface public StoRI resolveStoRIbyPFN(PFN pfn) throws NamespaceException; /** - * * @param file LocalFile * @return VirtualFS * @throws NamespaceException @@ -192,7 +171,6 @@ public VirtualFS resolveVFSbyLocalFile(it.grid.storm.filesystem.LocalFile file) throws NamespaceException; /** - * * @param pfn PFN * @return VirtualFS * @throws NamespaceException @@ -200,7 +178,6 @@ public VirtualFS resolveVFSbyLocalFile(it.grid.storm.filesystem.LocalFile file) public VirtualFS resolveVFSbyPFN(PFN pfn) throws NamespaceException; /** - * * @param user GridUserInterface * @return StoRI * @throws NamespaceException @@ -211,17 +188,14 @@ public VirtualFS resolveVFSbyLocalFile(it.grid.storm.filesystem.LocalFile file) * Method that retrieves a previously reserved Space as identified by the SpaceToken, for the * given new size. If null or Empty TSizeInBytes are supplied, a Space object built off deafult * values is returned instead. - * - * + * * @param totSize TSizeInBytes * @param token TSpaceToken * @return Space */ - public Space retrieveSpaceByToken(TSizeInBytes totSize, TSpaceToken token); /** - * * @param user GridUserInterface * @return String * @throws NamespaceException @@ -249,7 +223,5 @@ public VirtualFS resolveVFSbyLocalFile(it.grid.storm.filesystem.LocalFile file) * @return * @throws NamespaceException */ - public VirtualFS resolveVFSbySpaceToken(TSpaceToken spaceToken) - throws NamespaceException; - + public VirtualFS resolveVFSbySpaceToken(TSpaceToken spaceToken) throws NamespaceException; } diff --git a/src/main/java/it/grid/storm/namespace/NamespaceValidator.java b/src/main/java/it/grid/storm/namespace/NamespaceValidator.java index 6c413684..7b2ec306 100644 --- a/src/main/java/it/grid/storm/namespace/NamespaceValidator.java +++ b/src/main/java/it/grid/storm/namespace/NamespaceValidator.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; @@ -13,87 +12,86 @@ public class NamespaceValidator { - private Logger log = LoggerFactory.getLogger(NamespaceValidator.class); - - public boolean validateSchema(String SchemaUrl, String XmlDocumentUrl) { - boolean valid = false; - SAXParser parser = new SAXParser(); - try { - parser.setFeature("http://xml.org/sax/features/validation", true); - parser.setFeature("http://apache.org/xml/features/validation/schema", - true); - parser.setFeature( - "http://apache.org/xml/features/validation/schema-full-checking", true); - parser - .setProperty( - "http://apache.org/xml/properties/schema/external-noNamespaceSchemaLocation", - SchemaUrl); - Validator handler = new Validator(); - parser.setErrorHandler(handler); - parser.parse(XmlDocumentUrl); - if (handler.validationError == true) { - manageErrorWithinNamespace(handler); - } else { - log.info("Namespace Document is valid with Schema"); - valid = true; - } - } catch (Throwable e) { - log.error(e.getMessage(), e); - } - return valid; - } - - private void manageErrorWithinNamespace( - it.grid.storm.namespace.NamespaceValidator.Validator handler) { - - log.error("namespace.xml validation error."); - log.error("Error: {} line: {}, column: {}, entity: {}", - handler.saxParseException.getMessage(), - handler.saxParseException.getLineNumber(), - handler.saxParseException.getColumnNumber(), - handler.saxParseException.getSystemId()); - } - - private class Validator extends DefaultHandler { - - public boolean validationError = false; - public SAXParseException saxParseException = null; - - @Override - public void error(SAXParseException exception) throws SAXException { - - log.error("XML error: {}. Line: {}, column: {}, entity: {}", - exception.getMessage(), - exception.getLineNumber(), - exception.getColumnNumber(), - exception.getSystemId()); - - validationError = true; - saxParseException = exception; - } - - @Override - public void fatalError(SAXParseException exception) throws SAXException { - - log.error("XML FATAL error: {}. Line: {}, column: {}, entity: {}", - exception.getMessage(), - exception.getLineNumber(), - exception.getColumnNumber(), - exception.getSystemId()); - - validationError = true; - saxParseException = exception; - } - - @Override - public void warning(SAXParseException exception) throws SAXException { - - log.warn("XML warning: {}. 
Line: {}, column: {}, entity: {}", - exception.getMessage(), - exception.getLineNumber(), - exception.getColumnNumber(), - exception.getSystemId()); - - } - } + private Logger log = LoggerFactory.getLogger(NamespaceValidator.class); + + public boolean validateSchema(String SchemaUrl, String XmlDocumentUrl) { + boolean valid = false; + SAXParser parser = new SAXParser(); + try { + parser.setFeature("http://xml.org/sax/features/validation", true); + parser.setFeature("http://apache.org/xml/features/validation/schema", true); + parser.setFeature("http://apache.org/xml/features/validation/schema-full-checking", true); + parser.setProperty( + "http://apache.org/xml/properties/schema/external-noNamespaceSchemaLocation", SchemaUrl); + Validator handler = new Validator(); + parser.setErrorHandler(handler); + parser.parse(XmlDocumentUrl); + if (handler.validationError == true) { + manageErrorWithinNamespace(handler); + } else { + log.info("Namespace Document is valid with Schema"); + valid = true; + } + } catch (Throwable e) { + log.error(e.getMessage(), e); + } + return valid; + } + + private void manageErrorWithinNamespace( + it.grid.storm.namespace.NamespaceValidator.Validator handler) { + + log.error("namespace.xml validation error."); + log.error( + "Error: {} line: {}, column: {}, entity: {}", + handler.saxParseException.getMessage(), + handler.saxParseException.getLineNumber(), + handler.saxParseException.getColumnNumber(), + handler.saxParseException.getSystemId()); + } + + private class Validator extends DefaultHandler { + + public boolean validationError = false; + public SAXParseException saxParseException = null; + + @Override + public void error(SAXParseException exception) throws SAXException { + + log.error( + "XML error: {}. Line: {}, column: {}, entity: {}", + exception.getMessage(), + exception.getLineNumber(), + exception.getColumnNumber(), + exception.getSystemId()); + + validationError = true; + saxParseException = exception; + } + + @Override + public void fatalError(SAXParseException exception) throws SAXException { + + log.error( + "XML FATAL error: {}. Line: {}, column: {}, entity: {}", + exception.getMessage(), + exception.getLineNumber(), + exception.getColumnNumber(), + exception.getSystemId()); + + validationError = true; + saxParseException = exception; + } + + @Override + public void warning(SAXParseException exception) throws SAXException { + + log.warn( + "XML warning: {}. Line: {}, column: {}, entity: {}", + exception.getMessage(), + exception.getLineNumber(), + exception.getColumnNumber(), + exception.getSystemId()); + } + } } diff --git a/src/main/java/it/grid/storm/namespace/PropertyInterface.java b/src/main/java/it/grid/storm/namespace/PropertyInterface.java index 3d6820bf..98e8326f 100644 --- a/src/main/java/it/grid/storm/namespace/PropertyInterface.java +++ b/src/main/java/it/grid/storm/namespace/PropertyInterface.java @@ -1,28 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; -import it.grid.storm.srm.types.TSizeInBytes; -import it.grid.storm.namespace.model.RetentionPolicy; -import it.grid.storm.namespace.model.ExpirationMode; import it.grid.storm.namespace.model.AccessLatency; +import it.grid.storm.namespace.model.ExpirationMode; +import it.grid.storm.namespace.model.RetentionPolicy; +import it.grid.storm.srm.types.TSizeInBytes; public interface PropertyInterface { - public TSizeInBytes getTotalOnlineSize(); - - public TSizeInBytes getTotalNearlineSize(); + public TSizeInBytes getTotalOnlineSize(); - public RetentionPolicy getRetentionPolicy(); + public TSizeInBytes getTotalNearlineSize(); - public ExpirationMode getExpirationMode(); + public RetentionPolicy getRetentionPolicy(); - public AccessLatency getAccessLatency(); + public ExpirationMode getExpirationMode(); - public boolean isOnlineSpaceLimited(); + public AccessLatency getAccessLatency(); - public boolean hasLimitedSize(); + public boolean isOnlineSpaceLimited(); + public boolean hasLimitedSize(); } diff --git a/src/main/java/it/grid/storm/namespace/StoRI.java b/src/main/java/it/grid/storm/namespace/StoRI.java index 9670c9be..6ec99bb0 100644 --- a/src/main/java/it/grid/storm/namespace/StoRI.java +++ b/src/main/java/it/grid/storm/namespace/StoRI.java @@ -1,13 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - import it.grid.storm.common.types.PFN; import it.grid.storm.common.types.StFN; import it.grid.storm.common.types.TURLPrefix; @@ -23,71 +18,70 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TTURL; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; public interface StoRI { - public void setStoRIType(StoRIType type); + public void setStoRIType(StoRIType type); - public TTURL getTURL(TURLPrefix prefixOfAcceptedTransferProtocols) - throws IllegalArgumentException, InvalidGetTURLProtocolException, - TURLBuildingException; + public TTURL getTURL(TURLPrefix prefixOfAcceptedTransferProtocols) + throws IllegalArgumentException, InvalidGetTURLProtocolException, TURLBuildingException; - public TSURL getSURL(); + public TSURL getSURL(); - public PFN getPFN(); + public PFN getPFN(); - public StFN getStFN(); - - public StFN getStFNFromMappingRule(); + public StFN getStFN(); - public String getRelativePath(); + public StFN getStFNFromMappingRule(); - public String getRelativeStFN(); + public String getRelativePath(); - public TLifeTimeInSeconds getFileLifeTime(); + public String getRelativeStFN(); - public Date getFileStartTime(); + public TLifeTimeInSeconds getFileLifeTime(); - public StoRIType getStoRIType(); + public Date getFileStartTime(); - public Space getSpace(); + public StoRIType getStoRIType(); - public void setSpace(Space space); + public Space getSpace(); - public LocalFile getLocalFile(); + public void setSpace(Space space); - public VirtualFS getVirtualFileSystem(); + public LocalFile getLocalFile(); - public String getStFNRoot(); + public VirtualFS getVirtualFileSystem(); - public String getStFNPath(); + public String getStFNRoot(); - public String getFilename(); + public String getStFNPath(); - public void setStFNRoot(String stfnRoot); + public 
String getFilename(); - public void setMappingRule(MappingRule winnerRule); + public void setStFNRoot(String stfnRoot); - public MappingRule getMappingRule(); + public void setMappingRule(MappingRule winnerRule); - public ArrayList getChildren(TDirOption dirOption) - throws InvalidDescendantsEmptyRequestException, - InvalidDescendantsPathRequestException, - InvalidDescendantsFileRequestException; + public MappingRule getMappingRule(); - public String getAbsolutePath(); + public ArrayList getChildren(TDirOption dirOption) + throws InvalidDescendantsEmptyRequestException, InvalidDescendantsPathRequestException, + InvalidDescendantsFileRequestException; - public boolean hasJustInTimeACLs(); + public String getAbsolutePath(); - public List getParents(); + public boolean hasJustInTimeACLs(); - public void allotSpaceForFile(TSizeInBytes totSize) - throws ReservationException; + public List getParents(); - public void allotSpaceByToken(TSpaceToken token) throws ReservationException, - ExpiredSpaceTokenException; + public void allotSpaceForFile(TSizeInBytes totSize) throws ReservationException; - public void allotSpaceByToken(TSpaceToken token, TSizeInBytes totSize) - throws ReservationException, ExpiredSpaceTokenException; + public void allotSpaceByToken(TSpaceToken token) + throws ReservationException, ExpiredSpaceTokenException; + public void allotSpaceByToken(TSpaceToken token, TSizeInBytes totSize) + throws ReservationException, ExpiredSpaceTokenException; } diff --git a/src/main/java/it/grid/storm/namespace/StoRIImpl.java b/src/main/java/it/grid/storm/namespace/StoRIImpl.java index 472b30fc..ed3578e3 100644 --- a/src/main/java/it/grid/storm/namespace/StoRIImpl.java +++ b/src/main/java/it/grid/storm/namespace/StoRIImpl.java @@ -1,20 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; import static org.apache.commons.lang.StringUtils.join; -import java.io.File; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Collection; -import java.util.Date; -import java.util.List; - -import org.slf4j.Logger; - import it.grid.storm.balancer.BalancingStrategy; import it.grid.storm.balancer.Node; import it.grid.storm.balancer.exception.BalancingStrategyException; @@ -48,655 +38,633 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TTURL; +import java.io.File; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Collection; +import java.util.Date; +import java.util.List; +import org.slf4j.Logger; public class StoRIImpl implements StoRI { - private Logger log = NamespaceDirector.getLogger(); - - private TSURL surl; - private PFN pfn; - private ACLMode aclMode = ACLMode.UNDEF; - private TLifeTimeInSeconds lifetime = null; - private Date startTime = null; - private LocalFile localFile = null; - private Space space; - - private VirtualFS vfs; - private FilesystemIF fs; - private SpaceSystem spaceDriver; - private StoRIType type; - private Capability capability; - - // Elements of Name of StoRI - private String stfn; - private String vfsRoot; - private String relativeStFN; - private String relativePath; - private String fileName; - private String stfnPath; - private String stfnRoot; - - private MappingRule winnerRule; - - // Boolean status for full detailed metadata - private boolean volatileInformationAreSet = false; - - public StoRIImpl(VirtualFS vfs, MappingRule winnerRule, String relativeStFN, StoRIType type) { - - if (vfs != null) { - this.vfs = vfs; - capability = (Capability) vfs.getCapabilities(); - } else { - log.error("StoRI built without VFS!"); - } - - if (winnerRule != null) { - stfnRoot = winnerRule.getStFNRoot(); - stfn = stfnRoot + NamingConst.SEPARATOR + relativeStFN; - - vfsRoot = vfs.getRootPath(); - - this.relativeStFN = relativeStFN; - - stfnPath = NamespaceUtil.getStFNPath(stfn); - - relativePath = NamespaceUtil.consumeFileName(relativeStFN); - - if (relativePath != null) { - if (relativePath.startsWith(NamingConst.SEPARATOR)) { - relativePath = relativePath.substring(1); - } - } else { - relativePath = "/"; - } - - fileName = NamespaceUtil.getFileName(relativeStFN); - log.debug("StFN Filename : {} [StFN = '{}']", fileName, - relativeStFN); - - if (type == null) { - if (relativeStFN.endsWith(NamingConst.SEPARATOR)) { - type = StoRIType.FOLDER; - } else { - type = StoRIType.UNKNOWN; - } - } else { - this.type = type; - } - - } else { - log.warn("StoRI built without mapping rule"); - } - } - - public StoRIImpl(VirtualFS vfs, String stfnStr, TLifeTimeInSeconds lifetime, StoRIType type) { - - this.vfs = vfs; - this.capability = (Capability) vfs.getCapabilities(); - // Relative path has to be a path in a relative form! 
(without "/" at - // begins) - if (relativePath != null) { - if (relativePath.startsWith(NamingConst.SEPARATOR)) { - this.relativePath = relativePath.substring(1); - } - } else { - this.relativePath = "/"; - } - - this.lifetime = lifetime; - - if (type == null) { - this.type = StoRIType.UNKNOWN; - } else { - this.type = type; - } - - this.stfnRoot = null; - - this.fileName = NamespaceUtil.getFileName(stfnStr); - log.debug("StFN Filename : {} [StFN = '{}']", fileName, - stfnStr); - - this.stfnPath = NamespaceUtil.getStFNPath(stfnStr); - log.debug("StFN StFNPath : {} [StFN = '{}']", stfnPath, stfnStr); - - } - - public void allotSpaceByToken(TSpaceToken token) throws ReservationException, - ExpiredSpaceTokenException { - - // Retrieve SpaceSystem Driver - if (spaceDriver == null) { - try { - this.spaceDriver = vfs.getSpaceSystemDriverInstance(); - } catch (NamespaceException e) { - log.error(e.getMessage(), e); - throw new ReservationException( - "Error while retrieving Space System Driver for VFS", e); - } - } - - try { - vfs.useAllSpaceForFile(token, this); - } catch (NamespaceException e) { - log.error("Error using space token {} for file {}: {}", - token, fileName, e.getMessage(),e); - throw new ReservationException(e.getMessage(), e); - } - - } - - public void allotSpaceByToken(TSpaceToken token, TSizeInBytes totSize) - throws ReservationException, ExpiredSpaceTokenException { - - if (spaceDriver == null) { - try { - this.spaceDriver = vfs.getSpaceSystemDriverInstance(); - } catch (NamespaceException e) { - log.error(e.getMessage(),e); - throw new ReservationException( - "Error while retrieving Space System Driver for VFS", e); - } - } - - try { - vfs.useSpaceForFile(token, this, totSize); - } catch (NamespaceException e) { - log.error("Error using space token {} for file {}: {}", - token, fileName, e.getMessage(),e); - throw new ReservationException(e.getMessage(), e); - } - - } - - public void allotSpaceForFile(TSizeInBytes totSize) - throws ReservationException { - - if (spaceDriver == null) { - try { - this.spaceDriver = vfs.getSpaceSystemDriverInstance(); - } catch (NamespaceException e) { - log.error("Error while retrieving Space System Driver for VFS {}", - e.getMessage(), e); - - throw new ReservationException( - "Error while retrieving Space System Driver for VFS", e); - } - } - - try { - vfs.makeSilhouetteForFile(this, totSize); - } catch (NamespaceException e) { - log.error(e.getMessage(),e); - throw new ReservationException( - "Error while constructing 'Space Silhouette' for " + this.fileName, e); - } - - log.debug("Space built. 
Space " + this.getSpace().getSpaceFile().getPath()); - this.getSpace().allot(); - } - - public String getAbsolutePath() { - return vfs.getRootPath() + NamingConst.SEPARATOR + relativeStFN; - } - - public TLifeTimeInSeconds getFileLifeTime() { - if (!(volatileInformationAreSet)) { - setVolatileInformation(); - } - return lifetime; - } - - public String getFilename() { - - return this.fileName; - } - - public Date getFileStartTime() { - - if (!(volatileInformationAreSet)) { - setVolatileInformation(); - } - return startTime; - } - - public ArrayList getChildren(TDirOption dirOption) - throws InvalidDescendantsEmptyRequestException, - InvalidDescendantsPathRequestException, - InvalidDescendantsFileRequestException { - - ArrayList stoRIList = new ArrayList(); - File fileHandle = new File(getAbsolutePath()); - - if (!fileHandle.isDirectory()) { - if (fileHandle.isFile()) { - log.error("SURL represents a File, not a Directory!"); - throw new InvalidDescendantsFileRequestException(fileHandle); - } else { - log.warn("SURL does not exists!"); - throw new InvalidDescendantsPathRequestException(fileHandle); - } - } else { // SURL point to an existent directory. - // Create ArrayList containing all Valid fileName path found in - // PFN of StoRI's SURL - PathCreator pCreator = new PathCreator(fileHandle, - dirOption.isAllLevelRecursive(), 1); - Collection pathList = pCreator.generateChildren(); - if (pathList.size() == 0) { - log.debug("SURL point to an EMPTY DIRECTORY"); - throw new InvalidDescendantsEmptyRequestException(fileHandle, pathList); - } else { // Creation of StoRI LIST - NamespaceInterface namespace = NamespaceDirector.getNamespace(); - for (String childPath : pathList) { - log.debug(":Creation of new StoRI with path: {}", - childPath); - try { - - StoRI childStorI = namespace.resolveStoRIbyAbsolutePath(childPath, vfs); - childStorI.setMappingRule(getMappingRule()); - - stoRIList.add(childStorI); - } catch (NamespaceException ex) { - log.error("Error occurred while resolving StoRI by absolute path", - ex); - } - } - } - } - return stoRIList; - } - - public LocalFile getLocalFile() { - - if (localFile == null) { - try { - fs = vfs.getFilesystem(); - } catch (NamespaceException ex) { - log.error("Error while retrieving FS driver ", ex); - } - localFile = new LocalFile(getAbsolutePath(), fs); - } - return localFile; - } - - public MappingRule getMappingRule() { - return this.winnerRule; - } - - public List getParents() { - - StoRI createdStoRI = null; - ArrayList parentList = new ArrayList(); - String consumeElements = this.relativePath; - String consumed; - boolean lastElements = false; - - do { - createdStoRI = new StoRIImpl(this.vfs, this.winnerRule, consumeElements, - StoRIType.FOLDER); - parentList.add(createdStoRI); - consumed = NamespaceUtil.consumeElement(consumeElements); - if (consumed.equals(consumeElements)) { - lastElements = true; - } else { - consumeElements = consumed; - } - } while ((!lastElements)); - - return parentList; - } - - public PFN getPFN() { - - if (pfn == null) { - try { - this.pfn = PFN.make(getAbsolutePath()); - } catch (InvalidPFNAttributeException e) { - log.error(e.getMessage(),e); - } - } - return this.pfn; - } - - public String getRelativePath() { - - return this.relativePath; - } - - public String getRelativeStFN() { - - return this.relativeStFN; - } - - public Space getSpace() { - - if (space == null) { - log.error("No space bound with this StoRI!"); - return null; - } - return this.space; - } - - public StFN getStFN() { - - StFN stfn = null; - if 
(this.surl == null) { - getSURL(); - } - stfn = surl.sfn().stfn(); - return stfn; - } - - public String getStFNPath() { - - return this.stfnPath; - } - - public String getStFNRoot() { - - return this.stfnRoot; - } - - public StoRIType getStoRIType() { - - return this.type; - } - - public TSURL getSURL() { - - /** - * The String passed to TSURL.makeFromString MUST contains a valid TSURL in - * string format, not only relativePath. - */ - if (this.surl == null) { - try { - this.surl = TSURL.makeFromStringValidate(buildSURLString()); - } catch (Throwable e) { - log.error("Unable to build the SURL with relative path: {}. {}", - relativePath, e.getMessage(), e); - } - } - return surl; - } - - public TTURL getTURL(TURLPrefix desiredProtocols) - throws IllegalArgumentException, InvalidGetTURLProtocolException, - TURLBuildingException { - - TTURL resultTURL = null; - - if (desiredProtocols == null || desiredProtocols.size() == 0) { - log - .error(" request with NULL or empty prefixOfAcceptedTransferProtocol!"); - throw new IllegalArgumentException( - "unable to build the TTURL, invalid arguments: desiredProtocols=" - + desiredProtocols); - } else { - - // Within the request there are some protocol preferences - // Calculate the intersection between Desired Protocols and Available - // Protocols - List desiredP = new ArrayList<>(desiredProtocols.getDesiredProtocols()); - List availableP = new ArrayList<>(capability.getAllManagedProtocols()); - desiredP.retainAll(availableP); - - if (desiredP.isEmpty()) { - String msg = String.format("None of [%s] protocols matches the available " - + "protocols [%s]", join(desiredP, ','), join(availableP, ',')); - log.error(msg); - throw new InvalidGetTURLProtocolException(msg); - - } else { - - log.debug("Protocol matching.. Intersection size: {}", - desiredP.size()); - - Protocol choosen = null; - Authority authority = null; - int index = 0; - boolean turlBuilt = false; - while (!turlBuilt && index < desiredP.size()) { - choosen = desiredP.get(index); - authority = null; - log.debug("Selected Protocol: {}", choosen); - if (capability.isPooledProtocol(choosen)) { - log.debug("The protocol selected is in POOL Configuration"); - try { - authority = getPooledAuthority(choosen); - } catch (BalancingStrategyException e) { - log - .warn("Unable to get the pool member to be used to build the turl. BalancerException : {}", - e.getMessage()); - index++; - continue; - } - } else { - log.debug("The protocol selected is in NON-POOL Configuration"); - TransportProtocol transProt = null; - List protList = capability - .getManagedProtocolByScheme(choosen); - if (protList.size() > 1) { // Strange case - log - .warn("More than one protocol {}" - + " defined but NOT in POOL Configuration. 
Taking the first one.", - choosen); - } - transProt = protList.get(0); - authority = transProt.getAuthority(); - } - - if (choosen.equals(Protocol.HTTP) || choosen.equals(Protocol.HTTPS)){ - resultTURL = buildHTTPTURL(choosen,authority); - } else { - resultTURL = buildTURL(choosen, authority); - } - - turlBuilt = true; - } - if (!turlBuilt) { - throw new TURLBuildingException( - "Unable to build the turl given protocols " + desiredP.toString()); - } - } - } - return resultTURL; - } - - public VirtualFS getVirtualFileSystem() { - return this.vfs; - } - - public boolean hasJustInTimeACLs() { - - boolean result = true; - - if (aclMode.equals(ACLMode.UNDEF)) { - this.aclMode = vfs.getCapabilities().getACLMode(); - } - if (aclMode.equals(ACLMode.JUST_IN_TIME)) { - result = true; - } else { - result = false; - } - - return result; - } - - - public void setMappingRule(MappingRule winnerRule) { - this.winnerRule = winnerRule; - } - - public void setSpace(Space space) { - this.space = space; - } - - public void setStFNRoot(String stfnRoot) { - - this.stfnRoot = stfnRoot; - } - - public void setStoRIType(StoRIType type) { - - this.type = type; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - - sb.append("\n"); - sb.append(" stori.stfn : " + this.getStFN().toString() + "\n"); - sb.append(" stori.vfs-root :" + this.vfsRoot + "\n"); - sb.append(" stori.absolutePath : " + this.getAbsolutePath() + "\n"); - sb.append(" stori.vfs NAME : " + this.getVFSName() + "\n"); - sb.append(" stori.stfn FileName : " + this.fileName + "\n"); - sb.append(" stori.stfn StFN path : " + this.stfnPath + "\n"); - sb.append(" stori.stfn rel. Path : " + this.relativePath + "\n"); - sb.append(" stori.relative StFN : " + this.relativeStFN + "\n"); - sb.append(" stori.stfn-root : " + this.stfnRoot + "\n"); - sb.append(" story.type : " + this.type + "\n"); - sb.append(" stori.SURL : " + this.getSURL() + "\n"); - sb.append(" stori.localFile : " + this.getLocalFile() + "\n"); - sb.append(" stori.mappingRule : " + this.getMappingRule() + "\n"); - - return sb.toString(); - } - - private String buildSURLString() throws NamespaceException { - String stfn = stfnRoot + NamingConst.SEPARATOR + relativeStFN; - SURL surl = new SURL(stfn); - return surl.toString(); - } - - private TTURL buildHTTPTURL(Protocol p, Authority authority){ - - String prefix = Configuration.getInstance().getHTTPTURLPrefix(); - StringBuilder sb = new StringBuilder(); - sb.append(p.getProtocolPrefix()); - sb.append(authority); - - if ( prefix != null){ + private Logger log = NamespaceDirector.getLogger(); + + private TSURL surl; + private PFN pfn; + private ACLMode aclMode = ACLMode.UNDEF; + private TLifeTimeInSeconds lifetime = null; + private Date startTime = null; + private LocalFile localFile = null; + private Space space; + + private VirtualFS vfs; + private FilesystemIF fs; + private SpaceSystem spaceDriver; + private StoRIType type; + private Capability capability; + + // Elements of Name of StoRI + private String stfn; + private String vfsRoot; + private String relativeStFN; + private String relativePath; + private String fileName; + private String stfnPath; + private String stfnRoot; + + private MappingRule winnerRule; + + // Boolean status for full detailed metadata + private boolean volatileInformationAreSet = false; + + public StoRIImpl(VirtualFS vfs, MappingRule winnerRule, String relativeStFN, StoRIType type) { + + if (vfs != null) { + this.vfs = vfs; + capability = (Capability) vfs.getCapabilities(); + } else { 
+ log.error("StoRI built without VFS!"); + } + + if (winnerRule != null) { + stfnRoot = winnerRule.getStFNRoot(); + stfn = stfnRoot + NamingConst.SEPARATOR + relativeStFN; + + vfsRoot = vfs.getRootPath(); + + this.relativeStFN = relativeStFN; + + stfnPath = NamespaceUtil.getStFNPath(stfn); + + relativePath = NamespaceUtil.consumeFileName(relativeStFN); + + if (relativePath != null) { + if (relativePath.startsWith(NamingConst.SEPARATOR)) { + relativePath = relativePath.substring(1); + } + } else { + relativePath = "/"; + } + + fileName = NamespaceUtil.getFileName(relativeStFN); + log.debug("StFN Filename : {} [StFN = '{}']", fileName, relativeStFN); + + if (type == null) { + if (relativeStFN.endsWith(NamingConst.SEPARATOR)) { + type = StoRIType.FOLDER; + } else { + type = StoRIType.UNKNOWN; + } + } else { + this.type = type; + } + + } else { + log.warn("StoRI built without mapping rule"); + } + } + + public StoRIImpl(VirtualFS vfs, String stfnStr, TLifeTimeInSeconds lifetime, StoRIType type) { + + this.vfs = vfs; + this.capability = (Capability) vfs.getCapabilities(); + // Relative path has to be a path in a relative form! (without "/" at + // begins) + if (relativePath != null) { + if (relativePath.startsWith(NamingConst.SEPARATOR)) { + this.relativePath = relativePath.substring(1); + } + } else { + this.relativePath = "/"; + } + + this.lifetime = lifetime; + + if (type == null) { + this.type = StoRIType.UNKNOWN; + } else { + this.type = type; + } + + this.stfnRoot = null; + + this.fileName = NamespaceUtil.getFileName(stfnStr); + log.debug("StFN Filename : {} [StFN = '{}']", fileName, stfnStr); + + this.stfnPath = NamespaceUtil.getStFNPath(stfnStr); + log.debug("StFN StFNPath : {} [StFN = '{}']", stfnPath, stfnStr); + } + + public void allotSpaceByToken(TSpaceToken token) + throws ReservationException, ExpiredSpaceTokenException { + + // Retrieve SpaceSystem Driver + if (spaceDriver == null) { + try { + this.spaceDriver = vfs.getSpaceSystemDriverInstance(); + } catch (NamespaceException e) { + log.error(e.getMessage(), e); + throw new ReservationException("Error while retrieving Space System Driver for VFS", e); + } + } + + try { + vfs.useAllSpaceForFile(token, this); + } catch (NamespaceException e) { + log.error("Error using space token {} for file {}: {}", token, fileName, e.getMessage(), e); + throw new ReservationException(e.getMessage(), e); + } + } + + public void allotSpaceByToken(TSpaceToken token, TSizeInBytes totSize) + throws ReservationException, ExpiredSpaceTokenException { + + if (spaceDriver == null) { + try { + this.spaceDriver = vfs.getSpaceSystemDriverInstance(); + } catch (NamespaceException e) { + log.error(e.getMessage(), e); + throw new ReservationException("Error while retrieving Space System Driver for VFS", e); + } + } + + try { + vfs.useSpaceForFile(token, this, totSize); + } catch (NamespaceException e) { + log.error("Error using space token {} for file {}: {}", token, fileName, e.getMessage(), e); + throw new ReservationException(e.getMessage(), e); + } + } + + public void allotSpaceForFile(TSizeInBytes totSize) throws ReservationException { + + if (spaceDriver == null) { + try { + this.spaceDriver = vfs.getSpaceSystemDriverInstance(); + } catch (NamespaceException e) { + log.error("Error while retrieving Space System Driver for VFS {}", e.getMessage(), e); + + throw new ReservationException("Error while retrieving Space System Driver for VFS", e); + } + } + + try { + vfs.makeSilhouetteForFile(this, totSize); + } catch (NamespaceException e) { + 
log.error(e.getMessage(), e); + throw new ReservationException( + "Error while constructing 'Space Silhouette' for " + this.fileName, e); + } + + log.debug("Space built. Space " + this.getSpace().getSpaceFile().getPath()); + this.getSpace().allot(); + } + + public String getAbsolutePath() { + return vfs.getRootPath() + NamingConst.SEPARATOR + relativeStFN; + } + + public TLifeTimeInSeconds getFileLifeTime() { + if (!(volatileInformationAreSet)) { + setVolatileInformation(); + } + return lifetime; + } + + public String getFilename() { + + return this.fileName; + } + + public Date getFileStartTime() { + + if (!(volatileInformationAreSet)) { + setVolatileInformation(); + } + return startTime; + } + + public ArrayList getChildren(TDirOption dirOption) + throws InvalidDescendantsEmptyRequestException, InvalidDescendantsPathRequestException, + InvalidDescendantsFileRequestException { + + ArrayList stoRIList = new ArrayList(); + File fileHandle = new File(getAbsolutePath()); + + if (!fileHandle.isDirectory()) { + if (fileHandle.isFile()) { + log.error("SURL represents a File, not a Directory!"); + throw new InvalidDescendantsFileRequestException(fileHandle); + } else { + log.warn("SURL does not exists!"); + throw new InvalidDescendantsPathRequestException(fileHandle); + } + } else { // SURL point to an existent directory. + // Create ArrayList containing all Valid fileName path found in + // PFN of StoRI's SURL + PathCreator pCreator = new PathCreator(fileHandle, dirOption.isAllLevelRecursive(), 1); + Collection pathList = pCreator.generateChildren(); + if (pathList.size() == 0) { + log.debug("SURL point to an EMPTY DIRECTORY"); + throw new InvalidDescendantsEmptyRequestException(fileHandle, pathList); + } else { // Creation of StoRI LIST + NamespaceInterface namespace = NamespaceDirector.getNamespace(); + for (String childPath : pathList) { + log.debug(":Creation of new StoRI with path: {}", childPath); + try { + + StoRI childStorI = namespace.resolveStoRIbyAbsolutePath(childPath, vfs); + childStorI.setMappingRule(getMappingRule()); + + stoRIList.add(childStorI); + } catch (NamespaceException ex) { + log.error("Error occurred while resolving StoRI by absolute path", ex); + } + } + } + } + return stoRIList; + } + + public LocalFile getLocalFile() { + + if (localFile == null) { + try { + fs = vfs.getFilesystem(); + } catch (NamespaceException ex) { + log.error("Error while retrieving FS driver ", ex); + } + localFile = new LocalFile(getAbsolutePath(), fs); + } + return localFile; + } + + public MappingRule getMappingRule() { + return this.winnerRule; + } + + public List getParents() { + + StoRI createdStoRI = null; + ArrayList parentList = new ArrayList(); + String consumeElements = this.relativePath; + String consumed; + boolean lastElements = false; + + do { + createdStoRI = new StoRIImpl(this.vfs, this.winnerRule, consumeElements, StoRIType.FOLDER); + parentList.add(createdStoRI); + consumed = NamespaceUtil.consumeElement(consumeElements); + if (consumed.equals(consumeElements)) { + lastElements = true; + } else { + consumeElements = consumed; + } + } while ((!lastElements)); + + return parentList; + } + + public PFN getPFN() { + + if (pfn == null) { + try { + this.pfn = PFN.make(getAbsolutePath()); + } catch (InvalidPFNAttributeException e) { + log.error(e.getMessage(), e); + } + } + return this.pfn; + } + + public String getRelativePath() { + + return this.relativePath; + } + + public String getRelativeStFN() { + + return this.relativeStFN; + } + + public Space getSpace() { + + if (space == 
null) { + log.error("No space bound with this StoRI!"); + return null; + } + return this.space; + } + + public StFN getStFN() { + + StFN stfn = null; + if (this.surl == null) { + getSURL(); + } + stfn = surl.sfn().stfn(); + return stfn; + } + + public String getStFNPath() { + + return this.stfnPath; + } + + public String getStFNRoot() { + + return this.stfnRoot; + } + + public StoRIType getStoRIType() { + + return this.type; + } + + public TSURL getSURL() { + + /** + * The String passed to TSURL.makeFromString MUST contains a valid TSURL in string format, not + * only relativePath. + */ + if (this.surl == null) { + try { + this.surl = TSURL.makeFromStringValidate(buildSURLString()); + } catch (Throwable e) { + log.error( + "Unable to build the SURL with relative path: {}. {}", relativePath, e.getMessage(), e); + } + } + return surl; + } + + public TTURL getTURL(TURLPrefix desiredProtocols) + throws IllegalArgumentException, InvalidGetTURLProtocolException, TURLBuildingException { + + TTURL resultTURL = null; + + if (desiredProtocols == null || desiredProtocols.size() == 0) { + log.error(" request with NULL or empty prefixOfAcceptedTransferProtocol!"); + throw new IllegalArgumentException( + "unable to build the TTURL, invalid arguments: desiredProtocols=" + desiredProtocols); + } else { + + // Within the request there are some protocol preferences + // Calculate the intersection between Desired Protocols and Available + // Protocols + List desiredP = new ArrayList<>(desiredProtocols.getDesiredProtocols()); + List availableP = new ArrayList<>(capability.getAllManagedProtocols()); + desiredP.retainAll(availableP); + + if (desiredP.isEmpty()) { + String msg = + String.format( + "None of [%s] protocols matches the available " + "protocols [%s]", + join(desiredP, ','), join(availableP, ',')); + log.error(msg); + throw new InvalidGetTURLProtocolException(msg); + + } else { + + log.debug("Protocol matching.. Intersection size: {}", desiredP.size()); + + Protocol choosen = null; + Authority authority = null; + int index = 0; + boolean turlBuilt = false; + while (!turlBuilt && index < desiredP.size()) { + choosen = desiredP.get(index); + authority = null; + log.debug("Selected Protocol: {}", choosen); + if (capability.isPooledProtocol(choosen)) { + log.debug("The protocol selected is in POOL Configuration"); + try { + authority = getPooledAuthority(choosen); + } catch (BalancingStrategyException e) { + log.warn( + "Unable to get the pool member to be used to build the turl. BalancerException : {}", + e.getMessage()); + index++; + continue; + } + } else { + log.debug("The protocol selected is in NON-POOL Configuration"); + TransportProtocol transProt = null; + List protList = capability.getManagedProtocolByScheme(choosen); + if (protList.size() > 1) { // Strange case + log.warn( + "More than one protocol {}" + + " defined but NOT in POOL Configuration. 
Taking the first one.", + choosen); + } + transProt = protList.get(0); + authority = transProt.getAuthority(); + } + + if (choosen.equals(Protocol.HTTP) || choosen.equals(Protocol.HTTPS)) { + resultTURL = buildHTTPTURL(choosen, authority); + } else { + resultTURL = buildTURL(choosen, authority); + } + + turlBuilt = true; + } + if (!turlBuilt) { + throw new TURLBuildingException( + "Unable to build the turl given protocols " + desiredP.toString()); + } + } + } + return resultTURL; + } + + public VirtualFS getVirtualFileSystem() { + return this.vfs; + } + + public boolean hasJustInTimeACLs() { + + boolean result = true; + + if (aclMode.equals(ACLMode.UNDEF)) { + this.aclMode = vfs.getCapabilities().getACLMode(); + } + if (aclMode.equals(ACLMode.JUST_IN_TIME)) { + result = true; + } else { + result = false; + } + + return result; + } + + public void setMappingRule(MappingRule winnerRule) { + this.winnerRule = winnerRule; + } + + public void setSpace(Space space) { + this.space = space; + } + + public void setStFNRoot(String stfnRoot) { + + this.stfnRoot = stfnRoot; + } + + public void setStoRIType(StoRIType type) { + + this.type = type; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + + sb.append("\n"); + sb.append(" stori.stfn : " + this.getStFN().toString() + "\n"); + sb.append(" stori.vfs-root :" + this.vfsRoot + "\n"); + sb.append(" stori.absolutePath : " + this.getAbsolutePath() + "\n"); + sb.append(" stori.vfs NAME : " + this.getVFSName() + "\n"); + sb.append(" stori.stfn FileName : " + this.fileName + "\n"); + sb.append(" stori.stfn StFN path : " + this.stfnPath + "\n"); + sb.append(" stori.stfn rel. Path : " + this.relativePath + "\n"); + sb.append(" stori.relative StFN : " + this.relativeStFN + "\n"); + sb.append(" stori.stfn-root : " + this.stfnRoot + "\n"); + sb.append(" story.type : " + this.type + "\n"); + sb.append(" stori.SURL : " + this.getSURL() + "\n"); + sb.append(" stori.localFile : " + this.getLocalFile() + "\n"); + sb.append(" stori.mappingRule : " + this.getMappingRule() + "\n"); + + return sb.toString(); + } + + private String buildSURLString() throws NamespaceException { + String stfn = stfnRoot + NamingConst.SEPARATOR + relativeStFN; + SURL surl = new SURL(stfn); + return surl.toString(); + } + + private TTURL buildHTTPTURL(Protocol p, Authority authority) { + + String prefix = Configuration.getInstance().getHTTPTURLPrefix(); + StringBuilder sb = new StringBuilder(); + sb.append(p.getProtocolPrefix()); + sb.append(authority); + + if (prefix != null) { sb.append(prefix); } - sb.append(getStFN().toString()); - - log.debug("built http turl: {}", sb.toString()); - - return TTURL.makeFromString(sb.toString()); - - } - private TTURL buildTURL(Protocol protocol, Authority authority) - throws InvalidProtocolForTURLException { - - TTURL result = null; - - switch (protocol.getProtocolIndex()) { - case 0: // EMPTY Protocol - throw new InvalidProtocolForTURLException(protocol.getSchema()); - case 1: - result = TURLBuilder.buildFileTURL(authority, this.getPFN()); - break; // FILE Protocol - case 2: - result = TURLBuilder.buildGsiftpTURL(authority, this.getPFN()); - break; // GSIFTP Protocol - case 3: - result = TURLBuilder.buildRFIOTURL(authority, this.getPFN()); - break; // RFIO Protocol - case 4: // SRM Protocol - throw new InvalidProtocolForTURLException(protocol.getSchema()); - case 5: - result = TURLBuilder.buildROOTTURL(authority, this.getPFN()); - break; // ROOT Protocol - case 8: - result = TURLBuilder.buildXROOTTURL(authority, 
this.getPFN()); - break; // XROOT Protocol - default: - // Unknown protocol - throw new InvalidProtocolForTURLException(protocol.getSchema()); - } - return result; - } - - /** - * @param pooledProtocol - * @return - * @throws BalancerException - */ - private Authority getPooledAuthority(Protocol pooledProtocol) - throws BalancingStrategyException { - - Authority authority = null; - if (pooledProtocol.equals(Protocol.GSIFTP) - || pooledProtocol.equals(Protocol.HTTP) - || pooledProtocol.equals(Protocol.HTTPS)) { - BalancingStrategy bal = vfs - .getProtocolBalancingStrategy(pooledProtocol); - if (bal != null) { - Node node = bal.getNextElement(); - authority = new Authority(node.getHostname(), node.getPort()); - } - } else { - log.error("Unable to manage pool with protocol different from GSIFTP."); - } - return authority; - } - - private String getVFSName() { - - String result = "UNDEF"; - if (vfs != null) { - result = vfs.getAliasName(); - } - return result; - } - - /** - * Set "lifetime" and "startTime" information. The corresponding values are - * retrieved from the DB. - */ - private void setVolatileInformation() { - - VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance(); - List volatileInfo = catalog.volatileInfoOn(getPFN()); - if (volatileInfo.size() != 2) { - lifetime = TLifeTimeInSeconds.makeInfinite(); - startTime = null; - return; - } - startTime = new Date(((Calendar) volatileInfo.get(0)).getTimeInMillis()); - lifetime = (TLifeTimeInSeconds) volatileInfo.get(1); - volatileInformationAreSet = true; - } + sb.append(getStFN().toString()); + + log.debug("built http turl: {}", sb.toString()); + + return TTURL.makeFromString(sb.toString()); + } + + private TTURL buildTURL(Protocol protocol, Authority authority) + throws InvalidProtocolForTURLException { + + TTURL result = null; + + switch (protocol.getProtocolIndex()) { + case 0: // EMPTY Protocol + throw new InvalidProtocolForTURLException(protocol.getSchema()); + case 1: + result = TURLBuilder.buildFileTURL(authority, this.getPFN()); + break; // FILE Protocol + case 2: + result = TURLBuilder.buildGsiftpTURL(authority, this.getPFN()); + break; // GSIFTP Protocol + case 3: + result = TURLBuilder.buildRFIOTURL(authority, this.getPFN()); + break; // RFIO Protocol + case 4: // SRM Protocol + throw new InvalidProtocolForTURLException(protocol.getSchema()); + case 5: + result = TURLBuilder.buildROOTTURL(authority, this.getPFN()); + break; // ROOT Protocol + case 8: + result = TURLBuilder.buildXROOTTURL(authority, this.getPFN()); + break; // XROOT Protocol + default: + // Unknown protocol + throw new InvalidProtocolForTURLException(protocol.getSchema()); + } + return result; + } + + /** + * @param pooledProtocol + * @return + * @throws BalancerException + */ + private Authority getPooledAuthority(Protocol pooledProtocol) throws BalancingStrategyException { + + Authority authority = null; + if (pooledProtocol.equals(Protocol.GSIFTP) + || pooledProtocol.equals(Protocol.HTTP) + || pooledProtocol.equals(Protocol.HTTPS)) { + BalancingStrategy bal = vfs.getProtocolBalancingStrategy(pooledProtocol); + if (bal != null) { + Node node = bal.getNextElement(); + authority = new Authority(node.getHostname(), node.getPort()); + } + } else { + log.error("Unable to manage pool with protocol different from GSIFTP."); + } + return authority; + } + + private String getVFSName() { + + String result = "UNDEF"; + if (vfs != null) { + result = vfs.getAliasName(); + } + return result; + } + + /** + * Set "lifetime" and "startTime" information. 
The corresponding values are retrieved from the DB. + */ + private void setVolatileInformation() { + + VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance(); + List volatileInfo = catalog.volatileInfoOn(getPFN()); + if (volatileInfo.size() != 2) { + lifetime = TLifeTimeInSeconds.makeInfinite(); + startTime = null; + return; + } + startTime = new Date(((Calendar) volatileInfo.get(0)).getTimeInMillis()); + lifetime = (TLifeTimeInSeconds) volatileInfo.get(1); + volatileInformationAreSet = true; + } @Override - public StFN getStFNFromMappingRule() { + public StFN getStFNFromMappingRule() { try { - if (getMappingRule() == null){ - log.warn("Mapping rule is null for this StorI. " + - "Falling back to VFS StFN."); + if (getMappingRule() == null) { + log.warn("Mapping rule is null for this StorI. " + "Falling back to VFS StFN."); return getStFN(); } - + String mappingRuleRoot = getMappingRule().getStFNRoot(); - String mappedStfn = mappingRuleRoot + NamingConst.SEPARATOR - + relativeStFN; - + String mappedStfn = mappingRuleRoot + NamingConst.SEPARATOR + relativeStFN; + return StFN.make(mappedStfn); } catch (InvalidStFNAttributeException e) { - - log.error("Error building StFN from mapping rule. Reason: {}", - e.getMessage(),e); - + + log.error("Error building StFN from mapping rule. Reason: {}", e.getMessage(), e); + log.error("Falling back to VFS StFN."); - - return getStFN(); + return getStFN(); } } - } diff --git a/src/main/java/it/grid/storm/namespace/TURLBuilder.java b/src/main/java/it/grid/storm/namespace/TURLBuilder.java index 7ea48147..fdc69e28 100644 --- a/src/main/java/it/grid/storm/namespace/TURLBuilder.java +++ b/src/main/java/it/grid/storm/namespace/TURLBuilder.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; @@ -10,66 +9,63 @@ import it.grid.storm.namespace.model.Protocol; import it.grid.storm.srm.types.InvalidTTURLAttributesException; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; public class TURLBuilder { - private static Logger log = NamespaceDirector.getLogger(); + private static Logger log = NamespaceDirector.getLogger(); + + public TURLBuilder() { - public TURLBuilder() { + super(); + } - super(); - } + private static TTURL buildTURL( + Protocol protocol, Authority authority, String extraSlashes, PFN physicalFN) { - private static TTURL buildTURL(Protocol protocol, Authority authority, - String extraSlashes, PFN physicalFN) { + TTURL turl = null; + String turlString = null; + try { + turlString = + protocol.getProtocolPrefix() + + authority.toString() + + extraSlashes + + physicalFN.getValue(); + log.debug("turlString used to build the TURL : {}", turlString); + turl = TTURL.makeFromString(turlString); + } catch (InvalidTTURLAttributesException ex) { + log.error( + "Error while constructing TURL with Authority '{}': {}", authority, ex.getMessage(), ex); + } + return turl; + } - TTURL turl = null; - String turlString = null; - try { - turlString = protocol.getProtocolPrefix() + authority.toString() - + extraSlashes + physicalFN.getValue(); - log.debug("turlString used to build the TURL : {}", turlString); - turl = TTURL.makeFromString(turlString); - } catch (InvalidTTURLAttributesException ex) { - log.error("Error while constructing TURL with Authority '{}': {}", - authority, ex.getMessage(), ex); - } - return turl; - } + public static TTURL buildFileTURL(Authority authority, PFN physicalFN) { - public static TTURL buildFileTURL(Authority authority, PFN physicalFN) { + String extraSlashesForFile = Configuration.getInstance().getExtraSlashesForFileTURL(); + return buildTURL(Protocol.FILE, authority, extraSlashesForFile, physicalFN); + } - String extraSlashesForFile = Configuration.getInstance() - .getExtraSlashesForFileTURL(); - return buildTURL(Protocol.FILE, authority, extraSlashesForFile, physicalFN); - } + public static TTURL buildGsiftpTURL(Authority authority, PFN physicalFN) { - public static TTURL buildGsiftpTURL(Authority authority, PFN physicalFN) { + String extraSlashesForGSIFTP = Configuration.getInstance().getExtraSlashesForGsiFTPTURL(); + return buildTURL(Protocol.GSIFTP, authority, extraSlashesForGSIFTP, physicalFN); + } - String extraSlashesForGSIFTP = Configuration.getInstance() - .getExtraSlashesForGsiFTPTURL(); - return buildTURL(Protocol.GSIFTP, authority, extraSlashesForGSIFTP, - physicalFN); - } + public static TTURL buildRFIOTURL(Authority authority, PFN physicalFN) { - public static TTURL buildRFIOTURL(Authority authority, PFN physicalFN) { + String extraSlashesForRFIO = Configuration.getInstance().getExtraSlashesForRFIOTURL(); + return buildTURL(Protocol.RFIO, authority, extraSlashesForRFIO, physicalFN); + } - String extraSlashesForRFIO = Configuration.getInstance() - .getExtraSlashesForRFIOTURL(); - return buildTURL(Protocol.RFIO, authority, extraSlashesForRFIO, physicalFN); - } + public static TTURL buildROOTTURL(Authority authority, PFN physicalFN) { - public static TTURL buildROOTTURL(Authority authority, PFN physicalFN) { + String extraSlashesForROOT = Configuration.getInstance().getExtraSlashesForROOTTURL(); + return buildTURL(Protocol.ROOT, authority, extraSlashesForROOT, physicalFN); + } - String extraSlashesForROOT = Configuration.getInstance() - 
.getExtraSlashesForROOTTURL(); - return buildTURL(Protocol.ROOT, authority, extraSlashesForROOT, physicalFN); - } - - public static TTURL buildXROOTTURL(Authority authority, PFN physicalFN) { + public static TTURL buildXROOTTURL(Authority authority, PFN physicalFN) { - return buildROOTTURL(authority, physicalFN); - } -} \ No newline at end of file + return buildROOTTURL(authority, physicalFN); + } +} diff --git a/src/main/java/it/grid/storm/namespace/TURLBuildingException.java b/src/main/java/it/grid/storm/namespace/TURLBuildingException.java index c96a546a..f2fa7810 100644 --- a/src/main/java/it/grid/storm/namespace/TURLBuildingException.java +++ b/src/main/java/it/grid/storm/namespace/TURLBuildingException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; @@ -8,19 +7,17 @@ public class TURLBuildingException extends Exception { private static final long serialVersionUID = 1L; - public TURLBuildingException() { - } + public TURLBuildingException() {} - public TURLBuildingException(String message) { - super(message); - } + public TURLBuildingException(String message) { + super(message); + } - public TURLBuildingException(Throwable cause) { - super(cause); - } - - public TURLBuildingException(String message, Throwable cause) { - super(message, cause); - } + public TURLBuildingException(Throwable cause) { + super(cause); + } + public TURLBuildingException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/namespace/UnapprochableSurlException.java b/src/main/java/it/grid/storm/namespace/UnapprochableSurlException.java index 8789b316..f180d664 100644 --- a/src/main/java/it/grid/storm/namespace/UnapprochableSurlException.java +++ b/src/main/java/it/grid/storm/namespace/UnapprochableSurlException.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace; public class UnapprochableSurlException extends Exception { - private static final long serialVersionUID = -1004206794152723169L; - - public UnapprochableSurlException(String string) { - super(string); - } + private static final long serialVersionUID = -1004206794152723169L; + public UnapprochableSurlException(String string) { + super(string); + } } diff --git a/src/main/java/it/grid/storm/namespace/config/InvalidConfigurationFileFormatException.java b/src/main/java/it/grid/storm/namespace/config/InvalidConfigurationFileFormatException.java index f978a5a4..e94a04ed 100644 --- a/src/main/java/it/grid/storm/namespace/config/InvalidConfigurationFileFormatException.java +++ b/src/main/java/it/grid/storm/namespace/config/InvalidConfigurationFileFormatException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config; import it.grid.storm.namespace.*; /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidConfigurationFileFormatException extends NamespaceException { - private boolean notSupported = false; + private boolean notSupported = false; - public InvalidConfigurationFileFormatException(String fileName) { + public InvalidConfigurationFileFormatException(String fileName) { - notSupported = fileName.endsWith(".cfg") || fileName.endsWith(".xml"); - } + notSupported = fileName.endsWith(".cfg") || fileName.endsWith(".xml"); + } - public String toString() { + public String toString() { - return ("Configuration File Format NOT SUPPORTED = Not .xml or .cfg formati = " + notSupported); - } + return ("Configuration File Format NOT SUPPORTED = Not .xml or .cfg formati = " + notSupported); + } } diff --git a/src/main/java/it/grid/storm/namespace/config/NamespaceCheck.java b/src/main/java/it/grid/storm/namespace/config/NamespaceCheck.java index ea879c1a..6fa0cac1 100644 --- a/src/main/java/it/grid/storm/namespace/config/NamespaceCheck.java +++ b/src/main/java/it/grid/storm/namespace/config/NamespaceCheck.java @@ -1,19 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config; -import java.io.File; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; - import com.google.common.collect.Lists; - import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.model.ACLEntry; import it.grid.storm.namespace.model.ApproachableRule; @@ -22,161 +12,169 @@ import it.grid.storm.namespace.model.MappingRule; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.namespace.util.userinfo.LocalGroups; +import java.io.File; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; public class NamespaceCheck { - private final Logger log = NamespaceDirector.getLogger(); - private final Map vfss; - private final Map maprules; - private final Map apprules; - - public NamespaceCheck(Map vfss, - Map maprules, - Map apprules) { - - this.vfss = vfss; - this.maprules = maprules; - this.apprules = apprules; - } - - public boolean check() { - - boolean vfsCheck = checkVFS(); - boolean mapRulesCheck = checkMapRules(); - boolean appRules = checkAppRules(); - checkGroups(vfsCheck); - return vfsCheck && mapRulesCheck && appRules; - } - - private boolean checkGroups(boolean vfsCheckResult) { - - log - .info("Namespace check. Checking of the existence of the needed Local group ..."); - boolean result = true; - if (!vfsCheckResult) { - log - .warn("Skip the check of the needed Local Group, because check of VFSs failed."); - } else { - - List vf = new ArrayList<>(vfss.values()); - for (VirtualFS vfs : vf) { - - // Check the presence of Default ACL - Capability cap = vfs.getCapabilities(); - if (cap != null) { - DefaultACL defACL = cap.getDefaultACL(); - if (defACL != null) { - List acl = new ArrayList<>(defACL.getACL()); - if (!acl.isEmpty()) { - for (ACLEntry aclEntry : acl) { - if (!LocalGroups.getInstance().isGroupDefined( - aclEntry.getGroupName())) { - log.warn("!!!!! Local Group for ACL ('{}') is not defined!", aclEntry); - result = false; - } - } - } - } - } - } - } - if (result) { - log.info("All local groups are defined. 
"); - } else { - log.warn("Please check the local group needed to StoRM"); - } - return result; - } - - /** - * Check if the root of the VFS exists. - * - * @todo: this method don't check if the root is accessible by storm user. - * - * @return true if "filesystems" element (list of VFS) is valid false - * otherwise - */ - private boolean checkVFS() { - - log.info("Namespace checking VFSs .."); - boolean result = true; - if (vfss == null) { - log.error("Anyone VFS is defined in namespace!"); - return false; - } else { - List rules = new ArrayList<>(vfss.values()); - Iterator scan = rules.iterator(); - - while (scan.hasNext()) { - VirtualFS vfs = scan.next(); - - String aliasName = vfs.getAliasName(); - log.debug("VFS named '{}' found.", aliasName); - String root = vfs.getRootPath(); - File file = new File(root); - boolean exists = file.exists(); - if (!exists) { - log.error("ERROR in NAMESPACE: The VFS '{}' does not have a valid root :'{}'", aliasName, root); - result = false; - } - } - } - if (result) { - log.info(" VFSs are well-defined."); - } - return result; - } - - private boolean checkMapRules() { - - boolean result = true; - if (maprules == null) { - return false; - } else { - int nrOfMappingRules = maprules.size(); - log.debug("Number of Mapping rules = {}", nrOfMappingRules); - List rules = new ArrayList<>(maprules.values()); - Iterator scan = rules.iterator(); - MappingRule rule; - String mappedVFS; - boolean check = false; - while (scan.hasNext()) { - rule = scan.next(); - mappedVFS = rule.getMappedFS().getAliasName(); - check = vfss.containsKey(mappedVFS); - if (!check) { - log.error("ERROR in NAMESPACE - MAP RULE '{}' point a UNKNOWN VFS '{}'!", rule.getRuleName(), mappedVFS); - result = false; - } - } - } - return result; - - } - - private boolean checkAppRules() { - - boolean result = true; - if (apprules == null) { - return false; - } else { - int nrOfApproachableRules = apprules.size(); - log.debug("Number of Approachable rules = {}", nrOfApproachableRules); - List rules = new ArrayList<>(apprules.values()); - Iterator scan = rules.iterator(); - boolean check = false; - while (scan.hasNext()) { - ApproachableRule rule = scan.next(); - List approachVFSs = Lists.newArrayList(rule.getApproachableVFS()); - for (VirtualFS aVfs : approachVFSs) { - check = vfss.containsKey(aVfs.getAliasName()); - if (!check) { - log.error("ERROR in NAMESPACE - APP RULE '{}' point a UNKNOWN VFS '{}'!", rule.getRuleName(), aVfs); - result = false; - } - } - } - } - return result; - } + private final Logger log = NamespaceDirector.getLogger(); + private final Map vfss; + private final Map maprules; + private final Map apprules; + + public NamespaceCheck( + Map vfss, + Map maprules, + Map apprules) { + + this.vfss = vfss; + this.maprules = maprules; + this.apprules = apprules; + } + + public boolean check() { + + boolean vfsCheck = checkVFS(); + boolean mapRulesCheck = checkMapRules(); + boolean appRules = checkAppRules(); + checkGroups(vfsCheck); + return vfsCheck && mapRulesCheck && appRules; + } + + private boolean checkGroups(boolean vfsCheckResult) { + + log.info("Namespace check. 
Checking of the existence of the needed Local group ..."); + boolean result = true; + if (!vfsCheckResult) { + log.warn("Skip the check of the needed Local Group, because check of VFSs failed."); + } else { + + List vf = new ArrayList<>(vfss.values()); + for (VirtualFS vfs : vf) { + + // Check the presence of Default ACL + Capability cap = vfs.getCapabilities(); + if (cap != null) { + DefaultACL defACL = cap.getDefaultACL(); + if (defACL != null) { + List acl = new ArrayList<>(defACL.getACL()); + if (!acl.isEmpty()) { + for (ACLEntry aclEntry : acl) { + if (!LocalGroups.getInstance().isGroupDefined(aclEntry.getGroupName())) { + log.warn("!!!!! Local Group for ACL ('{}') is not defined!", aclEntry); + result = false; + } + } + } + } + } + } + } + if (result) { + log.info("All local groups are defined. "); + } else { + log.warn("Please check the local group needed to StoRM"); + } + return result; + } + + /** + * Check if the root of the VFS exists. + * + * @todo: this method don't check if the root is accessible by storm user. + * @return true if "filesystems" element (list of VFS) is valid false otherwise + */ + private boolean checkVFS() { + + log.info("Namespace checking VFSs .."); + boolean result = true; + if (vfss == null) { + log.error("Anyone VFS is defined in namespace!"); + return false; + } else { + List rules = new ArrayList<>(vfss.values()); + Iterator scan = rules.iterator(); + + while (scan.hasNext()) { + VirtualFS vfs = scan.next(); + + String aliasName = vfs.getAliasName(); + log.debug("VFS named '{}' found.", aliasName); + String root = vfs.getRootPath(); + File file = new File(root); + boolean exists = file.exists(); + if (!exists) { + log.error( + "ERROR in NAMESPACE: The VFS '{}' does not have a valid root :'{}'", aliasName, root); + result = false; + } + } + } + if (result) { + log.info(" VFSs are well-defined."); + } + return result; + } + + private boolean checkMapRules() { + + boolean result = true; + if (maprules == null) { + return false; + } else { + int nrOfMappingRules = maprules.size(); + log.debug("Number of Mapping rules = {}", nrOfMappingRules); + List rules = new ArrayList<>(maprules.values()); + Iterator scan = rules.iterator(); + MappingRule rule; + String mappedVFS; + boolean check = false; + while (scan.hasNext()) { + rule = scan.next(); + mappedVFS = rule.getMappedFS().getAliasName(); + check = vfss.containsKey(mappedVFS); + if (!check) { + log.error( + "ERROR in NAMESPACE - MAP RULE '{}' point a UNKNOWN VFS '{}'!", + rule.getRuleName(), + mappedVFS); + result = false; + } + } + } + return result; + } + + private boolean checkAppRules() { + + boolean result = true; + if (apprules == null) { + return false; + } else { + int nrOfApproachableRules = apprules.size(); + log.debug("Number of Approachable rules = {}", nrOfApproachableRules); + List rules = new ArrayList<>(apprules.values()); + Iterator scan = rules.iterator(); + boolean check = false; + while (scan.hasNext()) { + ApproachableRule rule = scan.next(); + List approachVFSs = Lists.newArrayList(rule.getApproachableVFS()); + for (VirtualFS aVfs : approachVFSs) { + check = vfss.containsKey(aVfs.getAliasName()); + if (!check) { + log.error( + "ERROR in NAMESPACE - APP RULE '{}' point a UNKNOWN VFS '{}'!", + rule.getRuleName(), + aVfs); + result = false; + } + } + } + } + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/config/NamespaceLoader.java b/src/main/java/it/grid/storm/namespace/config/NamespaceLoader.java index b01c1513..1356d192 100644 --- 
a/src/main/java/it/grid/storm/namespace/config/NamespaceLoader.java +++ b/src/main/java/it/grid/storm/namespace/config/NamespaceLoader.java @@ -1,33 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config; import org.apache.commons.configuration.*; /**
- * <p>
  * Title:
- * </p>
- *
- * <p>
- * Description:
- * </p>
- *
- * <p>
- * Copyright: Copyright (c) 2006
- * </p>
- *
- * <p>
- * Company: INFN-CNAF
- * </p>
- *
+ *
+ * <p>Description:
+ *
+ * <p>Copyright: Copyright (c) 2006
+ *
+ * <p>Company: INFN-CNAF
+ *
  * @author Riccardoi Zappi
  * @version 1.0
  */
public interface NamespaceLoader { - public Configuration getConfiguration(); - + public Configuration getConfiguration(); } diff --git a/src/main/java/it/grid/storm/namespace/config/NamespaceParser.java b/src/main/java/it/grid/storm/namespace/config/NamespaceParser.java index 97c8d9b9..5aea7265 100644 --- a/src/main/java/it/grid/storm/namespace/config/NamespaceParser.java +++ b/src/main/java/it/grid/storm/namespace/config/NamespaceParser.java @@ -1,57 +1,45 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config; -import java.util.List; -import java.util.Map; - import it.grid.storm.namespace.model.ApproachableRule; import it.grid.storm.namespace.model.MappingRule; import it.grid.storm.namespace.model.VirtualFS; +import java.util.List; +import java.util.Map; /**

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2006 - *

- * - *

- * Company: INFN-CNAF - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2006 + * + *

Company: INFN-CNAF + * * @author Riccardo Zappi * @version 1.0 */ - public interface NamespaceParser { - public String getNamespaceVersion(); - - public Map getVFSs(); + public String getNamespaceVersion(); - public VirtualFS getVFS(String vfsName); + public Map getVFSs(); - public List getAllVFS_Roots(); + public VirtualFS getVFS(String vfsName); - public Map getMapVFS_Root(); + public List getAllVFS_Roots(); - public List getAllMappingRule_StFNRoots(); + public Map getMapVFS_Root(); - public Map getMappingRules(); + public List getAllMappingRule_StFNRoots(); - public Map getMappingRuleMAP(); + public Map getMappingRules(); - public Map getApproachableRules(); + public Map getMappingRuleMAP(); - public long getLastUpdateTime(); + public Map getApproachableRules(); + public long getLastUpdateTime(); } diff --git a/src/main/java/it/grid/storm/namespace/config/xml/XMLConst.java b/src/main/java/it/grid/storm/namespace/config/xml/XMLConst.java index fb2fc273..1b68b5e7 100644 --- a/src/main/java/it/grid/storm/namespace/config/xml/XMLConst.java +++ b/src/main/java/it/grid/storm/namespace/config/xml/XMLConst.java @@ -1,345 +1,306 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config.xml; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.SAAuthzType; - import java.util.List; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2006 - *

- * - *

- * Company: INFN-CNAF and ICTP/eGrid project - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2006 + * + *

Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ public interface XMLConst { - public final char FS_SUB_PATTERN = '°'; - public final char MAP_SUB_PATTERN = 'ç'; - public final char PROT_SUB_PATTERN = '§'; - public final char POOL_SUB_PATTERN = '£'; - public final char APPRULE_SUB_PATTERN = '^'; - public final char ACL_ENTRY_SUB_PATTERN = '~'; - public final char MEMBER_SUB_PATTERN = '*'; - - /** - * ELEMENTS - */ - - // ######### Level-0 ######### - public final String MAIN_ELEMENT = "namespace"; - public final String NAMESPACE_VERSION = "[@version]"; - - // ######### Level-1 ######### - public final String FILESYSTEMS = "filesystems"; - public final String MAPPINGRULES = "mapping-rules"; - public final String APPROACHABLERULES = "approachable-rules"; - - // ######### Level-2 ######### - // FILESYSTEMS-TYPE - public String FILESYSTEM_NUDE = FILESYSTEMS + ".filesystem"; - public String FILESYSTEM = FILESYSTEM_NUDE + "(" + FS_SUB_PATTERN + ")"; - public String FILESYSTEM_NAME = FILESYSTEM + "[@name]"; - public String FILESYSTEM_TYPE = FILESYSTEM + "[@fs_type]"; - // MAPPING-RULES-TYPE - public String MAP_RULE_NUDE = MAPPINGRULES + ".map-rule"; - public String MAP_RULE = MAP_RULE_NUDE + "(" + MAP_SUB_PATTERN + ")"; - public String MAP_RULE_NAME = MAP_RULE + "[@name]"; - // APPROACHABLE-RULES-TYPE - public String APP_RULE_NUDE = APPROACHABLERULES + ".app-rule"; - public String APP_RULE = APP_RULE_NUDE + "(" + APPRULE_SUB_PATTERN + ")"; - public String APP_RULE_NAME = APP_RULE + "[@name]"; - - // ######### Level-3 ######### - // FILESYSTEM-TYPE - public String FS_COUNTING = FILESYSTEM_NUDE + ".root"; - public String FS_BY_NAME = FILESYSTEM_NUDE + "[@name]"; - public String FS_ROOT = FILESYSTEM + ".root"; - public String FS_SPACE_TOKEN_DESCRIPTION = FILESYSTEM - + ".space-token-description"; - public String FS_STORAGE_CLASS = FILESYSTEM + ".storage-class"; - public String FS_DRIVER = FILESYSTEM + ".filesystem-driver"; - public String FS_SPACE_DRIVER = FILESYSTEM + ".spacesystem-driver"; - public String FS_AUTHZ = FILESYSTEM + ".storage-area-authz"; // 1.4.0 - public String FS_DEFAULTVALUES = FILESYSTEM + ".defaults-values"; - public String FS_CAPABILITIES = FILESYSTEM + ".capabilities"; - public String FS_PROPERTIES = FILESYSTEM + ".properties"; - // MAP-RULE-TYPE - public String MAP_RULE_COUNTING = MAP_RULE_NUDE + ".stfn-root"; - public String MAP_RULE_BY_NAME = MAP_RULE_NUDE + "[@name]"; - public String MAP_RULE_STFNROOT = MAP_RULE + ".stfn-root"; - public String MAP_RULE_MAPPED_FS = MAP_RULE + ".mapped-fs"; - // APP-RULE-TYPE - public String APP_RULE_COUNTING = APP_RULE_NUDE + "[@name]"; - public String APP_RULE_BY_NAME = APP_RULE_NUDE + "[@name]"; - public String APP_SUBJECTS = APP_RULE + ".subjects"; - public String APPROACHABLE_FS = APP_RULE + ".approachable-fs"; - public String APP_SPACE_REL_PATH = APP_RULE + ".space-rel-path"; - public String APP_ANONYMOUS_HTTP_READ = APP_RULE + ".anonymous-http-read"; - // ######### Level-4 ######### - // STORAGE-AREA-AUTHZ - public String SA_AUTHZ_FIXED = FS_AUTHZ + ".fixed"; // 1.4.0 - public String SA_AUTHZ_DB = FS_AUTHZ + ".authz-db"; // 1.4.0 - // DEFAULTS-VALUES-TYPE - public String DEF_SPACE = FS_DEFAULTVALUES + ".space"; - public String DEF_SPACE_LT = DEF_SPACE + "[@lifetime]"; - public String DEF_SPACE_TYPE = DEF_SPACE + "[@type]"; - public String DEF_SPACE_GUARSIZE = DEF_SPACE + "[@guarsize]"; - public String DEF_SPACE_TOTSIZE = DEF_SPACE + "[@totalsize]"; - public String DEF_FILE = FS_DEFAULTVALUES + 
".file"; - public String DEF_FILE_LT = DEF_FILE + "[@lifetime]"; - public String DEF_FILE_TYPE = DEF_FILE + "[@type]"; - // PROPERTIES-TYPE - public String RETENTION_POLICY = FS_PROPERTIES + ".RetentionPolicy"; - public String ACCESS_LATENCY = FS_PROPERTIES + ".AccessLatency"; - public String EXPIRATION_MODE = FS_PROPERTIES + ".ExpirationMode"; - public String ONLINE_SIZE = FS_PROPERTIES + ".TotalOnlineSize"; - public String ONLINE_SIZE_UNIT = ONLINE_SIZE + "[@unit]"; - public String LIMITED_SIZE = ONLINE_SIZE + "[@limited-size]"; // 1.4.0 - public String NEARLINE_SIZE = FS_PROPERTIES + ".TotalNearlineSize"; - public String NEARLINE_SIZE_UNIT = NEARLINE_SIZE + "[@unit]"; - // CAPABILITIES-TYPE - public String ACL_MODE = FS_CAPABILITIES + ".aclMode"; - public String DEFAULT_ACL = FS_CAPABILITIES + ".default-acl"; - public String TRANS_PROT = FS_CAPABILITIES + ".trans-prot"; - public String QUOTA = FS_CAPABILITIES + ".quota"; - public String QUOTA_ENABLED = QUOTA + "[@enabled]"; - // SUBJECTS-TYPE - public String APP_DN = APP_SUBJECTS + ".dn"; - public String APP_VO_NAME = APP_SUBJECTS + ".vo-name"; - // PROTOCOL POOL DEFINITION - public String POOL = FS_CAPABILITIES + ".pool(" + POOL_SUB_PATTERN + ")"; // 1.4.0 - public String POOL_COUNTING = FS_CAPABILITIES + ".pool.balance-strategy"; - - // ######### Level-5 ######### - // DEFAULT_ACL - public String ACL_ENTRY = DEFAULT_ACL + ".acl-entry"; - public String ACL_ENTRY_COUNTING = DEFAULT_ACL + ".acl-entry.groupName"; - - // QUOTA-PROPERTIES - // public String QUOTA_PROPERTIES = QUOTA + ".properties"; - // public String QUOTA_PROPERTIES_FILE = QUOTA + ".properties-file"; - // TRANS-PROT-TYPE - public String PROTOCOL_BY_NAME = TRANS_PROT + ".prot[@name]"; - public String PROTOCOL_COUNTING = TRANS_PROT + ".prot.schema"; - public String PROTOCOL = TRANS_PROT + ".prot(" + PROT_SUB_PATTERN + ")"; - public String PROTOCOL_NAME = PROTOCOL + "[@name]"; - - // ######### Level-6 ######### - // ACL-ENTRY DETAILS - public String GROUP_NAME = ACL_ENTRY + ".groupName"; - public String PERMISSIONS = ACL_ENTRY + ".permissions"; - - // QUOTA-TYPE - public String QUOTA_ELEMENT = QUOTA + ".quotaElement"; - public String QUOTA_DEVICE = QUOTA + ".device"; - - // PROT-TYPE - public String PROT_ID = PROTOCOL + ".id"; // 1.4.0 - public String PROT_SCHEMA = PROTOCOL + ".schema"; - public String PROT_HOST = PROTOCOL + ".host"; - public String PROT_PORT = PROTOCOL + ".port"; - - // POOL DETAILS - public String BALANCE_STRATEGY = POOL + ".balance-strategy"; // 1.4.0 - public String POOL_MEMBERS = POOL + ".members"; // 1.4.0 - - // ######### Level-7 ######### - // POOL MEMBER - public String POOL_MEMBER_COUNTING = POOL_MEMBERS + ".member"; // 1.4.0 - public String POOL_MEMBER_NUDE = POOL_MEMBERS + ".member"; // 1.4.0 - public String POOL_MEMBER = POOL_MEMBER_NUDE + "(" + MEMBER_SUB_PATTERN + ")"; // 1.4.0 - public String POOL_MEMBER_ID = POOL_MEMBER + "[@member-id]"; // 1.4.0 - public String POOL_MEMBER_WEIGHT = POOL_MEMBER + ".weight"; // 1.4.0 - - // QUOTA-TYPE-ID - public String QUOTA_FILE_SET_NAME = QUOTA_ELEMENT + ".filesetName"; - public String QUOTA_GROUP_NAME = QUOTA_ELEMENT + ".groupName"; - public String QUOTA_USER_NAME = QUOTA_ELEMENT + ".userName"; - - // ##################################### - // OPTIONAL ELEMENT and DEFAULT VALUES - // ##################################### - public final String DEFAULT_UNIT_TYPE = "TB"; - public final String DEFAULT_AUTHZ_SOURCE = "PermitAll"; - public final String DEFAULT_STORAGE_CLASS = "T0D1"; + public final char 
FS_SUB_PATTERN = '°'; + public final char MAP_SUB_PATTERN = 'ç'; + public final char PROT_SUB_PATTERN = '§'; + public final char POOL_SUB_PATTERN = '£'; + public final char APPRULE_SUB_PATTERN = '^'; + public final char ACL_ENTRY_SUB_PATTERN = '~'; + public final char MEMBER_SUB_PATTERN = '*'; + + /** ELEMENTS */ + + // ######### Level-0 ######### + public final String MAIN_ELEMENT = "namespace"; + + public final String NAMESPACE_VERSION = "[@version]"; + + // ######### Level-1 ######### + public final String FILESYSTEMS = "filesystems"; + public final String MAPPINGRULES = "mapping-rules"; + public final String APPROACHABLERULES = "approachable-rules"; + + // ######### Level-2 ######### + // FILESYSTEMS-TYPE + public String FILESYSTEM_NUDE = FILESYSTEMS + ".filesystem"; + public String FILESYSTEM = FILESYSTEM_NUDE + "(" + FS_SUB_PATTERN + ")"; + public String FILESYSTEM_NAME = FILESYSTEM + "[@name]"; + public String FILESYSTEM_TYPE = FILESYSTEM + "[@fs_type]"; + // MAPPING-RULES-TYPE + public String MAP_RULE_NUDE = MAPPINGRULES + ".map-rule"; + public String MAP_RULE = MAP_RULE_NUDE + "(" + MAP_SUB_PATTERN + ")"; + public String MAP_RULE_NAME = MAP_RULE + "[@name]"; + // APPROACHABLE-RULES-TYPE + public String APP_RULE_NUDE = APPROACHABLERULES + ".app-rule"; + public String APP_RULE = APP_RULE_NUDE + "(" + APPRULE_SUB_PATTERN + ")"; + public String APP_RULE_NAME = APP_RULE + "[@name]"; + + // ######### Level-3 ######### + // FILESYSTEM-TYPE + public String FS_COUNTING = FILESYSTEM_NUDE + ".root"; + public String FS_BY_NAME = FILESYSTEM_NUDE + "[@name]"; + public String FS_ROOT = FILESYSTEM + ".root"; + public String FS_SPACE_TOKEN_DESCRIPTION = FILESYSTEM + ".space-token-description"; + public String FS_STORAGE_CLASS = FILESYSTEM + ".storage-class"; + public String FS_DRIVER = FILESYSTEM + ".filesystem-driver"; + public String FS_SPACE_DRIVER = FILESYSTEM + ".spacesystem-driver"; + public String FS_AUTHZ = FILESYSTEM + ".storage-area-authz"; // 1.4.0 + public String FS_DEFAULTVALUES = FILESYSTEM + ".defaults-values"; + public String FS_CAPABILITIES = FILESYSTEM + ".capabilities"; + public String FS_PROPERTIES = FILESYSTEM + ".properties"; + // MAP-RULE-TYPE + public String MAP_RULE_COUNTING = MAP_RULE_NUDE + ".stfn-root"; + public String MAP_RULE_BY_NAME = MAP_RULE_NUDE + "[@name]"; + public String MAP_RULE_STFNROOT = MAP_RULE + ".stfn-root"; + public String MAP_RULE_MAPPED_FS = MAP_RULE + ".mapped-fs"; + // APP-RULE-TYPE + public String APP_RULE_COUNTING = APP_RULE_NUDE + "[@name]"; + public String APP_RULE_BY_NAME = APP_RULE_NUDE + "[@name]"; + public String APP_SUBJECTS = APP_RULE + ".subjects"; + public String APPROACHABLE_FS = APP_RULE + ".approachable-fs"; + public String APP_SPACE_REL_PATH = APP_RULE + ".space-rel-path"; + public String APP_ANONYMOUS_HTTP_READ = APP_RULE + ".anonymous-http-read"; + // ######### Level-4 ######### + // STORAGE-AREA-AUTHZ + public String SA_AUTHZ_FIXED = FS_AUTHZ + ".fixed"; // 1.4.0 + public String SA_AUTHZ_DB = FS_AUTHZ + ".authz-db"; // 1.4.0 + // DEFAULTS-VALUES-TYPE + public String DEF_SPACE = FS_DEFAULTVALUES + ".space"; + public String DEF_SPACE_LT = DEF_SPACE + "[@lifetime]"; + public String DEF_SPACE_TYPE = DEF_SPACE + "[@type]"; + public String DEF_SPACE_GUARSIZE = DEF_SPACE + "[@guarsize]"; + public String DEF_SPACE_TOTSIZE = DEF_SPACE + "[@totalsize]"; + public String DEF_FILE = FS_DEFAULTVALUES + ".file"; + public String DEF_FILE_LT = DEF_FILE + "[@lifetime]"; + public String DEF_FILE_TYPE = DEF_FILE + "[@type]"; + // PROPERTIES-TYPE + 
public String RETENTION_POLICY = FS_PROPERTIES + ".RetentionPolicy"; + public String ACCESS_LATENCY = FS_PROPERTIES + ".AccessLatency"; + public String EXPIRATION_MODE = FS_PROPERTIES + ".ExpirationMode"; + public String ONLINE_SIZE = FS_PROPERTIES + ".TotalOnlineSize"; + public String ONLINE_SIZE_UNIT = ONLINE_SIZE + "[@unit]"; + public String LIMITED_SIZE = ONLINE_SIZE + "[@limited-size]"; // 1.4.0 + public String NEARLINE_SIZE = FS_PROPERTIES + ".TotalNearlineSize"; + public String NEARLINE_SIZE_UNIT = NEARLINE_SIZE + "[@unit]"; + // CAPABILITIES-TYPE + public String ACL_MODE = FS_CAPABILITIES + ".aclMode"; + public String DEFAULT_ACL = FS_CAPABILITIES + ".default-acl"; + public String TRANS_PROT = FS_CAPABILITIES + ".trans-prot"; + public String QUOTA = FS_CAPABILITIES + ".quota"; + public String QUOTA_ENABLED = QUOTA + "[@enabled]"; + // SUBJECTS-TYPE + public String APP_DN = APP_SUBJECTS + ".dn"; + public String APP_VO_NAME = APP_SUBJECTS + ".vo-name"; + // PROTOCOL POOL DEFINITION + public String POOL = FS_CAPABILITIES + ".pool(" + POOL_SUB_PATTERN + ")"; // 1.4.0 + public String POOL_COUNTING = FS_CAPABILITIES + ".pool.balance-strategy"; + + // ######### Level-5 ######### + // DEFAULT_ACL + public String ACL_ENTRY = DEFAULT_ACL + ".acl-entry"; + public String ACL_ENTRY_COUNTING = DEFAULT_ACL + ".acl-entry.groupName"; + + // QUOTA-PROPERTIES + // public String QUOTA_PROPERTIES = QUOTA + ".properties"; + // public String QUOTA_PROPERTIES_FILE = QUOTA + ".properties-file"; + // TRANS-PROT-TYPE + public String PROTOCOL_BY_NAME = TRANS_PROT + ".prot[@name]"; + public String PROTOCOL_COUNTING = TRANS_PROT + ".prot.schema"; + public String PROTOCOL = TRANS_PROT + ".prot(" + PROT_SUB_PATTERN + ")"; + public String PROTOCOL_NAME = PROTOCOL + "[@name]"; + + // ######### Level-6 ######### + // ACL-ENTRY DETAILS + public String GROUP_NAME = ACL_ENTRY + ".groupName"; + public String PERMISSIONS = ACL_ENTRY + ".permissions"; + + // QUOTA-TYPE + public String QUOTA_ELEMENT = QUOTA + ".quotaElement"; + public String QUOTA_DEVICE = QUOTA + ".device"; + + // PROT-TYPE + public String PROT_ID = PROTOCOL + ".id"; // 1.4.0 + public String PROT_SCHEMA = PROTOCOL + ".schema"; + public String PROT_HOST = PROTOCOL + ".host"; + public String PROT_PORT = PROTOCOL + ".port"; + + // POOL DETAILS + public String BALANCE_STRATEGY = POOL + ".balance-strategy"; // 1.4.0 + public String POOL_MEMBERS = POOL + ".members"; // 1.4.0 + + // ######### Level-7 ######### + // POOL MEMBER + public String POOL_MEMBER_COUNTING = POOL_MEMBERS + ".member"; // 1.4.0 + public String POOL_MEMBER_NUDE = POOL_MEMBERS + ".member"; // 1.4.0 + public String POOL_MEMBER = POOL_MEMBER_NUDE + "(" + MEMBER_SUB_PATTERN + ")"; // 1.4.0 + public String POOL_MEMBER_ID = POOL_MEMBER + "[@member-id]"; // 1.4.0 + public String POOL_MEMBER_WEIGHT = POOL_MEMBER + ".weight"; // 1.4.0 + + // QUOTA-TYPE-ID + public String QUOTA_FILE_SET_NAME = QUOTA_ELEMENT + ".filesetName"; + public String QUOTA_GROUP_NAME = QUOTA_ELEMENT + ".groupName"; + public String QUOTA_USER_NAME = QUOTA_ELEMENT + ".userName"; - /** - * METHOD Interface - */ + // ##################################### + // OPTIONAL ELEMENT and DEFAULT VALUES + // ##################################### + public final String DEFAULT_UNIT_TYPE = "TB"; + public final String DEFAULT_AUTHZ_SOURCE = "PermitAll"; + public final String DEFAULT_STORAGE_CLASS = "T0D1"; - public String getNamespaceVersion() throws NamespaceException; + /** METHOD Interface */ + public String getNamespaceVersion() throws 
NamespaceException; - public int getNumberOfFS() throws NamespaceException; + public int getNumberOfFS() throws NamespaceException; - public String getFSName(int numOfFS) throws NamespaceException; + public String getFSName(int numOfFS) throws NamespaceException; - public int getFSNumber(String nameOfFS) throws NamespaceException; + public int getFSNumber(String nameOfFS) throws NamespaceException; - public String getFSType(String nameOfFS) throws NamespaceException; + public String getFSType(String nameOfFS) throws NamespaceException; - public String getFSSpaceTokenDescription(String nameOfFS) - throws NamespaceException; + public String getFSSpaceTokenDescription(String nameOfFS) throws NamespaceException; - public String getFSRoot(String nameOfFS) throws NamespaceException; + public String getFSRoot(String nameOfFS) throws NamespaceException; - public String getFSDriver(String nameOfFS) throws NamespaceException; + public String getFSDriver(String nameOfFS) throws NamespaceException; - public String getSpaceDriver(String nameOfFS) throws NamespaceException; + public String getSpaceDriver(String nameOfFS) throws NamespaceException; - public String getStorageAreaAuthz(String nameOfFS, SAAuthzType type) - throws NamespaceException; // Modified in 1.4.0 + public String getStorageAreaAuthz(String nameOfFS, SAAuthzType type) + throws NamespaceException; // Modified in 1.4.0 - public boolean getStorageAreaAuthzFixedDefined(String nameOfFS) - throws NamespaceException; // 1.4.0 + public boolean getStorageAreaAuthzFixedDefined(String nameOfFS) + throws NamespaceException; // 1.4.0 - public boolean getStorageAreaAuthzDBDefined(String nameOfFS) - throws NamespaceException; // 1.4.0 + public boolean getStorageAreaAuthzDBDefined(String nameOfFS) throws NamespaceException; // 1.4.0 - public SAAuthzType getStorageAreaAuthzType(String nameOfFS) - throws NamespaceException; // 1.4.0 + public SAAuthzType getStorageAreaAuthzType(String nameOfFS) throws NamespaceException; // 1.4.0 - public String getDefaultSpaceType(String nameOfFS) throws NamespaceException; + public String getDefaultSpaceType(String nameOfFS) throws NamespaceException; - public long getDefaultSpaceLifeTime(String nameOfFS) - throws NamespaceException; + public long getDefaultSpaceLifeTime(String nameOfFS) throws NamespaceException; - public long getDefaultSpaceGuarSize(String nameOfFS) - throws NamespaceException; + public long getDefaultSpaceGuarSize(String nameOfFS) throws NamespaceException; - public long getDefaultSpaceTotSize(String nameOfFS) throws NamespaceException; + public long getDefaultSpaceTotSize(String nameOfFS) throws NamespaceException; - public String getDefaultFileType(String nameOfFS) throws NamespaceException; + public String getDefaultFileType(String nameOfFS) throws NamespaceException; - public long getDefaultFileLifeTime(String nameOfFS) throws NamespaceException; + public long getDefaultFileLifeTime(String nameOfFS) throws NamespaceException; - public String getACLMode(String nameOfFS) throws NamespaceException; + public String getACLMode(String nameOfFS) throws NamespaceException; - public boolean getDefaultACLDefined(String nameOfFS) - throws NamespaceException; + public boolean getDefaultACLDefined(String nameOfFS) throws NamespaceException; - public int getNumberOfACL(String nameOfFS) throws NamespaceException; + public int getNumberOfACL(String nameOfFS) throws NamespaceException; - public String getGroupName(String nameOfFS, int aclEntryNumber) - throws NamespaceException; + public String 
getGroupName(String nameOfFS, int aclEntryNumber) throws NamespaceException; - public String getPermissionString(String nameOfFS, int aclEntryNumber) - throws NamespaceException; + public String getPermissionString(String nameOfFS, int aclEntryNumber) throws NamespaceException; - public boolean getQuotaDefined(String nameOfFS) throws NamespaceException; + public boolean getQuotaDefined(String nameOfFS) throws NamespaceException; - public boolean getQuotaEnabled(String nameOfFS) throws NamespaceException; + public boolean getQuotaEnabled(String nameOfFS) throws NamespaceException; - public boolean getQuotaDeviceDefined(String nameOfFS) - throws NamespaceException; + public boolean getQuotaDeviceDefined(String nameOfFS) throws NamespaceException; - public String getQuotaDevice(String nameOfFS) throws NamespaceException; + public String getQuotaDevice(String nameOfFS) throws NamespaceException; - public boolean getQuotaFilesetDefined(String nameOfFS) - throws NamespaceException; + public boolean getQuotaFilesetDefined(String nameOfFS) throws NamespaceException; - public String getQuotaFileset(String nameOfFS) throws NamespaceException; + public String getQuotaFileset(String nameOfFS) throws NamespaceException; - public boolean getQuotaGroupIDDefined(String nameOfFS) - throws NamespaceException; + public boolean getQuotaGroupIDDefined(String nameOfFS) throws NamespaceException; - public String getQuotaGroupID(String nameOfFS) throws NamespaceException; + public String getQuotaGroupID(String nameOfFS) throws NamespaceException; - public boolean getQuotaUserIDDefined(String nameOfFS) - throws NamespaceException; + public boolean getQuotaUserIDDefined(String nameOfFS) throws NamespaceException; - public String getQuotaUserID(String nameOfFS) throws NamespaceException; + public String getQuotaUserID(String nameOfFS) throws NamespaceException; - public int getNumberOfProt(String nameOfFS) throws NamespaceException; + public int getNumberOfProt(String nameOfFS) throws NamespaceException; - public String getProtName(String nameOfFS, int numOfProt) - throws NamespaceException; + public String getProtName(String nameOfFS, int numOfProt) throws NamespaceException; - public int getProtId(String nameOfFS, int numOfProt) - throws NamespaceException; // 1.4.0 + public int getProtId(String nameOfFS, int numOfProt) throws NamespaceException; // 1.4.0 - public String getProtSchema(String nameOfFS, int numOfProt) - throws NamespaceException; // Modified in 1.4.0 + public String getProtSchema(String nameOfFS, int numOfProt) + throws NamespaceException; // Modified in 1.4.0 - public String getProtHost(String nameOfFS, int numOfProt) - throws NamespaceException; // Modified in 1.4.0 + public String getProtHost(String nameOfFS, int numOfProt) + throws NamespaceException; // Modified in 1.4.0 - public String getProtPort(String nameOfFS, int numOfProt) - throws NamespaceException; // Modified in 1.4.0 + public String getProtPort(String nameOfFS, int numOfProt) + throws NamespaceException; // Modified in 1.4.0 - public String getRetentionPolicyType(String nameOfFS) - throws NamespaceException; + public String getRetentionPolicyType(String nameOfFS) throws NamespaceException; - public String getAccessLatencyType(String nameOfFS) throws NamespaceException; + public String getAccessLatencyType(String nameOfFS) throws NamespaceException; - public String getExpirationModeType(String nameOfFS) - throws NamespaceException; + public String getExpirationModeType(String nameOfFS) throws NamespaceException; - public String 
getOnlineSpaceUnitType(String nameOfFS) - throws NamespaceException; + public String getOnlineSpaceUnitType(String nameOfFS) throws NamespaceException; - public boolean getOnlineSpaceLimitedSize(String nameOfFS) - throws NamespaceException; // 1.4.0 + public boolean getOnlineSpaceLimitedSize(String nameOfFS) throws NamespaceException; // 1.4.0 - public long getOnlineSpaceSize(String nameOfFS) throws NamespaceException; + public long getOnlineSpaceSize(String nameOfFS) throws NamespaceException; - public String getNearlineSpaceUnitType(String nameOfFS) - throws NamespaceException; + public String getNearlineSpaceUnitType(String nameOfFS) throws NamespaceException; - public long getNearlineSpaceSize(String nameOfFS) throws NamespaceException; + public long getNearlineSpaceSize(String nameOfFS) throws NamespaceException; - public boolean getPoolDefined(String nameOfFS) throws NamespaceException; // 1.4.0 + public boolean getPoolDefined(String nameOfFS) throws NamespaceException; // 1.4.0 - public String getBalancerStrategy(String nameOfFS) throws NamespaceException; // 1.4.0 + public String getBalancerStrategy(String nameOfFS) throws NamespaceException; // 1.4.0 - public int getNumberOfPoolMembers(String nameOfFS, int numOfPool) - throws NamespaceException; // 1.4.0 + public int getNumberOfPoolMembers(String nameOfFS, int numOfPool) + throws NamespaceException; // 1.4.0 - public int getMemberID(String nameOfFS, int numOfPool, int memberNr) - throws NamespaceException; // 1.4.0 + public int getMemberID(String nameOfFS, int numOfPool, int memberNr) + throws NamespaceException; // 1.4.0 - public int getMemberWeight(String nameOfFS, int numOfPool, int memberNr) - throws NamespaceException; // 1.4.0 + public int getMemberWeight(String nameOfFS, int numOfPool, int memberNr) + throws NamespaceException; // 1.4.0 - public int getNumberOfMappingRule() throws NamespaceException; + public int getNumberOfMappingRule() throws NamespaceException; - public String getMapRuleName(int numOfMapRule) throws NamespaceException; + public String getMapRuleName(int numOfMapRule) throws NamespaceException; - public String getMapRule_StFNRoot(String nameOfMapRule) - throws NamespaceException; + public String getMapRule_StFNRoot(String nameOfMapRule) throws NamespaceException; - public String getMapRule_mappedFS(String nameOfMapRule) - throws NamespaceException; + public String getMapRule_mappedFS(String nameOfMapRule) throws NamespaceException; - public int getNumberOfApproachRule() throws NamespaceException; + public int getNumberOfApproachRule() throws NamespaceException; - public String getApproachRuleName(int numOfAppRule) throws NamespaceException; + public String getApproachRuleName(int numOfAppRule) throws NamespaceException; - public String getAppRule_SubjectDN(String nameOfAppRule) - throws NamespaceException; + public String getAppRule_SubjectDN(String nameOfAppRule) throws NamespaceException; - public String getAppRule_SubjectVO(String nameOfAppRule) - throws NamespaceException; + public String getAppRule_SubjectVO(String nameOfAppRule) throws NamespaceException; - public List getAppRule_AppFS(String nameOfAppRule) throws NamespaceException; + public List getAppRule_AppFS(String nameOfAppRule) throws NamespaceException; - public String getAppRule_RelativePath(String nameOfAppRule) - throws NamespaceException; + public String getAppRule_RelativePath(String nameOfAppRule) throws NamespaceException; } diff --git a/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceLoader.java 
b/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceLoader.java index c25dd134..e0245eb9 100644 --- a/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceLoader.java +++ b/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceLoader.java @@ -1,25 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config.xml; -import it.grid.storm.namespace.NamespaceValidator; -import it.grid.storm.namespace.config.NamespaceLoader; - import static java.io.File.separatorChar; +import it.grid.storm.namespace.NamespaceValidator; +import it.grid.storm.namespace.config.NamespaceLoader; import java.io.File; import java.io.IOException; import java.util.Observable; import java.util.Observer; import java.util.Timer; import java.util.TimerTask; - import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; - import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.XMLConfiguration; @@ -30,320 +26,292 @@ import org.xml.sax.SAXException; /** - *

- * Title:
- *
- * Description:
- *
- * Copyright: Copyright (c) 2006
- *
- * Company: INFN-CNAF
- *
+ *
+ * Title:
+ *
+ * Description:
+ *
+ * Copyright: Copyright (c) 2006
+ *

Company: INFN-CNAF + * * @author Riccardo Zappi * @version 1.0 */ public class XMLNamespaceLoader extends Observable implements NamespaceLoader { - private static Logger log = LoggerFactory.getLogger(XMLNamespaceLoader.class); - - public String filename; - public String path; - public int refresh; // refresh time in seconds before the configuration is - // checked for a change in parameters! - private XMLConfiguration config = null; - private final int delay = 1000; // delay for 5 sec. - private long period = -1; - private final Timer timer = new Timer(); - private XMLReloadingStrategy xmlStrategy; - private String namespaceFN = null; - private final String namespaceSchemaURL; - - public boolean schemaValidity = false; - - public XMLNamespaceLoader() { - - // Build the namespaceFileName - namespaceFN = getNamespaceFileName(); - namespaceSchemaURL = getNamespaceSchemaFileName(); - init(namespaceFN, refresh); - } - - public XMLNamespaceLoader(int refresh) { - - if (refresh < 0) { - this.refresh = 0; - } else { - this.refresh = refresh; - } - namespaceFN = getNamespaceFileName(); - namespaceSchemaURL = getNamespaceSchemaFileName(); - log.debug("Namespace XSD : {}", namespaceSchemaURL); - init(namespaceFN, refresh); - } - - public XMLNamespaceLoader(String filename) { - - this.filename = filename; - namespaceFN = getNamespaceFileName(); - namespaceSchemaURL = getNamespaceSchemaFileName(); - log.debug("Namespace XSD : {}", namespaceSchemaURL); - init(namespaceFN, refresh); - } - - public XMLNamespaceLoader(String path, String filename) { - - this.path = path; - this.filename = filename; - namespaceFN = getNamespaceFileName(); - namespaceSchemaURL = getNamespaceSchemaFileName(); - log.debug("Namespace XSD : {}", namespaceSchemaURL); - init(namespaceFN, refresh); - } - - public XMLNamespaceLoader(String path, String filename, int refresh) { - - if (refresh < 0) { - this.refresh = 0; - } else { - this.refresh = refresh; - } - this.path = path; - this.filename = filename; - namespaceFN = getNamespaceFileName(); - namespaceSchemaURL = getNamespaceSchemaFileName(); - log.debug("Namespace XSD : {}", namespaceSchemaURL); - init(namespaceFN, refresh); - } - - public void setObserver(Observer obs) { - - addObserver(obs); - } - - public void setNotifyManaged() { - - xmlStrategy.notifingPerformed(); - config.setReloadingStrategy(xmlStrategy); - } - - /** - * The setChanged() protected method must overridden to make it public - */ - @Override - public synchronized void setChanged() { - - super.setChanged(); - } - - private void init(String namespaceFileName, int refresh) { - - log.info("Reading Namespace configuration file {} and setting refresh rate to {} seconds.", namespaceFileName, refresh); - - // create reloading strategy for refresh - xmlStrategy = new XMLReloadingStrategy(); - period = 3000; // Conversion in millisec. - log.debug(" Refresh time is {} millisec", period); - xmlStrategy.setRefreshDelay(period); // Set to refresh sec the refreshing delay. - - namespaceFN = namespaceFileName; - - // specify the properties file and set the reloading strategy for that file - try { - config = new XMLConfiguration(); - config.setFileName(namespaceFileName); - - // Validation of Namespace.xml - log.debug(" ... CHECK of VALIDITY of NAMESPACE Configuration ..."); - - schemaValidity = XMLNamespaceLoader.checkValidity(namespaceSchemaURL, - namespaceFileName); - if (!(schemaValidity)) { - log.error("NAMESPACE IS NOT VALID IN RESPECT OF NAMESPACE SCHEMA! 
"); - throw new ConfigurationException("XML is not valid!"); - } else { - log.debug("Namespace is valid in respect of NAMESPACE SCHEMA."); - } - - // This will throw a ConfigurationException if the XML document does not - // conform to its DTD. - - config.setReloadingStrategy(xmlStrategy); - - Peeper peeper = new Peeper(this); - timer.schedule(peeper, delay, period); - - log.debug("Timer initialized"); - - config.load(); - log.debug("Namespace Configuration read!"); - - } catch (ConfigurationException cex) { - log.error("ATTENTION! Unable to load Namespace Configuration!", cex); - log.error(toString()); - } - - } - - private String getNamespaceFileName() { - - String configurationDir = it.grid.storm.config.Configuration.getInstance() - .configurationDir(); - // Looking for namespace configuration file - String namespaceFN = it.grid.storm.config.Configuration.getInstance() - .getNamespaceConfigFilename(); - // Build the filename - if (configurationDir.charAt(configurationDir.length() - 1) != separatorChar) { - configurationDir += Character.toString(separatorChar); - } - String namespaceAbsFN = configurationDir + namespaceFN; - // Check the namespace conf file accessibility - File nsFile = new File(namespaceAbsFN); - if (nsFile.exists()) { - log.debug("Found the namespace file : {}", namespaceAbsFN); - } else { - log.error("Unable to find the namespace file : {}", namespaceAbsFN); - } - return namespaceAbsFN; - } - - private String getNamespaceSchemaFileName() { - - String schemaName = it.grid.storm.config.Configuration.getInstance() - .getNamespaceSchemaFilename(); - - if ("Schema UNKNOWN!".equals(schemaName)) { - - schemaName = "namespace.xsd"; - String namespaceFN = getNamespaceFileName(); - File namespaceFile = new File(namespaceFN); - if (namespaceFile.exists()) { - DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); - try { - DocumentBuilder builder = factory.newDocumentBuilder(); - Document doc = builder.parse(namespaceFN); - Element rootElement = doc.getDocumentElement(); - String tagName = rootElement.getTagName(); - if ("namespace".equals(tagName)) { - if (rootElement.hasAttributes()) { - String value = rootElement - .getAttribute("xsi:noNamespaceSchemaLocation"); - if ((value != null) && (value.length() > 0)) { - schemaName = value; - } - } else { - log.error("{} don't have a valid root element attributes", namespaceFN); - } - } else { - log.error("{} don't have a valid root element.", namespaceFN); - } - - } catch (ParserConfigurationException | SAXException | IOException e) { - log.error("Error while parsing {}: {}", namespaceFN, e.getMessage(), e); - } - } - } - - return schemaName; - - } - - public Configuration getConfiguration() { - - return config; - } - - private static boolean checkValidity(String namespaceSchemaURL, - String filename) { - - NamespaceValidator validator = new NamespaceValidator(); - return validator.validateSchema(namespaceSchemaURL, filename); - } - - /** - * - *

- * Title:
- *
- * Description:
- *
- * Copyright: Copyright (c) 2006
- *
- * Company: INFN-CNAF and ICTP/eGrid project
- *
- * - * @author Riccardo Zappi - * @version 1.0 - */ - private class Peeper extends TimerTask { - - private XMLReloadingStrategy reloadingStrategy; - - private boolean signal; - private final XMLNamespaceLoader observed; - - public Peeper(XMLNamespaceLoader obs) { - - observed = obs; - } - - @Override - public void run() { - - // log.debug(" The glange of peeper.."); - reloadingStrategy = (XMLReloadingStrategy) config.getReloadingStrategy(); - boolean changed = reloadingStrategy.reloadingRequired(); - if (changed) { - log.debug(" NAMESPACE CONFIGURATION is changed ! "); - log.debug(" ... CHECK of VALIDITY of NAMESPACE Configuration ..."); - boolean valid = XMLNamespaceLoader.checkValidity(namespaceSchemaURL, - namespaceFN); - if (!valid) { - log - .debug(" Namespace configuration is not reloaded.. Please rectify the error."); - schemaValidity = false; - reloadingStrategy.notifingPerformed(); - reloadingStrategy.reloadingPerformed(); - } else { - log - .debug(" ... NAMESPACE Configuration is VALID in respect of Schema Grammar."); - log.debug(" ----> RELOADING "); - - schemaValidity = true; - - boolean forceReloading = it.grid.storm.config.Configuration - .getInstance().getNamespaceAutomaticReloading(); - if (forceReloading) { - config.reload(); - } else { - log - .debug(" ----> RELOAD of namespace don't be executed because NO AUTOMATIC RELOAD is configured."); - } - reloadingStrategy.reloadingPerformed(); - } - } - - signal = reloadingStrategy.notifingRequired(); - if ((signal)) { - observed.setChanged(); - observed.notifyObservers(" MSG : Namespace is changed!"); - reloadingStrategy.notifingPerformed(); - } - - } - - } - + private static Logger log = LoggerFactory.getLogger(XMLNamespaceLoader.class); + + public String filename; + public String path; + public int refresh; // refresh time in seconds before the configuration is + // checked for a change in parameters! + private XMLConfiguration config = null; + private final int delay = 1000; // delay for 5 sec. 
+ private long period = -1; + private final Timer timer = new Timer(); + private XMLReloadingStrategy xmlStrategy; + private String namespaceFN = null; + private final String namespaceSchemaURL; + + public boolean schemaValidity = false; + + public XMLNamespaceLoader() { + + // Build the namespaceFileName + namespaceFN = getNamespaceFileName(); + namespaceSchemaURL = getNamespaceSchemaFileName(); + init(namespaceFN, refresh); + } + + public XMLNamespaceLoader(int refresh) { + + if (refresh < 0) { + this.refresh = 0; + } else { + this.refresh = refresh; + } + namespaceFN = getNamespaceFileName(); + namespaceSchemaURL = getNamespaceSchemaFileName(); + log.debug("Namespace XSD : {}", namespaceSchemaURL); + init(namespaceFN, refresh); + } + + public XMLNamespaceLoader(String filename) { + + this.filename = filename; + namespaceFN = getNamespaceFileName(); + namespaceSchemaURL = getNamespaceSchemaFileName(); + log.debug("Namespace XSD : {}", namespaceSchemaURL); + init(namespaceFN, refresh); + } + + public XMLNamespaceLoader(String path, String filename) { + + this.path = path; + this.filename = filename; + namespaceFN = getNamespaceFileName(); + namespaceSchemaURL = getNamespaceSchemaFileName(); + log.debug("Namespace XSD : {}", namespaceSchemaURL); + init(namespaceFN, refresh); + } + + public XMLNamespaceLoader(String path, String filename, int refresh) { + + if (refresh < 0) { + this.refresh = 0; + } else { + this.refresh = refresh; + } + this.path = path; + this.filename = filename; + namespaceFN = getNamespaceFileName(); + namespaceSchemaURL = getNamespaceSchemaFileName(); + log.debug("Namespace XSD : {}", namespaceSchemaURL); + init(namespaceFN, refresh); + } + + public void setObserver(Observer obs) { + + addObserver(obs); + } + + public void setNotifyManaged() { + + xmlStrategy.notifingPerformed(); + config.setReloadingStrategy(xmlStrategy); + } + + /** The setChanged() protected method must overridden to make it public */ + @Override + public synchronized void setChanged() { + + super.setChanged(); + } + + private void init(String namespaceFileName, int refresh) { + + log.info( + "Reading Namespace configuration file {} and setting refresh rate to {} seconds.", + namespaceFileName, + refresh); + + // create reloading strategy for refresh + xmlStrategy = new XMLReloadingStrategy(); + period = 3000; // Conversion in millisec. + log.debug(" Refresh time is {} millisec", period); + xmlStrategy.setRefreshDelay(period); // Set to refresh sec the refreshing delay. + + namespaceFN = namespaceFileName; + + // specify the properties file and set the reloading strategy for that file + try { + config = new XMLConfiguration(); + config.setFileName(namespaceFileName); + + // Validation of Namespace.xml + log.debug(" ... CHECK of VALIDITY of NAMESPACE Configuration ..."); + + schemaValidity = XMLNamespaceLoader.checkValidity(namespaceSchemaURL, namespaceFileName); + if (!(schemaValidity)) { + log.error("NAMESPACE IS NOT VALID IN RESPECT OF NAMESPACE SCHEMA! "); + throw new ConfigurationException("XML is not valid!"); + } else { + log.debug("Namespace is valid in respect of NAMESPACE SCHEMA."); + } + + // This will throw a ConfigurationException if the XML document does not + // conform to its DTD. + + config.setReloadingStrategy(xmlStrategy); + + Peeper peeper = new Peeper(this); + timer.schedule(peeper, delay, period); + + log.debug("Timer initialized"); + + config.load(); + log.debug("Namespace Configuration read!"); + + } catch (ConfigurationException cex) { + log.error("ATTENTION! 
Unable to load Namespace Configuration!", cex); + log.error(toString()); + } + } + + private String getNamespaceFileName() { + + String configurationDir = it.grid.storm.config.Configuration.getInstance().configurationDir(); + // Looking for namespace configuration file + String namespaceFN = + it.grid.storm.config.Configuration.getInstance().getNamespaceConfigFilename(); + // Build the filename + if (configurationDir.charAt(configurationDir.length() - 1) != separatorChar) { + configurationDir += Character.toString(separatorChar); + } + String namespaceAbsFN = configurationDir + namespaceFN; + // Check the namespace conf file accessibility + File nsFile = new File(namespaceAbsFN); + if (nsFile.exists()) { + log.debug("Found the namespace file : {}", namespaceAbsFN); + } else { + log.error("Unable to find the namespace file : {}", namespaceAbsFN); + } + return namespaceAbsFN; + } + + private String getNamespaceSchemaFileName() { + + String schemaName = + it.grid.storm.config.Configuration.getInstance().getNamespaceSchemaFilename(); + + if ("Schema UNKNOWN!".equals(schemaName)) { + + schemaName = "namespace.xsd"; + String namespaceFN = getNamespaceFileName(); + File namespaceFile = new File(namespaceFN); + if (namespaceFile.exists()) { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + try { + DocumentBuilder builder = factory.newDocumentBuilder(); + Document doc = builder.parse(namespaceFN); + Element rootElement = doc.getDocumentElement(); + String tagName = rootElement.getTagName(); + if ("namespace".equals(tagName)) { + if (rootElement.hasAttributes()) { + String value = rootElement.getAttribute("xsi:noNamespaceSchemaLocation"); + if ((value != null) && (value.length() > 0)) { + schemaName = value; + } + } else { + log.error("{} don't have a valid root element attributes", namespaceFN); + } + } else { + log.error("{} don't have a valid root element.", namespaceFN); + } + + } catch (ParserConfigurationException | SAXException | IOException e) { + log.error("Error while parsing {}: {}", namespaceFN, e.getMessage(), e); + } + } + } + + return schemaName; + } + + public Configuration getConfiguration() { + + return config; + } + + private static boolean checkValidity(String namespaceSchemaURL, String filename) { + + NamespaceValidator validator = new NamespaceValidator(); + return validator.validateSchema(namespaceSchemaURL, filename); + } + + /** + * Title: + * + *

+ * Description:
+ *
+ * Copyright: Copyright (c) 2006
+ *

Company: INFN-CNAF and ICTP/eGrid project + * + * @author Riccardo Zappi + * @version 1.0 + */ + private class Peeper extends TimerTask { + + private XMLReloadingStrategy reloadingStrategy; + + private boolean signal; + private final XMLNamespaceLoader observed; + + public Peeper(XMLNamespaceLoader obs) { + + observed = obs; + } + + @Override + public void run() { + + // log.debug(" The glange of peeper.."); + reloadingStrategy = (XMLReloadingStrategy) config.getReloadingStrategy(); + boolean changed = reloadingStrategy.reloadingRequired(); + if (changed) { + log.debug(" NAMESPACE CONFIGURATION is changed ! "); + log.debug(" ... CHECK of VALIDITY of NAMESPACE Configuration ..."); + boolean valid = XMLNamespaceLoader.checkValidity(namespaceSchemaURL, namespaceFN); + if (!valid) { + log.debug(" Namespace configuration is not reloaded.. Please rectify the error."); + schemaValidity = false; + reloadingStrategy.notifingPerformed(); + reloadingStrategy.reloadingPerformed(); + } else { + log.debug(" ... NAMESPACE Configuration is VALID in respect of Schema Grammar."); + log.debug(" ----> RELOADING "); + + schemaValidity = true; + + boolean forceReloading = + it.grid.storm.config.Configuration.getInstance().getNamespaceAutomaticReloading(); + if (forceReloading) { + config.reload(); + } else { + log.debug( + " ----> RELOAD of namespace don't be executed because NO AUTOMATIC RELOAD is configured."); + } + reloadingStrategy.reloadingPerformed(); + } + } + + signal = reloadingStrategy.notifingRequired(); + if ((signal)) { + observed.setChanged(); + observed.notifyObservers(" MSG : Namespace is changed!"); + reloadingStrategy.notifingPerformed(); + } + } + } } diff --git a/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceParser.java b/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceParser.java index 9ef02b31..9e390b4e 100644 --- a/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceParser.java +++ b/src/main/java/it/grid/storm/namespace/config/xml/XMLNamespaceParser.java @@ -1,27 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config.xml; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Observable; -import java.util.Observer; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.XMLConfiguration; -import org.slf4j.Logger; - import com.google.common.collect.Lists; - import it.grid.storm.balancer.BalancingStrategyType; import it.grid.storm.check.sanity.filesystem.SupportedFSType; import it.grid.storm.namespace.DefaultValuesInterface; @@ -57,803 +39,766 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.util.GPFSSizeHelper; - +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Observable; +import java.util.Observer; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.XMLConfiguration; +import org.slf4j.Logger; public class XMLNamespaceParser implements NamespaceParser, Observer { - private final Logger log = NamespaceDirector.getLogger(); - - private String version; - private Map vfss; - private Map maprules; - private Map apprules; - - private XMLParserUtil parserUtil; - private final XMLConfiguration configuration; - private XMLNamespaceLoader xmlLoader; - - private final Lock refreshing = new ReentrantLock(); - - /** - * Constructor - * - * @param loader - * NamespaceLoader - */ - public XMLNamespaceParser(NamespaceLoader loader) { - - configuration = (XMLConfiguration) loader.getConfiguration(); - if (loader instanceof XMLNamespaceLoader) { - xmlLoader = (XMLNamespaceLoader) loader; - xmlLoader.setObserver(this); - } else { - log.error("XMLParser initialized with a non-XML Loader"); - } - - parserUtil = new XMLParserUtil(configuration); - - for (Iterator iter = parserUtil.getKeys(); iter.hasNext();) { - log.debug("current item: {}", iter.next()); - } - - vfss = new HashMap<>(); - maprules = new HashMap<>(); - apprules = new HashMap<>(); - - boolean validNamespaceConfiguration = refreshCachedData(); - if (!validNamespaceConfiguration) { - log.error(" ???????????????????????????????????? "); - log.error(" ???? NAMESPACE does not VALID ???? "); - log.error(" ???????????????????????????????????? "); - log.error(" Please see the log. "); - System.exit(0); - } - - } - - public Map getVFSs() { - - return vfss; - } - - public Map getApproachableRules() { - - return apprules; - } - - public Map getMappingRules() { - - return maprules; - } - - public long getLastUpdateTime() { - - return 0L; - } - - public void update(Observable observed, Object arg) { - - log.debug("{} Refreshing Namespace Memory Cache .. ", arg); - - XMLNamespaceLoader loader = (XMLNamespaceLoader) observed; - parserUtil = new XMLParserUtil(loader.getConfiguration()); - - if (loader.schemaValidity) { - refreshCachedData(); - } - - loader.setNotifyManaged(); - - log.debug(" ... 
Cache Refreshing ended"); - } - - /**************************************************************** - * PRIVATE METHODs - *****************************************************************/ - - private boolean refreshCachedData() { - - boolean result = false; - try { - refreshing.lock(); - configuration.clear(); - configuration.clearTree("filesystems"); - configuration.clearTree("mapping-rules"); - configuration.clearTree("approachable-rules"); - try { - configuration.load(); - log.debug(" ... reading and parsing the namespace configuration from file!"); - } catch (ConfigurationException ex) { - log.error(ex.getMessage(), ex); - } - log.debug("REFRESHING CACHE.."); - // Save the cache content - log.debug(" ..save the cache content before semantic check"); - Map vfssSAVED = vfss; - Map maprulesSAVED = maprules; - Map apprulesSAVED = apprules; - // Refresh the cache content with new values - - log.debug(" ..refresh the cache"); - refreshCache(); - - // Do the checking on Namespace - log.debug(" ..semantic check of namespace"); - NamespaceCheck checker = new NamespaceCheck(vfss, maprules, apprules); - boolean semanticCheck = checker.check(); - - // If there is an error restore old cache content - log.debug("REFRESHING ENDED."); - if (semanticCheck) { - log.debug("Namespace is semantically valid"); - result = true; - } else { - log - .warn("Namespace does not semantically valid!, so no load performed!"); - vfss = vfssSAVED; - maprules = maprulesSAVED; - apprules = apprulesSAVED; - result = false; - } - } finally { - refreshing.unlock(); - } - return result; - } - - private void refreshCache() { - - log - .info(" ############## REFRESHING NAMESPACE CONFIGURATION CACHE : start ###############"); - - /************************** - * Retrieve Version Number - *************************/ - try { - retrieveVersion(); - } catch (NamespaceException ex1) { - log - .warn( - "Namespace configuration does not contain a valid version number.", - ex1); - /** - * @todo Manage this exceptional status! - */ - } - - /************************** - * Building VIRTUAL FS - *************************/ - try { - buildVFSs(); - } catch (ClassNotFoundException ex) { - log - .error("Namespace Configuration ERROR in VFS-DRIVER specification", ex); - /** - * @todo Manage this exceptional status! - */ - } catch (NamespaceException ex) { - log - .error( - "Namespace Configuration ERROR in VFS definition, please check it.", - ex); - /** - * @todo Manage this exceptional status! - */ - } - - /************************** - * Building MAPPING RULES - *************************/ - try { - buildMapRules(); - } catch (NamespaceException ex1) { - log - .error( - "Namespace Configuration ERROR in MAPPING RULES definition, please check it.", - ex1); - /** - * @todo Manage this exceptional status! - */ - } - - /************************** - * Building APPROACHABLE RULES - *************************/ - try { - buildAppRules(); - } catch (NamespaceException ex2) { - log - .error( - "Namespace Configuration ERROR in APPROACHABLE RULES definition, please check it.", - ex2); - /** - * @todo Manage this exceptional status! 
- */ - } - log - .info(" ############## REFRESHING NAMESPACE CONFIGURATION CACHE : end ###############"); - - handleTotalOnlineSizeFromGPFSQuota(); - // Update SA within Reserved Space Catalog - updateSA(); - } - - private void handleTotalOnlineSizeFromGPFSQuota() { - - for (Entry entry : vfss.entrySet()) { - String storageAreaName = entry.getKey(); - VirtualFS storageArea = entry.getValue(); - if (SupportedFSType.parseFS(storageArea.getFSType()) == SupportedFSType.GPFS) { - Quota quota = storageArea.getCapabilities().getQuota(); - if (quota != null && quota.getEnabled()) { - - GPFSFilesetQuotaInfo quotaInfo = getGPFSQuotaInfo(storageArea); - if (quotaInfo != null) { - updateTotalOnlineSizeFromGPFSQuota(storageAreaName, storageArea, - quotaInfo); - } - } - } - } - } - - private GPFSFilesetQuotaInfo getGPFSQuotaInfo(VirtualFS storageArea) { - - GetGPFSFilesetQuotaInfoCommand cmd = new GetGPFSFilesetQuotaInfoCommand(storageArea); - - try { - return cmd.call(); - } catch (Throwable t) { - log - .warn( - "Cannot get quota information out of GPFS. Using the TotalOnlineSize in namespace.xml " - + "for Storage Area {}. Reason: {}", storageArea.getAliasName(), - t.getMessage()); - return null; - } - } - - private void updateTotalOnlineSizeFromGPFSQuota(String storageAreaName, - VirtualFS storageArea, GPFSFilesetQuotaInfo quotaInfo) { - - long gpfsTotalOnlineSize = GPFSSizeHelper.getBytesFromKIB(quotaInfo - .getBlockSoftLimit()); - Property newProperties = Property.from(storageArea.getProperties()); - try { - newProperties.setTotalOnlineSize(SizeUnitType.BYTE.getTypeName(), - gpfsTotalOnlineSize); - storageArea.setProperties(newProperties); - log.warn("TotalOnlineSize as specified in namespace.xml will be ignored " - + "since quota is enabled on the GPFS {} Storage Area.", - storageAreaName); - } catch (NamespaceException e) { - log - .warn( - "Cannot get quota information out of GPFS. Using the TotalOnlineSize in namespace.xml " - + "for Storage Area {}.", storageAreaName, e); - } - } - - // ******************* Update SA Catalog *************************** - private void updateSA() { - - TSpaceToken spaceToken = null; - // ReservedSpaceCatalog spaceCatalog = new ReservedSpaceCatalog(); - SpaceHelper spaceHelp = new SpaceHelper(); - log - .debug("Updating Space Catalog with Storage Area defined within NAMESPACE"); - VirtualFS vfs = null; - Iterator scan = vfss.values().iterator(); - while (scan.hasNext()) { - - vfs = (VirtualFS) scan.next(); - String vfsAliasName = vfs.getAliasName(); - log.debug(" Considering VFS : {}", vfsAliasName); - String aliasName = vfs.getSpaceTokenDescription(); - if (aliasName == null) { - // Found a VFS without the optional element Space Token Description - log.debug("XMLNamespaceParser.UpdateSA() : Found a VFS ('{}') without space-token-description. " - + "Skipping the Update of SA", vfsAliasName); - } else { - TSizeInBytes onlineSize = vfs.getProperties().getTotalOnlineSize(); - String spaceFileName = vfs.getRootPath(); - spaceToken = spaceHelp.createVOSA_Token(aliasName, onlineSize, - spaceFileName); - vfs.setSpaceToken(spaceToken); - - log.debug(" Updating SA ('{}'), token:'{}', onlineSize:'{}', spaceFileName:'{}'", - aliasName, spaceToken, onlineSize, spaceFileName); - } - - } - spaceHelp.purgeOldVOSA_token(); - log.debug("Updating Space Catalog... 
DONE!!"); - - } - - // ******************* VERSION NUMBER *************************** - private void retrieveVersion() throws NamespaceException { - - version = parserUtil.getNamespaceVersion(); - log.debug(" ==== NAMESPACE VERSION : '{}' ====", version); - } - - // ******************* VIRTUAL FS *************************** - - private void buildVFSs() throws ClassNotFoundException, NamespaceException { - - int nrOfVFS = parserUtil.getNumberOfFS(); - // For each VFS within configuration build VFS class instance -// VirtualFS vfs; -// String spaceTokenDescription = null; -// StorageClassType storageClass; -// String root = null; -// String name; -// String fsType; -// Class driver; -// String storageAreaAuthz; -// PropertyInterface prop; -// CapabilityInterface cap; -// DefaultValuesInterface defValues; -// SAAuthzType saAuthzType; - - for (int i = 0; i < nrOfVFS; i++) { - // Building VFS - VirtualFS vfs = new VirtualFS(); - // name - String name = parserUtil.getFSName(i); - vfs.setAliasName(name); - log.debug("VFS({}).name = '{}'", i, name); - // fs type - String fsType = parserUtil.getFSType(name); - vfs.setFSType(fsType); - log.debug("VFS({}).fs_type = '{}'", name, fsType); - // space token - String spaceTokenDescription = parserUtil.getFSSpaceTokenDescription(name); - vfs.setSpaceTokenDescription(spaceTokenDescription); - log.debug("VFS({}).space-token-description = '{}'", name, spaceTokenDescription); - // storage class - StorageClassType storageClass = StorageClassType.getStorageClassType(parserUtil.getStorageClass(name)); - vfs.setStorageClassType(storageClass); - log.debug("VFS({}).storage-class = '{}'", name, storageClass); - // root path - String root = parserUtil.getFSRoot(name); - vfs.setRoot(root); - log.debug("VFS({}).root = '{}'", name, root); - // fs driver - Class fsDriver = Class.forName(parserUtil.getFSDriver(name)); - vfs.setFSDriver(fsDriver); - log.debug("VFS({}).fsDriver [CLASS Name] = '{}'", name, fsDriver.getName()); - // space driver - Class spaceDriver = Class.forName(parserUtil.getSpaceDriver(name)); - vfs.setSpaceSystemDriver(spaceDriver); - log.debug("VFS({}).spaceDriver [CLASS Name] = '{}'", name, spaceDriver.getName()); - // authz type - SAAuthzType saAuthzType = parserUtil.getStorageAreaAuthzType(name); - vfs.setSAAuthzType(saAuthzType); - log.debug("VFS({}).storage-area-authz.TYPE = '{}'", name, saAuthzType); - // storage area authz - String storageAreaAuthz = parserUtil.getStorageAreaAuthz(name, saAuthzType); - vfs.setSAAuthzSource(storageAreaAuthz); - log.debug("VFS({}).storage-area-authz = '{}'", name, storageAreaAuthz); - // properties - PropertyInterface prop = buildProperties(name); - vfs.setProperties(prop); - // capabilities - Capability cap = buildCapabilities(name); - vfs.setCapabilities(cap); - - DefaultValuesInterface defValues = buildDefaultValues(name); - vfs.setDefaultValues(defValues); - - // Adding VFS - synchronized (this) { - vfss.remove(name); - vfss.put(name, vfs); - } - } - } - - // ******************* PROPERTY *************************** - private PropertyInterface buildProperties(String fsName) - throws NamespaceException { - - Property prop = new Property(); - - String accessLatency = parserUtil.getAccessLatencyType(fsName); - prop.setAccessLatency(accessLatency); - log.debug("VFS({}).Properties.AccessLatency = '{}'", fsName, accessLatency); - - String expirationMode = parserUtil.getExpirationModeType(fsName); - prop.setExpirationMode(expirationMode); - log.debug("VFS({}).Properties.ExpirationMode = '{}'", fsName, expirationMode); - 
- String retentionPolicy = parserUtil.getRetentionPolicyType(fsName); - prop.setRetentionPolicy(retentionPolicy); - log.debug("VFS({}).Properties.RetentionPolicy = '{}'", fsName, retentionPolicy); - - String unitType = parserUtil.getNearlineSpaceUnitType(fsName); - long nearLineSize = parserUtil.getNearlineSpaceSize(fsName); - prop.setTotalNearlineSize(unitType, nearLineSize); - log.debug("VFS({}).Properties.NearlineSpaceSize = '{} {}'", fsName, nearLineSize, unitType); - - unitType = parserUtil.getOnlineSpaceUnitType(fsName); - long onlineSize = parserUtil.getOnlineSpaceSize(fsName); - prop.setTotalOnlineSize(unitType, onlineSize); - log.debug("VFS({}).Properties.OnlineSpaceSize = '{} {}'", fsName, onlineSize, unitType); - - boolean hasLimitedSize = parserUtil.getOnlineSpaceLimitedSize(fsName); - prop.setLimitedSize(hasLimitedSize); - log.debug("VFS({}).Properties.OnlineSpaceLimitedSize = '{}'", fsName, hasLimitedSize); - - return prop; - } - - // ******************* CAPABILITY *************************** - - private Capability buildCapabilities(String fsName) - throws NamespaceException { - - /** - * ACL MODE ELEMENT - */ - ACLMode aclMode = ACLMode.makeFromString(parserUtil.getACLMode(fsName)); - Capability cap = new Capability(aclMode); - log.debug("VFS({}).Capabilities.aclMode = '{}'", fsName, aclMode); - - /** - * DEFAULT ACL - */ - boolean defaultACLDefined = parserUtil.getDefaultACLDefined(fsName); - log.debug("VFS({}).Capabilities.defaultACL [Defined?] = {}", fsName, defaultACLDefined); - if (defaultACLDefined) { - int nrACLEntries = parserUtil.getNumberOfACL(fsName); - String groupName = null; - String filePermString = null; - ACLEntry aclEntry = null; - for (int entryNumber = 0; entryNumber < nrACLEntries; entryNumber++) { - groupName = parserUtil.getGroupName(fsName, entryNumber); - filePermString = parserUtil.getPermissionString(fsName, entryNumber); - try { - aclEntry = new ACLEntry(groupName, filePermString); - cap.addACLEntry(aclEntry); - } catch (PermissionException permEx) { - log.error("Namespace XML Parser -- ERROR -- : {}", permEx.getMessage()); - } - } - log.debug("VFS({}).Capabilities.defaultACL = {}", fsName, cap.getDefaultACL()); - } - - /** - * QUOTA ELEMENT - */ - boolean quotaDefined = parserUtil.getQuotaDefined(fsName); - Quota quota = null; - if (quotaDefined) { - boolean quotaEnabled = parserUtil.getQuotaEnabled(fsName); - String device = parserUtil.getQuotaDevice(fsName); - - QuotaType quotaType; - String quotaValue = null; - - if (parserUtil.getQuotaFilesetDefined(fsName)) { - quotaType = QuotaType.buildQuotaType(QuotaType.FILESET); - quotaValue = parserUtil.getQuotaFileset(fsName); - } else { - if (parserUtil.getQuotaGroupIDDefined(fsName)) { - quotaType = QuotaType.buildQuotaType(QuotaType.GRP); - quotaValue = parserUtil.getQuotaGroupID(fsName); - } else { - if (parserUtil.getQuotaUserIDDefined(fsName)) { - quotaType = QuotaType.buildQuotaType(QuotaType.USR); - quotaValue = parserUtil.getQuotaUserID(fsName); - } else { - quotaType = QuotaType.buildQuotaType(QuotaType.UNKNOWN); - quotaValue = "unknown"; - } - } - } - - quotaType.setValue(quotaValue); - quota = new Quota(quotaEnabled, device, quotaType); - - } else { - quota = new Quota(); - } - cap.setQuota(quota); - - log.debug("VFS({}).Capabilities.quota = '{}'", fsName, quota); - - /** - * TRANSFER PROTOCOL - */ - int nrProtocols = parserUtil.getNumberOfProt(fsName); - for (int protCounter = 0; protCounter < nrProtocols; protCounter++) { - int protocolIndex = parserUtil.getProtId(fsName, protCounter); - 
String name = parserUtil.getProtName(fsName, protCounter); - String schema = parserUtil.getProtSchema(fsName, protCounter); - Protocol protocol = Protocol.getProtocol(schema); - protocol.setProtocolServiceName(name); - String serviceHostName = parserUtil.getProtHost(fsName, protCounter); - String servicePortValue = parserUtil.getProtPort(fsName, protCounter); - int portIntValue = -1; - Authority service = null; - if (servicePortValue != null) { - try { - portIntValue = Integer.parseInt(servicePortValue); - service = new Authority(serviceHostName, portIntValue); - } catch (NumberFormatException nfe) { - log - .warn("to evaluate the environmental variable " + servicePortValue); - } - } else { - service = new Authority(serviceHostName); - } - TransportProtocol transportProt = new TransportProtocol(protocol, service); - transportProt.setProtocolID(protocolIndex); - log.debug("VFS({}).Capabilities.protocol({}) = '{}'", fsName, protCounter, transportProt); - cap.addTransportProtocolByScheme(protocol, transportProt); - cap.addTransportProtocol(transportProt); - if (protocolIndex != -1) { - cap.addTransportProtocolByID(protocolIndex, transportProt); - } - - } - - /** - * PROTOCOL POOL - */ - int nrPools = parserUtil.getNumberOfPool(fsName); - if (nrPools > 0) { - - for (int poolCounter = 0; poolCounter < nrPools; poolCounter++) { - BalancingStrategyType balanceStrategy = BalancingStrategyType - .getByValue(parserUtil.getBalancerStrategy(fsName, poolCounter)); - List poolMembers = Lists.newArrayList(); - int nrMembers = parserUtil.getNumberOfPoolMembers(fsName, poolCounter); - for (int i = 0; i < nrMembers; i++) { - int protIndex = parserUtil.getMemberID(fsName, poolCounter, i); - TransportProtocol tProtMember = cap.getProtocolByID(protIndex); - if (tProtMember != null) { - PoolMember poolMember; - if (balanceStrategy.requireWeight()) { - int memberWeight = parserUtil.getMemberWeight(fsName, - poolCounter, i); - poolMember = new PoolMember(protIndex, tProtMember, memberWeight); - } else { - poolMember = new PoolMember(protIndex, tProtMember); - } - poolMembers.add(poolMember); - } else { // member pointed out doesn't exist!! - String errorMessage = String.format("POOL Building: Protocol with index %d does not exists in the VFS : %s", protIndex, fsName); - log.error(errorMessage); - throw new NamespaceException(errorMessage); - } - } - verifyPoolIsValid(poolMembers); - Protocol poolProtocol = poolMembers.get(0).getMemberProtocol().getProtocol(); - log.debug("Defined pool for protocol {} with size {}", poolProtocol, - poolMembers.size()); - ProtocolPool pool = new ProtocolPool(balanceStrategy, poolMembers); - cap.addProtocolPool(pool); - cap.addProtocolPoolBySchema(poolProtocol, pool); - log.debug("PROTOCOL POOL: {}", cap.getPoolByScheme(poolProtocol)); - } - } else { - log.debug("Pool is not defined in VFS {}", fsName); - } - - return cap; - } - - /** - * @param poolMembers - * @throws NamespaceException - */ - private void verifyPoolIsValid(List poolMembers) - throws NamespaceException { - - if (poolMembers.isEmpty()) { - throw new NamespaceException("POOL Defined is EMPTY!"); - } - Protocol prot = poolMembers.get(0).getMemberProtocol().getProtocol(); - for (PoolMember member : poolMembers) { - if (!(member.getMemberProtocol().getProtocol().equals(prot))) { - throw new NamespaceException( - "Defined Pool is NOT HOMOGENEOUS! 
Protocols " + prot.toString() - + " and " + member.toString() + " differs"); - } - } - } - - // ******************* DEFAULT VALUES *************************** - - private DefaultValuesInterface buildDefaultValues(String fsName) - throws NamespaceException { - - DefaultValues def = new DefaultValues(); - if (parserUtil.isDefaultElementPresent(fsName)) { - setSpaceDef(fsName, def); - setFileDef(fsName, def); - } else { // Produce Default Values with default values :o ! - log.debug("VFS({}).DefaultValues is ABSENT. Using DEFAULT values.", fsName); - } - return def; - } - - private void setSpaceDef(String fsName, DefaultValues def) - throws NamespaceException { - - String spaceType = parserUtil.getDefaultSpaceType(fsName); - log.debug("VFS({}).DefaultValues.space.type = '{}'", fsName, spaceType); - long lifeTime = parserUtil.getDefaultSpaceLifeTime(fsName); - log.debug("VFS({}).DefaultValues.space.lifeTime = ''", fsName, lifeTime); - long guarSize = parserUtil.getDefaultSpaceGuarSize(fsName); - log.debug("VFS({}).DefaultValues.space.guarSize = '{}'", fsName, guarSize); - long totSize = parserUtil.getDefaultSpaceTotSize(fsName); - log.debug("VFS({}).DefaultValues.space.totSize = '{}'", fsName, totSize); - def.setSpaceDefaults(spaceType, lifeTime, guarSize, totSize); - } - - private void setFileDef(String fsName, DefaultValues def) - throws NamespaceException { - - String fileType = parserUtil.getDefaultFileType(fsName); - log.debug("VFS({}).DefaultValues.file.type = '{}'", fsName, fileType); - long lifeTime = parserUtil.getDefaultFileLifeTime(fsName); - log.debug("VFS({}).DefaultValues.file.lifeTime = '{}'", fsName, lifeTime); - def.setFileDefaults(fileType, lifeTime); - } - - // ******************* MAPPING RULE *************************** - - private void buildMapRules() throws NamespaceException { - - int numOfMapRules = parserUtil.getNumberOfMappingRule(); - String ruleName; - String stfnRoot; - String mappedFS; - MappingRule mapRule; - - for (int i = 0; i < numOfMapRules; i++) { - ruleName = parserUtil.getMapRuleName(i); - mappedFS = parserUtil.getMapRule_mappedFS(ruleName); - // Adding mapping rule to VFS within vfss; - if (vfss.containsKey(mappedFS)) { - log.debug("VFS '{}' pointed by RULE : '{}' exists.", mappedFS, ruleName); - stfnRoot = parserUtil.getMapRule_StFNRoot(ruleName); - VirtualFS vfs = vfss.get(mappedFS); - mapRule = new MappingRule(ruleName, stfnRoot, vfs); - ((VirtualFS) vfs).addMappingRule(mapRule); - maprules.put(ruleName, mapRule); - } else { - log.error("VFS '{}' pointed by RULE : '{}' DOES NOT EXISTS.", mappedFS, ruleName); - } - } - } - - // ******************* APPROACHABLE RULE *************************** - - private void buildAppRules() throws NamespaceException { - - int numOfAppRules = parserUtil.getNumberOfApproachRule(); - - String ruleName; - String dn; - String vo_name; - String relPath; - String anonymousHttpReadString; - List appFSList; - ApproachableRule appRule; - - log.debug("Number of APP Rule : {}", numOfAppRules); - - - for (int i = 0; i < numOfAppRules; i++) { - ruleName = parserUtil.getApproachRuleName(i); - log.debug(" APP rule nr: {} is named : {}", i, ruleName); - - dn = parserUtil.getAppRule_SubjectDN(ruleName); - vo_name = parserUtil.getAppRule_SubjectVO(ruleName); - SubjectRules subjectRules = new SubjectRules(dn, vo_name); - - relPath = parserUtil.getAppRule_RelativePath(ruleName); - - anonymousHttpReadString = parserUtil - .getAppRule_AnonymousHttpRead(ruleName); - if (anonymousHttpReadString != null - && 
!anonymousHttpReadString.trim().isEmpty()) { - appRule = new ApproachableRule(ruleName, subjectRules, relPath, - Boolean.parseBoolean(anonymousHttpReadString)); - } else { - appRule = new ApproachableRule(ruleName, subjectRules, relPath); - } - - appFSList = parserUtil.getAppRule_AppFS(ruleName); - for (String appFS : appFSList) { - if (vfss.containsKey(appFS)) { - log.debug("VFS '{}' pointed by RULE : '{}' exists.", appFS, ruleName); - VirtualFS vfs = vfss.get(appFS); - ((VirtualFS) vfs).addApproachableRule(appRule); - appRule.addApproachableVFS(vfs); - } else { - log.error("VFS '{}' pointed by RULE : '{}' DOES NOT EXISTS.", appFS, ruleName); - } - } - apprules.put(ruleName, appRule); - } - } - - /***************************************************************************** - * BUSINESS METHODs - ****************************************************************************/ - - public String getNamespaceVersion() { - - return version; - } - - public List getAllVFS_Roots() { - - Collection elem = vfss.values(); - List roots = new ArrayList<>(vfss.size()); - Iterator scan = elem.iterator(); - while (scan.hasNext()) { - String root = null; - root = scan.next().getRootPath(); - roots.add(root); - } - return roots; - } - - public Map getMapVFS_Root() { - - Map result = new HashMap<>(); - Collection elem = vfss.values(); - Iterator scan = elem.iterator(); - while (scan.hasNext()) { - String root = null; - VirtualFS vfs = scan.next(); - root = vfs.getRootPath(); - result.put(root, vfs); - } - return result; - } - - public List getAllMappingRule_StFNRoots() { - - Collection elem = maprules.values(); - List roots = new ArrayList<>(maprules.size()); - Iterator scan = elem.iterator(); - String root = null; - while (scan.hasNext()) { - root = scan.next().getStFNRoot(); - roots.add(root); - } - return roots; - } - - public Map getMappingRuleMAP() { - - Map map = new HashMap<>(); - Collection elem = maprules.values(); - Iterator scan = elem.iterator(); - String root = null; - String name = null; - MappingRule rule; - while (scan.hasNext()) { - rule = scan.next(); - root = rule.getStFNRoot(); - name = rule.getRuleName(); - map.put(name, root); - } - return map; - } - - public VirtualFS getVFS(String vfsName) { - - return vfss.get(vfsName); - } + private final Logger log = NamespaceDirector.getLogger(); + private String version; + private Map vfss; + private Map maprules; + private Map apprules; + + private XMLParserUtil parserUtil; + private final XMLConfiguration configuration; + private XMLNamespaceLoader xmlLoader; + + private final Lock refreshing = new ReentrantLock(); + + /** + * Constructor + * + * @param loader NamespaceLoader + */ + public XMLNamespaceParser(NamespaceLoader loader) { + + configuration = (XMLConfiguration) loader.getConfiguration(); + if (loader instanceof XMLNamespaceLoader) { + xmlLoader = (XMLNamespaceLoader) loader; + xmlLoader.setObserver(this); + } else { + log.error("XMLParser initialized with a non-XML Loader"); + } + + parserUtil = new XMLParserUtil(configuration); + + for (Iterator iter = parserUtil.getKeys(); iter.hasNext(); ) { + log.debug("current item: {}", iter.next()); + } + + vfss = new HashMap<>(); + maprules = new HashMap<>(); + apprules = new HashMap<>(); + + boolean validNamespaceConfiguration = refreshCachedData(); + if (!validNamespaceConfiguration) { + log.error(" ???????????????????????????????????? "); + log.error(" ???? NAMESPACE does not VALID ???? "); + log.error(" ???????????????????????????????????? "); + log.error(" Please see the log. 
"); + System.exit(0); + } + } + + public Map getVFSs() { + + return vfss; + } + + public Map getApproachableRules() { + + return apprules; + } + + public Map getMappingRules() { + + return maprules; + } + + public long getLastUpdateTime() { + + return 0L; + } + + public void update(Observable observed, Object arg) { + + log.debug("{} Refreshing Namespace Memory Cache .. ", arg); + + XMLNamespaceLoader loader = (XMLNamespaceLoader) observed; + parserUtil = new XMLParserUtil(loader.getConfiguration()); + + if (loader.schemaValidity) { + refreshCachedData(); + } + + loader.setNotifyManaged(); + + log.debug(" ... Cache Refreshing ended"); + } + + /** + * ************************************************************** PRIVATE METHODs + * *************************************************************** + */ + private boolean refreshCachedData() { + + boolean result = false; + try { + refreshing.lock(); + configuration.clear(); + configuration.clearTree("filesystems"); + configuration.clearTree("mapping-rules"); + configuration.clearTree("approachable-rules"); + try { + configuration.load(); + log.debug(" ... reading and parsing the namespace configuration from file!"); + } catch (ConfigurationException ex) { + log.error(ex.getMessage(), ex); + } + log.debug("REFRESHING CACHE.."); + // Save the cache content + log.debug(" ..save the cache content before semantic check"); + Map vfssSAVED = vfss; + Map maprulesSAVED = maprules; + Map apprulesSAVED = apprules; + // Refresh the cache content with new values + + log.debug(" ..refresh the cache"); + refreshCache(); + + // Do the checking on Namespace + log.debug(" ..semantic check of namespace"); + NamespaceCheck checker = new NamespaceCheck(vfss, maprules, apprules); + boolean semanticCheck = checker.check(); + + // If there is an error restore old cache content + log.debug("REFRESHING ENDED."); + if (semanticCheck) { + log.debug("Namespace is semantically valid"); + result = true; + } else { + log.warn("Namespace does not semantically valid!, so no load performed!"); + vfss = vfssSAVED; + maprules = maprulesSAVED; + apprules = apprulesSAVED; + result = false; + } + } finally { + refreshing.unlock(); + } + return result; + } + + private void refreshCache() { + + log.info(" ############## REFRESHING NAMESPACE CONFIGURATION CACHE : start ###############"); + + /** ************************ Retrieve Version Number *********************** */ + try { + retrieveVersion(); + } catch (NamespaceException ex1) { + log.warn("Namespace configuration does not contain a valid version number.", ex1); + /** @todo Manage this exceptional status! */ + } + + /** ************************ Building VIRTUAL FS *********************** */ + try { + buildVFSs(); + } catch (ClassNotFoundException ex) { + log.error("Namespace Configuration ERROR in VFS-DRIVER specification", ex); + /** @todo Manage this exceptional status! */ + } catch (NamespaceException ex) { + log.error("Namespace Configuration ERROR in VFS definition, please check it.", ex); + /** @todo Manage this exceptional status! */ + } + + /** ************************ Building MAPPING RULES *********************** */ + try { + buildMapRules(); + } catch (NamespaceException ex1) { + log.error("Namespace Configuration ERROR in MAPPING RULES definition, please check it.", ex1); + /** @todo Manage this exceptional status! 
*/ + } + + /** ************************ Building APPROACHABLE RULES *********************** */ + try { + buildAppRules(); + } catch (NamespaceException ex2) { + log.error( + "Namespace Configuration ERROR in APPROACHABLE RULES definition, please check it.", ex2); + /** @todo Manage this exceptional status! */ + } + log.info(" ############## REFRESHING NAMESPACE CONFIGURATION CACHE : end ###############"); + + handleTotalOnlineSizeFromGPFSQuota(); + // Update SA within Reserved Space Catalog + updateSA(); + } + + private void handleTotalOnlineSizeFromGPFSQuota() { + + for (Entry entry : vfss.entrySet()) { + String storageAreaName = entry.getKey(); + VirtualFS storageArea = entry.getValue(); + if (SupportedFSType.parseFS(storageArea.getFSType()) == SupportedFSType.GPFS) { + Quota quota = storageArea.getCapabilities().getQuota(); + if (quota != null && quota.getEnabled()) { + + GPFSFilesetQuotaInfo quotaInfo = getGPFSQuotaInfo(storageArea); + if (quotaInfo != null) { + updateTotalOnlineSizeFromGPFSQuota(storageAreaName, storageArea, quotaInfo); + } + } + } + } + } + + private GPFSFilesetQuotaInfo getGPFSQuotaInfo(VirtualFS storageArea) { + + GetGPFSFilesetQuotaInfoCommand cmd = new GetGPFSFilesetQuotaInfoCommand(storageArea); + + try { + return cmd.call(); + } catch (Throwable t) { + log.warn( + "Cannot get quota information out of GPFS. Using the TotalOnlineSize in namespace.xml " + + "for Storage Area {}. Reason: {}", + storageArea.getAliasName(), + t.getMessage()); + return null; + } + } + + private void updateTotalOnlineSizeFromGPFSQuota( + String storageAreaName, VirtualFS storageArea, GPFSFilesetQuotaInfo quotaInfo) { + + long gpfsTotalOnlineSize = GPFSSizeHelper.getBytesFromKIB(quotaInfo.getBlockSoftLimit()); + Property newProperties = Property.from(storageArea.getProperties()); + try { + newProperties.setTotalOnlineSize(SizeUnitType.BYTE.getTypeName(), gpfsTotalOnlineSize); + storageArea.setProperties(newProperties); + log.warn( + "TotalOnlineSize as specified in namespace.xml will be ignored " + + "since quota is enabled on the GPFS {} Storage Area.", + storageAreaName); + } catch (NamespaceException e) { + log.warn( + "Cannot get quota information out of GPFS. Using the TotalOnlineSize in namespace.xml " + + "for Storage Area {}.", + storageAreaName, + e); + } + } + + // ******************* Update SA Catalog *************************** + private void updateSA() { + + TSpaceToken spaceToken = null; + // ReservedSpaceCatalog spaceCatalog = new ReservedSpaceCatalog(); + SpaceHelper spaceHelp = new SpaceHelper(); + log.debug("Updating Space Catalog with Storage Area defined within NAMESPACE"); + VirtualFS vfs = null; + Iterator scan = vfss.values().iterator(); + while (scan.hasNext()) { + + vfs = (VirtualFS) scan.next(); + String vfsAliasName = vfs.getAliasName(); + log.debug(" Considering VFS : {}", vfsAliasName); + String aliasName = vfs.getSpaceTokenDescription(); + if (aliasName == null) { + // Found a VFS without the optional element Space Token Description + log.debug( + "XMLNamespaceParser.UpdateSA() : Found a VFS ('{}') without space-token-description. 
" + + "Skipping the Update of SA", + vfsAliasName); + } else { + TSizeInBytes onlineSize = vfs.getProperties().getTotalOnlineSize(); + String spaceFileName = vfs.getRootPath(); + spaceToken = spaceHelp.createVOSA_Token(aliasName, onlineSize, spaceFileName); + vfs.setSpaceToken(spaceToken); + + log.debug( + " Updating SA ('{}'), token:'{}', onlineSize:'{}', spaceFileName:'{}'", + aliasName, + spaceToken, + onlineSize, + spaceFileName); + } + } + spaceHelp.purgeOldVOSA_token(); + log.debug("Updating Space Catalog... DONE!!"); + } + + // ******************* VERSION NUMBER *************************** + private void retrieveVersion() throws NamespaceException { + + version = parserUtil.getNamespaceVersion(); + log.debug(" ==== NAMESPACE VERSION : '{}' ====", version); + } + + // ******************* VIRTUAL FS *************************** + + private void buildVFSs() throws ClassNotFoundException, NamespaceException { + + int nrOfVFS = parserUtil.getNumberOfFS(); + // For each VFS within configuration build VFS class instance + // VirtualFS vfs; + // String spaceTokenDescription = null; + // StorageClassType storageClass; + // String root = null; + // String name; + // String fsType; + // Class driver; + // String storageAreaAuthz; + // PropertyInterface prop; + // CapabilityInterface cap; + // DefaultValuesInterface defValues; + // SAAuthzType saAuthzType; + + for (int i = 0; i < nrOfVFS; i++) { + // Building VFS + VirtualFS vfs = new VirtualFS(); + // name + String name = parserUtil.getFSName(i); + vfs.setAliasName(name); + log.debug("VFS({}).name = '{}'", i, name); + // fs type + String fsType = parserUtil.getFSType(name); + vfs.setFSType(fsType); + log.debug("VFS({}).fs_type = '{}'", name, fsType); + // space token + String spaceTokenDescription = parserUtil.getFSSpaceTokenDescription(name); + vfs.setSpaceTokenDescription(spaceTokenDescription); + log.debug("VFS({}).space-token-description = '{}'", name, spaceTokenDescription); + // storage class + StorageClassType storageClass = + StorageClassType.getStorageClassType(parserUtil.getStorageClass(name)); + vfs.setStorageClassType(storageClass); + log.debug("VFS({}).storage-class = '{}'", name, storageClass); + // root path + String root = parserUtil.getFSRoot(name); + vfs.setRoot(root); + log.debug("VFS({}).root = '{}'", name, root); + // fs driver + Class fsDriver = Class.forName(parserUtil.getFSDriver(name)); + vfs.setFSDriver(fsDriver); + log.debug("VFS({}).fsDriver [CLASS Name] = '{}'", name, fsDriver.getName()); + // space driver + Class spaceDriver = Class.forName(parserUtil.getSpaceDriver(name)); + vfs.setSpaceSystemDriver(spaceDriver); + log.debug("VFS({}).spaceDriver [CLASS Name] = '{}'", name, spaceDriver.getName()); + // authz type + SAAuthzType saAuthzType = parserUtil.getStorageAreaAuthzType(name); + vfs.setSAAuthzType(saAuthzType); + log.debug("VFS({}).storage-area-authz.TYPE = '{}'", name, saAuthzType); + // storage area authz + String storageAreaAuthz = parserUtil.getStorageAreaAuthz(name, saAuthzType); + vfs.setSAAuthzSource(storageAreaAuthz); + log.debug("VFS({}).storage-area-authz = '{}'", name, storageAreaAuthz); + // properties + PropertyInterface prop = buildProperties(name); + vfs.setProperties(prop); + // capabilities + Capability cap = buildCapabilities(name); + vfs.setCapabilities(cap); + + DefaultValuesInterface defValues = buildDefaultValues(name); + vfs.setDefaultValues(defValues); + + // Adding VFS + synchronized (this) { + vfss.remove(name); + vfss.put(name, vfs); + } + } + } + + // ******************* PROPERTY 
*************************** + private PropertyInterface buildProperties(String fsName) throws NamespaceException { + + Property prop = new Property(); + + String accessLatency = parserUtil.getAccessLatencyType(fsName); + prop.setAccessLatency(accessLatency); + log.debug("VFS({}).Properties.AccessLatency = '{}'", fsName, accessLatency); + + String expirationMode = parserUtil.getExpirationModeType(fsName); + prop.setExpirationMode(expirationMode); + log.debug("VFS({}).Properties.ExpirationMode = '{}'", fsName, expirationMode); + + String retentionPolicy = parserUtil.getRetentionPolicyType(fsName); + prop.setRetentionPolicy(retentionPolicy); + log.debug("VFS({}).Properties.RetentionPolicy = '{}'", fsName, retentionPolicy); + + String unitType = parserUtil.getNearlineSpaceUnitType(fsName); + long nearLineSize = parserUtil.getNearlineSpaceSize(fsName); + prop.setTotalNearlineSize(unitType, nearLineSize); + log.debug("VFS({}).Properties.NearlineSpaceSize = '{} {}'", fsName, nearLineSize, unitType); + + unitType = parserUtil.getOnlineSpaceUnitType(fsName); + long onlineSize = parserUtil.getOnlineSpaceSize(fsName); + prop.setTotalOnlineSize(unitType, onlineSize); + log.debug("VFS({}).Properties.OnlineSpaceSize = '{} {}'", fsName, onlineSize, unitType); + + boolean hasLimitedSize = parserUtil.getOnlineSpaceLimitedSize(fsName); + prop.setLimitedSize(hasLimitedSize); + log.debug("VFS({}).Properties.OnlineSpaceLimitedSize = '{}'", fsName, hasLimitedSize); + + return prop; + } + + // ******************* CAPABILITY *************************** + + private Capability buildCapabilities(String fsName) throws NamespaceException { + + /** ACL MODE ELEMENT */ + ACLMode aclMode = ACLMode.makeFromString(parserUtil.getACLMode(fsName)); + Capability cap = new Capability(aclMode); + log.debug("VFS({}).Capabilities.aclMode = '{}'", fsName, aclMode); + + /** DEFAULT ACL */ + boolean defaultACLDefined = parserUtil.getDefaultACLDefined(fsName); + log.debug("VFS({}).Capabilities.defaultACL [Defined?] 
= {}", fsName, defaultACLDefined); + if (defaultACLDefined) { + int nrACLEntries = parserUtil.getNumberOfACL(fsName); + String groupName = null; + String filePermString = null; + ACLEntry aclEntry = null; + for (int entryNumber = 0; entryNumber < nrACLEntries; entryNumber++) { + groupName = parserUtil.getGroupName(fsName, entryNumber); + filePermString = parserUtil.getPermissionString(fsName, entryNumber); + try { + aclEntry = new ACLEntry(groupName, filePermString); + cap.addACLEntry(aclEntry); + } catch (PermissionException permEx) { + log.error("Namespace XML Parser -- ERROR -- : {}", permEx.getMessage()); + } + } + log.debug("VFS({}).Capabilities.defaultACL = {}", fsName, cap.getDefaultACL()); + } + + /** QUOTA ELEMENT */ + boolean quotaDefined = parserUtil.getQuotaDefined(fsName); + Quota quota = null; + if (quotaDefined) { + boolean quotaEnabled = parserUtil.getQuotaEnabled(fsName); + String device = parserUtil.getQuotaDevice(fsName); + + QuotaType quotaType; + String quotaValue = null; + + if (parserUtil.getQuotaFilesetDefined(fsName)) { + quotaType = QuotaType.buildQuotaType(QuotaType.FILESET); + quotaValue = parserUtil.getQuotaFileset(fsName); + } else { + if (parserUtil.getQuotaGroupIDDefined(fsName)) { + quotaType = QuotaType.buildQuotaType(QuotaType.GRP); + quotaValue = parserUtil.getQuotaGroupID(fsName); + } else { + if (parserUtil.getQuotaUserIDDefined(fsName)) { + quotaType = QuotaType.buildQuotaType(QuotaType.USR); + quotaValue = parserUtil.getQuotaUserID(fsName); + } else { + quotaType = QuotaType.buildQuotaType(QuotaType.UNKNOWN); + quotaValue = "unknown"; + } + } + } + + quotaType.setValue(quotaValue); + quota = new Quota(quotaEnabled, device, quotaType); + + } else { + quota = new Quota(); + } + cap.setQuota(quota); + + log.debug("VFS({}).Capabilities.quota = '{}'", fsName, quota); + + /** TRANSFER PROTOCOL */ + int nrProtocols = parserUtil.getNumberOfProt(fsName); + for (int protCounter = 0; protCounter < nrProtocols; protCounter++) { + int protocolIndex = parserUtil.getProtId(fsName, protCounter); + String name = parserUtil.getProtName(fsName, protCounter); + String schema = parserUtil.getProtSchema(fsName, protCounter); + Protocol protocol = Protocol.getProtocol(schema); + protocol.setProtocolServiceName(name); + String serviceHostName = parserUtil.getProtHost(fsName, protCounter); + String servicePortValue = parserUtil.getProtPort(fsName, protCounter); + int portIntValue = -1; + Authority service = null; + if (servicePortValue != null) { + try { + portIntValue = Integer.parseInt(servicePortValue); + service = new Authority(serviceHostName, portIntValue); + } catch (NumberFormatException nfe) { + log.warn("to evaluate the environmental variable " + servicePortValue); + } + } else { + service = new Authority(serviceHostName); + } + TransportProtocol transportProt = new TransportProtocol(protocol, service); + transportProt.setProtocolID(protocolIndex); + log.debug("VFS({}).Capabilities.protocol({}) = '{}'", fsName, protCounter, transportProt); + cap.addTransportProtocolByScheme(protocol, transportProt); + cap.addTransportProtocol(transportProt); + if (protocolIndex != -1) { + cap.addTransportProtocolByID(protocolIndex, transportProt); + } + } + + /** PROTOCOL POOL */ + int nrPools = parserUtil.getNumberOfPool(fsName); + if (nrPools > 0) { + + for (int poolCounter = 0; poolCounter < nrPools; poolCounter++) { + BalancingStrategyType balanceStrategy = + BalancingStrategyType.getByValue(parserUtil.getBalancerStrategy(fsName, poolCounter)); + List poolMembers = 
Lists.newArrayList(); + int nrMembers = parserUtil.getNumberOfPoolMembers(fsName, poolCounter); + for (int i = 0; i < nrMembers; i++) { + int protIndex = parserUtil.getMemberID(fsName, poolCounter, i); + TransportProtocol tProtMember = cap.getProtocolByID(protIndex); + if (tProtMember != null) { + PoolMember poolMember; + if (balanceStrategy.requireWeight()) { + int memberWeight = parserUtil.getMemberWeight(fsName, poolCounter, i); + poolMember = new PoolMember(protIndex, tProtMember, memberWeight); + } else { + poolMember = new PoolMember(protIndex, tProtMember); + } + poolMembers.add(poolMember); + } else { // member pointed out doesn't exist!! + String errorMessage = + String.format( + "POOL Building: Protocol with index %d does not exists in the VFS : %s", + protIndex, fsName); + log.error(errorMessage); + throw new NamespaceException(errorMessage); + } + } + verifyPoolIsValid(poolMembers); + Protocol poolProtocol = poolMembers.get(0).getMemberProtocol().getProtocol(); + log.debug("Defined pool for protocol {} with size {}", poolProtocol, poolMembers.size()); + ProtocolPool pool = new ProtocolPool(balanceStrategy, poolMembers); + cap.addProtocolPool(pool); + cap.addProtocolPoolBySchema(poolProtocol, pool); + log.debug("PROTOCOL POOL: {}", cap.getPoolByScheme(poolProtocol)); + } + } else { + log.debug("Pool is not defined in VFS {}", fsName); + } + + return cap; + } + + /** + * @param poolMembers + * @throws NamespaceException + */ + private void verifyPoolIsValid(List poolMembers) throws NamespaceException { + + if (poolMembers.isEmpty()) { + throw new NamespaceException("POOL Defined is EMPTY!"); + } + Protocol prot = poolMembers.get(0).getMemberProtocol().getProtocol(); + for (PoolMember member : poolMembers) { + if (!(member.getMemberProtocol().getProtocol().equals(prot))) { + throw new NamespaceException( + "Defined Pool is NOT HOMOGENEOUS! Protocols " + + prot.toString() + + " and " + + member.toString() + + " differs"); + } + } + } + + // ******************* DEFAULT VALUES *************************** + + private DefaultValuesInterface buildDefaultValues(String fsName) throws NamespaceException { + + DefaultValues def = new DefaultValues(); + if (parserUtil.isDefaultElementPresent(fsName)) { + setSpaceDef(fsName, def); + setFileDef(fsName, def); + } else { // Produce Default Values with default values :o ! + log.debug("VFS({}).DefaultValues is ABSENT. 
Using DEFAULT values.", fsName); + } + return def; + } + + private void setSpaceDef(String fsName, DefaultValues def) throws NamespaceException { + + String spaceType = parserUtil.getDefaultSpaceType(fsName); + log.debug("VFS({}).DefaultValues.space.type = '{}'", fsName, spaceType); + long lifeTime = parserUtil.getDefaultSpaceLifeTime(fsName); + log.debug("VFS({}).DefaultValues.space.lifeTime = ''", fsName, lifeTime); + long guarSize = parserUtil.getDefaultSpaceGuarSize(fsName); + log.debug("VFS({}).DefaultValues.space.guarSize = '{}'", fsName, guarSize); + long totSize = parserUtil.getDefaultSpaceTotSize(fsName); + log.debug("VFS({}).DefaultValues.space.totSize = '{}'", fsName, totSize); + def.setSpaceDefaults(spaceType, lifeTime, guarSize, totSize); + } + + private void setFileDef(String fsName, DefaultValues def) throws NamespaceException { + + String fileType = parserUtil.getDefaultFileType(fsName); + log.debug("VFS({}).DefaultValues.file.type = '{}'", fsName, fileType); + long lifeTime = parserUtil.getDefaultFileLifeTime(fsName); + log.debug("VFS({}).DefaultValues.file.lifeTime = '{}'", fsName, lifeTime); + def.setFileDefaults(fileType, lifeTime); + } + + // ******************* MAPPING RULE *************************** + + private void buildMapRules() throws NamespaceException { + + int numOfMapRules = parserUtil.getNumberOfMappingRule(); + String ruleName; + String stfnRoot; + String mappedFS; + MappingRule mapRule; + + for (int i = 0; i < numOfMapRules; i++) { + ruleName = parserUtil.getMapRuleName(i); + mappedFS = parserUtil.getMapRule_mappedFS(ruleName); + // Adding mapping rule to VFS within vfss; + if (vfss.containsKey(mappedFS)) { + log.debug("VFS '{}' pointed by RULE : '{}' exists.", mappedFS, ruleName); + stfnRoot = parserUtil.getMapRule_StFNRoot(ruleName); + VirtualFS vfs = vfss.get(mappedFS); + mapRule = new MappingRule(ruleName, stfnRoot, vfs); + ((VirtualFS) vfs).addMappingRule(mapRule); + maprules.put(ruleName, mapRule); + } else { + log.error("VFS '{}' pointed by RULE : '{}' DOES NOT EXISTS.", mappedFS, ruleName); + } + } + } + + // ******************* APPROACHABLE RULE *************************** + + private void buildAppRules() throws NamespaceException { + + int numOfAppRules = parserUtil.getNumberOfApproachRule(); + + String ruleName; + String dn; + String vo_name; + String relPath; + String anonymousHttpReadString; + List appFSList; + ApproachableRule appRule; + + log.debug("Number of APP Rule : {}", numOfAppRules); + + for (int i = 0; i < numOfAppRules; i++) { + ruleName = parserUtil.getApproachRuleName(i); + log.debug(" APP rule nr: {} is named : {}", i, ruleName); + + dn = parserUtil.getAppRule_SubjectDN(ruleName); + vo_name = parserUtil.getAppRule_SubjectVO(ruleName); + SubjectRules subjectRules = new SubjectRules(dn, vo_name); + + relPath = parserUtil.getAppRule_RelativePath(ruleName); + + anonymousHttpReadString = parserUtil.getAppRule_AnonymousHttpRead(ruleName); + if (anonymousHttpReadString != null && !anonymousHttpReadString.trim().isEmpty()) { + appRule = + new ApproachableRule( + ruleName, subjectRules, relPath, Boolean.parseBoolean(anonymousHttpReadString)); + } else { + appRule = new ApproachableRule(ruleName, subjectRules, relPath); + } + + appFSList = parserUtil.getAppRule_AppFS(ruleName); + for (String appFS : appFSList) { + if (vfss.containsKey(appFS)) { + log.debug("VFS '{}' pointed by RULE : '{}' exists.", appFS, ruleName); + VirtualFS vfs = vfss.get(appFS); + ((VirtualFS) vfs).addApproachableRule(appRule); + appRule.addApproachableVFS(vfs); + } 
else { + log.error("VFS '{}' pointed by RULE : '{}' DOES NOT EXISTS.", appFS, ruleName); + } + } + apprules.put(ruleName, appRule); + } + } + + /** + * *************************************************************************** BUSINESS METHODs + * ************************************************************************** + */ + public String getNamespaceVersion() { + + return version; + } + + public List getAllVFS_Roots() { + + Collection elem = vfss.values(); + List roots = new ArrayList<>(vfss.size()); + Iterator scan = elem.iterator(); + while (scan.hasNext()) { + String root = null; + root = scan.next().getRootPath(); + roots.add(root); + } + return roots; + } + + public Map getMapVFS_Root() { + + Map result = new HashMap<>(); + Collection elem = vfss.values(); + Iterator scan = elem.iterator(); + while (scan.hasNext()) { + String root = null; + VirtualFS vfs = scan.next(); + root = vfs.getRootPath(); + result.put(root, vfs); + } + return result; + } + + public List getAllMappingRule_StFNRoots() { + + Collection elem = maprules.values(); + List roots = new ArrayList<>(maprules.size()); + Iterator scan = elem.iterator(); + String root = null; + while (scan.hasNext()) { + root = scan.next().getStFNRoot(); + roots.add(root); + } + return roots; + } + + public Map getMappingRuleMAP() { + + Map map = new HashMap<>(); + Collection elem = maprules.values(); + Iterator scan = elem.iterator(); + String root = null; + String name = null; + MappingRule rule; + while (scan.hasNext()) { + rule = scan.next(); + root = rule.getStFNRoot(); + name = rule.getRuleName(); + map.put(name, root); + } + return map; + } + + public VirtualFS getVFS(String vfsName) { + + return vfss.get(vfsName); + } } diff --git a/src/main/java/it/grid/storm/namespace/config/xml/XMLParserUtil.java b/src/main/java/it/grid/storm/namespace/config/xml/XMLParserUtil.java index e2552234..65f5d229 100644 --- a/src/main/java/it/grid/storm/namespace/config/xml/XMLParserUtil.java +++ b/src/main/java/it/grid/storm/namespace/config/xml/XMLParserUtil.java @@ -1,1199 +1,1105 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config.xml; +import com.google.common.collect.Lists; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.SAAuthzType; - import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; - import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConversionException; import org.apache.commons.configuration.HierarchicalConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - /** - *

* Title: - * - * - * - * Description: - * - * - * - * Copyright: Copyright (c) 2006 - * - * - * - * Company: INFN-CNAF and ICTP/eGrid project - * - * - * + * + * Description: + * + * Copyright: Copyright (c) 2006 + * + *
Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ public class XMLParserUtil implements XMLConst { - private final HierarchicalConfiguration configuration; - private final Logger log = LoggerFactory.getLogger(XMLParserUtil.class); - - public XMLParserUtil(Configuration config) { - - configuration = (HierarchicalConfiguration) config; - } - - /***************************************************************************** - * GENERICS METHODS - */ - - public boolean validateXML() { - - return true; - } - - public boolean areThereSustitutionCharInside(String element) { - - boolean result = false; - result = (element.indexOf(XMLConst.PROT_SUB_PATTERN) != -1) - || (element.indexOf(XMLConst.FS_SUB_PATTERN) != -1) - || (element.indexOf(XMLConst.APPRULE_SUB_PATTERN) != -1) - || (element.indexOf(XMLConst.MAP_SUB_PATTERN) != -1) - || (element.indexOf(XMLConst.ACL_ENTRY_SUB_PATTERN) != -1); - return result; - } - - public char whicSubstitutionChar(String element) { - - if (element.indexOf(XMLConst.PROT_SUB_PATTERN) != -1) { - return XMLConst.PROT_SUB_PATTERN; - } else if (element.indexOf(XMLConst.FS_SUB_PATTERN) != -1) { - return XMLConst.FS_SUB_PATTERN; - } else if (element.indexOf(XMLConst.APPRULE_SUB_PATTERN) != -1) { - return APPRULE_SUB_PATTERN; - } else if (element.indexOf(XMLConst.MAP_SUB_PATTERN) != -1) { - return XMLConst.MAP_SUB_PATTERN; - } else if (element.indexOf(XMLConst.ACL_ENTRY_SUB_PATTERN) != -1) { - return XMLConst.ACL_ENTRY_SUB_PATTERN; - } else if (element.indexOf(XMLConst.MEMBER_SUB_PATTERN) != -1) { - return XMLConst.MEMBER_SUB_PATTERN; - } - return ' '; - } - - /***************************************************************************** - * FILESYSTEMS METHODS - */ - public String getNamespaceVersion() throws NamespaceException { - - String result = null; - result = getStringProperty(XMLConst.NAMESPACE_VERSION); - return result; - } - - public String getFSSpaceTokenDescription(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FS_SPACE_TOKEN_DESCRIPTION)); - return result; - } - - /** - * public String getAuthorizationSource(String nameOfFS) throws - * NamespaceException { int numOfFS = retrieveNumberByName(nameOfFS, - * XMLConst.FS_BY_NAME); String result = null; //Optional element if - * (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.FS_AUTHZ))) { - * result = getStringProperty(substituteNumberInFSElement(numOfFS, - * XMLConst.FS_AUTHZ)); } else { //Default value needed. result = - * XMLConst.DEFAULT_AUTHZ_SOURCE; - * log.debug("AuthZ source for VFS(+'"+nameOfFS+ - * "') is absent. Default value ('"+result+"') will be used."); } return - * result; } - **/ - - /** - * public boolean getQuotaCheck(String nameOfFS) throws NamespaceException { - * int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); boolean - * result = false; //Optional element if - * (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_CHECK))) { - * result = getBooleanProperty(substituteNumberInFSElement(numOfFS, - * XMLConst.QUOTA_CHECK)); } else { //Default value needed. result = - * XMLConst.DEFAULT_CHECKING_QUOTA; - * log.debug("Checking quota flag in VFS(+'"+nameOfFS - * +"') is absent. 
Default value ('"+result+"') will be used."); } return - * result; } - **/ - - public String getRetentionPolicyType(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.RETENTION_POLICY)); - return result; - } - - public String getAccessLatencyType(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.ACCESS_LATENCY)); - return result; - } - - public String getExpirationModeType(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.EXPIRATION_MODE)); - return result; - } - - public String getOnlineSpaceUnitType(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = null; - // Optional element - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.ONLINE_SIZE_UNIT))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.ONLINE_SIZE_UNIT)); - } else { // Default value needed. - result = XMLConst.DEFAULT_UNIT_TYPE; - log.debug("Online Space Unit type for VFS(+'" + nameOfFS - + "') is absent. Default value ('" + result + "') will be used"); - } - return result; - } - - public long getOnlineSpaceSize(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - long result = getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.ONLINE_SIZE)); - return result; - } - - public String getNearlineSpaceUnitType(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = null; - // Optional element - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.NEARLINE_SIZE_UNIT))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.NEARLINE_SIZE_UNIT)); - } else { // Default value needed. - result = XMLConst.DEFAULT_UNIT_TYPE; - log.debug("Online Space Unit type for VFS(+'" + nameOfFS - + "') is absent. 
Default value ('" + result + "') will be used"); - } - return result; - } - - public long getNearlineSpaceSize(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - long result = getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.NEARLINE_SIZE)); - return result; - } - - public int getNumberOfFS() throws NamespaceException { - - return getPropertyNumber(XMLConst.FS_COUNTING); - } - - public String getFSName(int numOfFS) throws NamespaceException { - - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FILESYSTEM_NAME)); - } - - public int getFSNumber(String nameOfFS) throws NamespaceException { - - return retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - } - - public String getFSType(String nameOfFS) throws NamespaceException { - - // log.debug("-----FSTYPE------START"); - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - // log.debug("-----FSTYPE------END"); - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FILESYSTEM_TYPE)); - } - - public String getFSRoot(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FS_ROOT)); - // log.debug("VFS ROOT = "+result); - return result; - } - - public String getFSDriver(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FS_DRIVER)); - } - - public String getSpaceDriver(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FS_SPACE_DRIVER)); - } - - public boolean isDefaultElementPresent(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - // FS_DEFAULTVALUES - result = isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.FS_DEFAULTVALUES)); - return result; - } - - public String getDefaultSpaceType(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_SPACE_TYPE)); - } + private final HierarchicalConfiguration configuration; + private final Logger log = LoggerFactory.getLogger(XMLParserUtil.class); + + public XMLParserUtil(Configuration config) { + + configuration = (HierarchicalConfiguration) config; + } + + /** + * *************************************************************************** GENERICS METHODS + */ + public boolean validateXML() { + + return true; + } + + public boolean areThereSustitutionCharInside(String element) { + + boolean result = false; + result = + (element.indexOf(XMLConst.PROT_SUB_PATTERN) != -1) + || (element.indexOf(XMLConst.FS_SUB_PATTERN) != -1) + || (element.indexOf(XMLConst.APPRULE_SUB_PATTERN) != -1) + || (element.indexOf(XMLConst.MAP_SUB_PATTERN) != -1) + || (element.indexOf(XMLConst.ACL_ENTRY_SUB_PATTERN) != -1); + return result; + } + + public char whicSubstitutionChar(String element) { + + if (element.indexOf(XMLConst.PROT_SUB_PATTERN) != -1) { + return XMLConst.PROT_SUB_PATTERN; + } else if (element.indexOf(XMLConst.FS_SUB_PATTERN) != -1) { + return XMLConst.FS_SUB_PATTERN; + } else if 
(element.indexOf(XMLConst.APPRULE_SUB_PATTERN) != -1) { + return APPRULE_SUB_PATTERN; + } else if (element.indexOf(XMLConst.MAP_SUB_PATTERN) != -1) { + return XMLConst.MAP_SUB_PATTERN; + } else if (element.indexOf(XMLConst.ACL_ENTRY_SUB_PATTERN) != -1) { + return XMLConst.ACL_ENTRY_SUB_PATTERN; + } else if (element.indexOf(XMLConst.MEMBER_SUB_PATTERN) != -1) { + return XMLConst.MEMBER_SUB_PATTERN; + } + return ' '; + } + + /** + * *************************************************************************** FILESYSTEMS METHODS + */ + public String getNamespaceVersion() throws NamespaceException { + + String result = null; + result = getStringProperty(XMLConst.NAMESPACE_VERSION); + return result; + } + + public String getFSSpaceTokenDescription(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = + getStringProperty( + substituteNumberInFSElement(numOfFS, XMLConst.FS_SPACE_TOKEN_DESCRIPTION)); + return result; + } + + /** + * public String getAuthorizationSource(String nameOfFS) throws NamespaceException { int numOfFS = + * retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); String result = null; //Optional element + * if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.FS_AUTHZ))) { result = + * getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FS_AUTHZ)); } else { //Default + * value needed. result = XMLConst.DEFAULT_AUTHZ_SOURCE; log.debug("AuthZ source for + * VFS(+'"+nameOfFS+ "') is absent. Default value ('"+result+"') will be used."); } return result; + * } + */ + + /** + * public boolean getQuotaCheck(String nameOfFS) throws NamespaceException { int numOfFS = + * retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); boolean result = false; //Optional element + * if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_CHECK))) { result = + * getBooleanProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_CHECK)); } else { + * //Default value needed. result = XMLConst.DEFAULT_CHECKING_QUOTA; log.debug("Checking quota + * flag in VFS(+'"+nameOfFS +"') is absent. Default value ('"+result+"') will be used."); } return + * result; } + */ + public String getRetentionPolicyType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = + getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.RETENTION_POLICY)); + return result; + } + + public String getAccessLatencyType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = + getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.ACCESS_LATENCY)); + return result; + } + + public String getExpirationModeType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = + getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.EXPIRATION_MODE)); + return result; + } + + public String getOnlineSpaceUnitType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = null; + // Optional element + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.ONLINE_SIZE_UNIT))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.ONLINE_SIZE_UNIT)); + } else { // Default value needed. 
+ result = XMLConst.DEFAULT_UNIT_TYPE; + log.debug( + "Online Space Unit type for VFS(+'" + + nameOfFS + + "') is absent. Default value ('" + + result + + "') will be used"); + } + return result; + } + + public long getOnlineSpaceSize(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + long result = getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.ONLINE_SIZE)); + return result; + } + + public String getNearlineSpaceUnitType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = null; + // Optional element + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.NEARLINE_SIZE_UNIT))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.NEARLINE_SIZE_UNIT)); + } else { // Default value needed. + result = XMLConst.DEFAULT_UNIT_TYPE; + log.debug( + "Online Space Unit type for VFS(+'" + + nameOfFS + + "') is absent. Default value ('" + + result + + "') will be used"); + } + return result; + } + + public long getNearlineSpaceSize(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + long result = getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.NEARLINE_SIZE)); + return result; + } + + public int getNumberOfFS() throws NamespaceException { + + return getPropertyNumber(XMLConst.FS_COUNTING); + } + + public String getFSName(int numOfFS) throws NamespaceException { + + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FILESYSTEM_NAME)); + } + + public int getFSNumber(String nameOfFS) throws NamespaceException { + + return retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + } + + public String getFSType(String nameOfFS) throws NamespaceException { + + // log.debug("-----FSTYPE------START"); + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + // log.debug("-----FSTYPE------END"); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FILESYSTEM_TYPE)); + } + + public String getFSRoot(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FS_ROOT)); + // log.debug("VFS ROOT = "+result); + return result; + } + + public String getFSDriver(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FS_DRIVER)); + } + + public String getSpaceDriver(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FS_SPACE_DRIVER)); + } + + public boolean isDefaultElementPresent(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + // FS_DEFAULTVALUES + result = isPresent(substituteNumberInFSElement(numOfFS, XMLConst.FS_DEFAULTVALUES)); + return result; + } + + public String getDefaultSpaceType(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_SPACE_TYPE)); + } + + public long getDefaultSpaceLifeTime(String nameOfFS) throws NamespaceException { + + int numOfFS = 
retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_SPACE_LT)); + } + + public long getDefaultSpaceGuarSize(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_SPACE_GUARSIZE)); + } + + public long getDefaultSpaceTotSize(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_SPACE_TOTSIZE)); + } + + public String getDefaultFileType(String nameOfFS) throws NamespaceException { - public long getDefaultSpaceLifeTime(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_SPACE_LT)); - } + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_FILE_TYPE)); + } - public long getDefaultSpaceGuarSize(String nameOfFS) - throws NamespaceException { + public long getDefaultFileLifeTime(String nameOfFS) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_SPACE_GUARSIZE)); - } + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getLongProperty(substituteNumberInFSElement(numOfFS, XMLConst.DEF_FILE_LT)); + } - public long getDefaultSpaceTotSize(String nameOfFS) throws NamespaceException { + public String getACLMode(String nameOfFS) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_SPACE_TOTSIZE)); - } + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.ACL_MODE)); + } - public String getDefaultFileType(String nameOfFS) throws NamespaceException { + public int getNumberOfProt(String nameOfFS) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_FILE_TYPE)); - } + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (numOfFS == -1) { + throw new NamespaceException("FS named '" + nameOfFS + "' does not exist in config"); + } + String protCount = + substitutionNumber(XMLConst.PROTOCOL_COUNTING, XMLConst.FS_SUB_PATTERN, numOfFS); + // log.debug( configuration.getString(protCount)); + return getPropertyNumber(protCount); + } - public long getDefaultFileLifeTime(String nameOfFS) throws NamespaceException { + public String getProtName(String nameOfFS, int numOfProt) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return getLongProperty(substituteNumberInFSElement(numOfFS, - XMLConst.DEF_FILE_LT)); - } + return getStringProperty( + substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROTOCOL_NAME)); + } - public String getACLMode(String nameOfFS) throws NamespaceException { + public int getProtNumberByName(String nameOfFS, String nameOfProt) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - return 
getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.ACL_MODE)); - } + int numFS = getFSNumber(nameOfFS); + String collElem = substituteNumberInFSElement(numFS, XMLConst.PROTOCOL_BY_NAME); + // log.debug("COLLECTION = "+collElem); + return retrieveNumberByName(nameOfProt, collElem); + } - public int getNumberOfProt(String nameOfFS) throws NamespaceException { + public String getProtSchema(String nameOfFS, int numOfProt) throws NamespaceException { - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (numOfFS == -1) { - throw new NamespaceException("FS named '" + nameOfFS - + "' does not exist in config"); - } - String protCount = substitutionNumber(XMLConst.PROTOCOL_COUNTING, - XMLConst.FS_SUB_PATTERN, numOfFS); - // log.debug( configuration.getString(protCount)); - return getPropertyNumber(protCount); - } + return getStringProperty( + substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROT_SCHEMA)); + } - public String getProtName(String nameOfFS, int numOfProt) - throws NamespaceException { + public String getProtHost(String nameOfFS, int numOfProt) throws NamespaceException { - return getStringProperty(substituteNumberInProtocolElement(nameOfFS, - numOfProt, XMLConst.PROTOCOL_NAME)); - } + return getStringProperty( + substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROT_HOST)); + } - public int getProtNumberByName(String nameOfFS, String nameOfProt) - throws NamespaceException { + public String getProtPort(String nameOfFS, int numOfProt) throws NamespaceException { - int numFS = getFSNumber(nameOfFS); - String collElem = substituteNumberInFSElement(numFS, - XMLConst.PROTOCOL_BY_NAME); - // log.debug("COLLECTION = "+collElem); - return retrieveNumberByName(nameOfProt, collElem); - } + return getStringProperty( + substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROT_PORT)); + } - public String getProtSchema(String nameOfFS, int numOfProt) - throws NamespaceException { + /** + * *************************************************************************** MAPPING RULES + * METHODS + */ + public int getNumberOfMappingRule() throws NamespaceException { - return getStringProperty(substituteNumberInProtocolElement(nameOfFS, - numOfProt, XMLConst.PROT_SCHEMA)); - } + return getPropertyNumber(XMLConst.MAP_RULE_COUNTING); + } - public String getProtHost(String nameOfFS, int numOfProt) - throws NamespaceException { + public String getMapRuleName(int numOfMapRule) throws NamespaceException { - return getStringProperty(substituteNumberInProtocolElement(nameOfFS, - numOfProt, XMLConst.PROT_HOST)); - } + return getStringProperty(substituteNumberInMAPElement(numOfMapRule, XMLConst.MAP_RULE_NAME)); + } - public String getProtPort(String nameOfFS, int numOfProt) - throws NamespaceException { + public String getMapRule_StFNRoot(String nameOfMapRule) throws NamespaceException { - return getStringProperty(substituteNumberInProtocolElement(nameOfFS, - numOfProt, XMLConst.PROT_PORT)); - } + int numOfMapRule = retrieveNumberByName(nameOfMapRule, XMLConst.MAP_RULE_BY_NAME); + return getStringProperty( + substituteNumberInMAPElement(numOfMapRule, XMLConst.MAP_RULE_STFNROOT)); + } - /***************************************************************************** - * MAPPING RULES METHODS - */ + public String getMapRule_mappedFS(String nameOfMapRule) throws NamespaceException { - public int getNumberOfMappingRule() throws NamespaceException { + int numOfMapRule = retrieveNumberByName(nameOfMapRule, XMLConst.MAP_RULE_BY_NAME); + return 
getStringProperty( + substituteNumberInMAPElement(numOfMapRule, XMLConst.MAP_RULE_MAPPED_FS)); + } - return getPropertyNumber(XMLConst.MAP_RULE_COUNTING); - } + /** + * *************************************************************************** APPROACHING METHODS + */ + public int getNumberOfApproachRule() throws NamespaceException { - public String getMapRuleName(int numOfMapRule) throws NamespaceException { + return getPropertyNumber(XMLConst.APP_RULE_COUNTING); + } - return getStringProperty(substituteNumberInMAPElement(numOfMapRule, - XMLConst.MAP_RULE_NAME)); - } + public String getApproachRuleName(int numOfAppRule) throws NamespaceException { - public String getMapRule_StFNRoot(String nameOfMapRule) - throws NamespaceException { + return getStringProperty(substituteNumberInAPPElement(numOfAppRule, XMLConst.APP_RULE_NAME)); + } - int numOfMapRule = retrieveNumberByName(nameOfMapRule, - XMLConst.MAP_RULE_BY_NAME); - return getStringProperty(substituteNumberInMAPElement(numOfMapRule, - XMLConst.MAP_RULE_STFNROOT)); - } - - public String getMapRule_mappedFS(String nameOfMapRule) - throws NamespaceException { + public String getAppRule_SubjectDN(String nameOfAppRule) throws NamespaceException { - int numOfMapRule = retrieveNumberByName(nameOfMapRule, - XMLConst.MAP_RULE_BY_NAME); - return getStringProperty(substituteNumberInMAPElement(numOfMapRule, - XMLConst.MAP_RULE_MAPPED_FS)); - } - - /***************************************************************************** - * APPROACHING METHODS - */ - - public int getNumberOfApproachRule() throws NamespaceException { - - return getPropertyNumber(XMLConst.APP_RULE_COUNTING); - } - - public String getApproachRuleName(int numOfAppRule) throws NamespaceException { - - return getStringProperty(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APP_RULE_NAME)); - } - - public String getAppRule_SubjectDN(String nameOfAppRule) - throws NamespaceException { - - int numOfAppRule = retrieveNumberByName(nameOfAppRule, - XMLConst.APP_RULE_BY_NAME); - return getStringProperty(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APP_DN)); - } - - public String getAppRule_SubjectVO(String nameOfAppRule) - throws NamespaceException { - - int numOfAppRule = retrieveNumberByName(nameOfAppRule, - XMLConst.APP_RULE_BY_NAME); - return getStringProperty(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APP_VO_NAME)); - } - - public List getAppRule_AppFS(String nameOfAppRule) throws NamespaceException { - - int numOfAppRule = retrieveNumberByName(nameOfAppRule, - XMLConst.APP_RULE_BY_NAME); - return getListValue(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APPROACHABLE_FS)); - } - - public String getAppRule_RelativePath(String nameOfAppRule) - throws NamespaceException { - - int numOfAppRule = retrieveNumberByName(nameOfAppRule, - XMLConst.APP_RULE_BY_NAME); - return getStringProperty(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APP_SPACE_REL_PATH)); - } - - public String getAppRule_AnonymousHttpRead(String nameOfAppRule) - throws NamespaceException { - - int numOfAppRule = retrieveNumberByName(nameOfAppRule, - XMLConst.APP_RULE_BY_NAME); - return getStringProperty(substituteNumberInAPPElement(numOfAppRule, - XMLConst.APP_ANONYMOUS_HTTP_READ)); - } - - /***************************************************************************** - * QUOTA METHODS - */ - - public boolean getQuotaDefined(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if 
(isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_ENABLED))) { - result = true; - } - return result; - } - - public boolean getQuotaEnabled(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - result = getBooleanProperty(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_ENABLED)); - return result; - } - - public boolean getQuotaDeviceDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_DEVICE))) { - result = true; - } - return result; - } - - public String getQuotaDevice(String nameOfFS) throws NamespaceException { - - String result = null; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_DEVICE))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_DEVICE)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.QUOTA_DEVICE + "' for the VFS:'" + nameOfFS + "'"); - } - return result; - } - - public boolean getQuotaFilesetDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_FILE_SET_NAME))) { - result = true; - } - return result; - } - - public String getQuotaFileset(String nameOfFS) throws NamespaceException { - - String result = null; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_FILE_SET_NAME))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_FILE_SET_NAME)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.QUOTA_FILE_SET_NAME + "' for the VFS:'" + nameOfFS + "'"); - } - return result; - } - - public boolean getQuotaGroupIDDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_GROUP_NAME))) { - result = true; - } - return result; - } - - public String getQuotaGroupID(String nameOfFS) throws NamespaceException { - - String result = null; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_GROUP_NAME))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.QUOTA_GROUP_NAME)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.QUOTA_GROUP_NAME + "' for the VFS:'" + nameOfFS + "'"); - } - return result; - } - - public boolean getQuotaUserIDDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_USER_NAME))) { - result = true; - } - return result; - } - - public String getQuotaUserID(String nameOfFS) throws NamespaceException { - - String result = null; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_USER_NAME))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - 
XMLConst.QUOTA_USER_NAME)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.QUOTA_USER_NAME + "' for the VFS:'" + nameOfFS + "'"); - } - return result; - } - - /***************************************************************************** - * STORAGE CLASS METHODs - */ - public String getStorageClass(String nameOfFS) throws NamespaceException { - - String result = XMLConst.DEFAULT_STORAGE_CLASS; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.FS_STORAGE_CLASS))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.FS_STORAGE_CLASS)); - } else { - log.debug("Storage Class for VFS(+'" + nameOfFS - + "') is absent. Default value ('" + result + "') will be used."); - } - return result; - } - - /***************************************************************************** - * PRIVATE METHOD - *****************************************************************************/ - private String substitutionNumber(String xpath, char patternChar, int number) { - - int startIndex = 0; - int pos = 0; - StringBuilder result = new StringBuilder(); - pos = xpath.indexOf(patternChar, startIndex); - String numStr = Integer.toString(number); - result.append(xpath.substring(startIndex, pos)); - result.append(numStr); - result.append(xpath.substring(pos + 1)); - return result.toString(); - } - - private String substituteNumberInFSElement(int numberOfFS, String element) - throws NamespaceException { - - int numFS = getNumberOfFS(); - if (numberOfFS > numFS) { - throw new NamespaceException("Invalid pointing of Virtual File system"); - } - String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, - numberOfFS); - return new_element; - } - - private String substituteNumberInACLEntryElement(String nameOfFS, - int numberOfACLEntry, String element) throws NamespaceException { - - int numFS = getFSNumber(nameOfFS); - if (numFS == -1) { - throw new NamespaceException("Virtual File system (" + nameOfFS - + ") does not exists"); - } - int numACL = getNumberOfACL(nameOfFS); - if (numberOfACLEntry > numACL) { - throw new NamespaceException("Invalid pointing of ACL Entry within VFS"); - } - String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, - numFS); - new_element = substitutionNumber(new_element, - XMLConst.ACL_ENTRY_SUB_PATTERN, numberOfACLEntry); - return new_element; - } - - private String substituteNumberInProtocolElement(String nameOfFS, - int numberOfProtocol, String element) throws NamespaceException { - - int numFS = getFSNumber(nameOfFS); - if (numFS == -1) { - throw new NamespaceException("Virtual File system (" + nameOfFS - + ") does not exists"); - } - int numProt = getNumberOfProt(nameOfFS); - if (numberOfProtocol > numProt) { - throw new NamespaceException("Invalid pointing of Protocol within VFS"); - } - String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, - numFS); - new_element = substitutionNumber(new_element, XMLConst.PROT_SUB_PATTERN, - numberOfProtocol); - return new_element; - } - - private String substituteNumberInPoolElement(String nameOfFS, - int numberOfPool, String element) throws NamespaceException { - - int numFS = getFSNumber(nameOfFS); - if (numFS == -1) { - throw new NamespaceException("Virtual File system (" + nameOfFS - + ") does not exists"); - } - int numPool = getNumberOfPool(nameOfFS); - if (numberOfPool > numPool) { - throw new NamespaceException("Invalid pointing of Pool within 
VFS"); - } - String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, - numFS); - new_element = substitutionNumber(new_element, XMLConst.POOL_SUB_PATTERN, - numberOfPool); - return new_element; - } - - private String substituteNumberInMembersElement(String nameOfFS, - int numOfPool, int numberOfMember, String element) - throws NamespaceException { - - int numFS = getFSNumber(nameOfFS); - if (numFS == -1) { - throw new NamespaceException("Virtual File system (" + nameOfFS - + ") does not exists"); - } - int numMembers = getNumberOfPoolMembers(nameOfFS, numOfPool); - if (numberOfMember > numMembers) { - throw new NamespaceException("Invalid pointing of Member within VFS"); - } - String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, - numFS); - new_element = substitutionNumber(new_element, XMLConst.POOL_SUB_PATTERN, - numOfPool); - new_element = substitutionNumber(new_element, XMLConst.MEMBER_SUB_PATTERN, - numberOfMember); - return new_element; - } - - private String substituteNumberInMAPElement(int numberOfMapRule, - String element) throws NamespaceException { - - int numMapRule = getNumberOfMappingRule(); - - if (numberOfMapRule > numMapRule) { - throw new NamespaceException("Invalid pointing of Mapping Rule"); - } - String new_element = substitutionNumber(element, XMLConst.MAP_SUB_PATTERN, - numberOfMapRule); - return new_element; - } - - private String substituteNumberInAPPElement(int numberOfAppRule, - String element) throws NamespaceException { - - int numAppRule = getNumberOfApproachRule(); - if (numberOfAppRule > numAppRule) { - throw new NamespaceException("Invalid pointing of Approachable Rule"); - } - String new_element = substitutionNumber(element, - XMLConst.APPRULE_SUB_PATTERN, numberOfAppRule); - return new_element; - } - - private int retrieveNumberByName(String name, String collectionElement, - boolean logging) { - - int result = -1; - int size = -1; - // log.debug(" NAME : "+name+" | Collection Element :"+collectionElement); - List prop = configuration.getList(collectionElement); - if (prop != null) { - size = prop.size(); - // log.debug("Size = "+size); - if (logging) { - for (int i = 0; i < size; i++) { - log.debug(prop.get(i).toString()); - } - } - result = prop.indexOf(name); - } else { - log.warn("[retrieveNumberByName_3] Element <" + collectionElement - + "> does not exists in namespace configuration file"); - } - return result; - } - - private int retrieveNumberByName(String name, String collectionElement) { - - int result = -1; - int size = -1; - // log.debug(" NAME : "+name+" | Collection Element :"+collectionElement); - List prop = configuration.getList(collectionElement); - if (prop != null) { - size = prop.size(); - result = prop.indexOf(name); - } else { - log.warn("[retrieveNumberByName_2] Element <" + collectionElement - + "> does not exists in namespace configuration file"); - } - return result; - } - - public Iterator getKeys() { - - return configuration.getKeys(); - } - - /** - * - * @param element - * String - * @return int - */ - private int getPropertyNumber(String element) { - - int result = -1; - Object prop = configuration.getProperty(element); - if (prop != null) { - result = 1; // If it is not null its value is atleast '1'! 
- if (prop instanceof Collection) { - result = ((Collection) prop).size(); - } - } else { - log.warn("[getPropertyNumber] Element <" + element - + "> does not exists in namespace configuration file"); - } - - return result; - } - - private boolean isPresent(String element) { - - boolean result = false; - result = configuration.containsKey(element); - // log.debug("XMLPArserUtil: isPresent('"+element+"')="+result); - return result; - } - - /** - * - * @param element - * String - * @return int - */ - private String getStringProperty(String element) throws NamespaceException { - - String prop = null; - try { - prop = configuration.getString(element); - // log.debug("ELEMENT = "+element+" VALUE = "+prop); - } catch (ConversionException ce) { - log.warn("[getStringProperty] Element <" + element - + "> does not contains a String value"); - } catch (NoSuchElementException note) { - log.warn("[getStringProperty] Element <" + element - + "> does not exists in namespace configuration file"); - } - return prop; - } - - /** - * - * @param element - * String - * @return boolean - */ - private boolean getBooleanProperty(String element) throws NamespaceException { - - boolean result = false; - try { - result = configuration.getBoolean(element); - } catch (ConversionException ce) { - log.warn("[getLongProperty] Element <" + element - + "> does not contains a String value"); - } catch (NoSuchElementException note) { - log.warn("[getLongProperty] Element <" + element - + "> does not exists in namespace configuration file"); - } - return result; - } - - /** - * - * @param element - * String - * @return int - */ - private long getLongProperty(String element) throws NamespaceException { - - long prop = -1L; - try { - prop = configuration.getLong(element); - } catch (ConversionException ce) { - log.warn("[getLongProperty] Element <" + element - + "> does not contains a String value"); - } catch (NoSuchElementException note) { - log.warn("[getLongProperty] Element <" + element - + "> does not exists in namespace configuration file"); - } - return prop; - } - - /** - * - * @param element - * String - * @return int - */ - private int getIntProperty(String element) { - - int prop = -1; - try { - prop = configuration.getInt(element); - } catch (ConversionException ce) { - log.warn("[getIntProperty] Element <" + element - + "> does not contains a String value"); - } catch (NoSuchElementException note) { - log.warn("[getIntProperty] Element <" + element - + "> does not exists in namespace configuration file"); - } - return prop; - } - - /** - * - * @param element - * String - * @return int - */ - private String[] getListProperty(String element) throws NamespaceException { - - String prop = null; - try { - prop = configuration.getString(element); - } catch (ConversionException ce) { - log.warn("[getListProperty] Element <" + element - + "> does not contains a String value"); - } catch (NoSuchElementException note) { - log.warn("[getListProperty] Element <" + element - + "> does not exists in namespace configuration file"); - } - // log.debug("LIST : "+prop); - String[] result = prop.split(","); - // log.debug(" LIST lenght :"+result.length); - return result; - } - - private List getListValue(String collectionElement) { - - List propList = configuration.getList(collectionElement); - List prop = Lists.newArrayList(); - // For a set or list - for (Object element2 : propList) { - String element = (String) element2; - prop.add(element.trim()); - } - - log.debug("LIST - prop : " + prop); - log.debug("Nr. 
of elements : " + prop.size()); - if (prop.size() == 0) { - log.warn("[retrieveNumberByName_2] Element <" + collectionElement - + "> does not exists in namespace configuration file"); - } - return prop; - } - - public boolean getDefaultACLDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.GROUP_NAME))) { - result = true; - } - return result; - } - - public int getNumberOfACL(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (numOfFS == -1) { - throw new NamespaceException("FS named '" + nameOfFS - + "' does not exist in config"); - } - String aclCount = substitutionNumber(XMLConst.ACL_ENTRY_COUNTING, - XMLConst.FS_SUB_PATTERN, numOfFS); - log.debug("ACL Count = " + aclCount); - return getPropertyNumber(aclCount); - } - - public String getGroupName(String nameOfFS, int aclEntryNumber) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String aclCount = substitutionNumber(XMLConst.GROUP_NAME, - XMLConst.FS_SUB_PATTERN, numOfFS); - String result = null; - Object prop = configuration.getProperty(aclCount); - if (prop != null) { - if (prop instanceof Collection) { - ArrayList propList = new ArrayList((Collection) prop); - if (propList.size() > aclEntryNumber) { - result = propList.get(aclEntryNumber); - } - } else { - if (prop instanceof String) { - result = ((String) prop); - } - } - } else { - log.warn("[getPropertyNumber] Element <" + aclCount - + "> does not exists in namespace configuration file"); - } - return result; - // return getStringProperty(substituteNumberInACLEntryElement(nameOfFS, - // aclEntryNumber, XMLConst.GROUP_NAME)); - } - - public String getPermissionString(String nameOfFS, int aclEntryNumber) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - String aclCount = substitutionNumber(XMLConst.PERMISSIONS, - XMLConst.FS_SUB_PATTERN, numOfFS); - String result = null; - Object prop = configuration.getProperty(aclCount); - if (prop != null) { - if (prop instanceof Collection) { - ArrayList propList = new ArrayList((Collection) prop); - if (propList.size() > aclEntryNumber) { - result = propList.get(aclEntryNumber); - } - } else { - if (prop instanceof String) { - result = ((String) prop); - } - } - } else { - log.warn("[getPropertyNumber] Element <" + aclCount - + "> does not exists in namespace configuration file"); - } - return result; - - // return getStringProperty(substituteNumberInACLEntryElement(nameOfFS, - // aclEntryNumber, XMLConst.PERMISSIONS)); - } - - /** - * ********************************** VERSION 1.4.0 - ***************************************/ - - public String getStorageAreaAuthz(String nameOfFS, SAAuthzType type) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (type.equals(SAAuthzType.FIXED)) { - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.SA_AUTHZ_FIXED)); - } else { - return getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.SA_AUTHZ_DB)); - } - } - - public SAAuthzType getStorageAreaAuthzType(String nameOfFS) - throws NamespaceException { - - if (getStorageAreaAuthzFixedDefined(nameOfFS)) { - return SAAuthzType.FIXED; - } - if (getStorageAreaAuthzDBDefined(nameOfFS)) { - return SAAuthzType.AUTHZDB; - } - throw new 
NamespaceException("Unable to find the SAAuthzType in " - + nameOfFS); - } - - public boolean getStorageAreaAuthzFixedDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_FIXED))) { - result = true; - } - return result; - } - - public boolean getStorageAreaAuthzDBDefined(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_DB))) { - result = true; - } - return result; - } - - public int getProtId(String nameOfFS, int numOfProt) - throws NamespaceException { - - // int numOfProt = getProtNumberByName(nameOfFS, protName); - String protId = substituteNumberInProtocolElement(nameOfFS, numOfProt, - XMLConst.PROT_ID); - // log.debug("ProtID : "+protId); - if (isPresent(protId)) { - return getIntProperty(substituteNumberInProtocolElement(nameOfFS, - numOfProt, XMLConst.PROT_ID)); - } else { - return -1; - } - } - - public boolean getOnlineSpaceLimitedSize(String nameOfFS) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - result = getBooleanProperty(substituteNumberInFSElement(numOfFS, - XMLConst.LIMITED_SIZE)); - return result; - } - - public int getNumberOfPool(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (numOfFS == -1) { - throw new NamespaceException("FS named '" + nameOfFS - + "' does not exist in config"); - } - if (!getPoolDefined(nameOfFS)) - return 0; - String protCount = substitutionNumber(XMLConst.POOL_COUNTING, - XMLConst.FS_SUB_PATTERN, numOfFS); - return getPropertyNumber(protCount); - } - - public boolean getPoolDefined(String nameOfFS) throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - boolean result = false; - if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.POOL_COUNTING))) { - result = true; - } - return result; - } - - public String getBalancerStrategy(String nameOfFS) throws NamespaceException { - - String result = null; - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (isPresent(substituteNumberInFSElement(numOfFS, - XMLConst.BALANCE_STRATEGY))) { - result = getStringProperty(substituteNumberInFSElement(numOfFS, - XMLConst.BALANCE_STRATEGY)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.BALANCE_STRATEGY + "' for the VFS:'" + nameOfFS + "'"); - } - return result; - } - - public int getNumberOfPoolMembers(String nameOfFS, int poolCounter) - throws NamespaceException { - - int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); - if (numOfFS == -1) { - throw new NamespaceException("FS named '" + nameOfFS - + "' does not exist in config"); - } - String subTree = substituteNumberInPoolElement(nameOfFS, poolCounter, - XMLConst.POOL); - HierarchicalConfiguration sub = configuration.configurationAt(subTree); - Object members = sub.getProperty("members.member[@member-id]"); - int numOfMembers = -1; - if (members != null) { - if (members instanceof Collection) { - numOfMembers = ((Collection) members).size(); - } else { - numOfMembers = 1; - } - } else { - log.error("Error during the retrieve of the number of pool member of " - + nameOfFS); - } - return numOfMembers; - } 
- - public int getMemberID(String nameOfFS, int numOfPool, int memberNr) - throws NamespaceException { - - return getIntProperty(substituteNumberInMembersElement(nameOfFS, numOfPool, - memberNr, XMLConst.POOL_MEMBER_ID)); - } - - public int getMemberWeight(String nameOfFS, int numOfPool, int memberNr) - throws NamespaceException { - - return getIntProperty(substituteNumberInMembersElement(nameOfFS, numOfPool, - memberNr, XMLConst.POOL_MEMBER_WEIGHT)); - } - - public String getBalancerStrategy(String fsName, int poolCounter) - throws NamespaceException { - - String poolId = substituteNumberInPoolElement(fsName, poolCounter, - XMLConst.BALANCE_STRATEGY); - if (isPresent(poolId)) { - return getStringProperty(substituteNumberInPoolElement(fsName, - poolCounter, XMLConst.BALANCE_STRATEGY)); - } else { - throw new NamespaceException("Unable to find the element '" - + XMLConst.BALANCE_STRATEGY + "' for the VFS:'" + fsName + "'"); - } - } + int numOfAppRule = retrieveNumberByName(nameOfAppRule, XMLConst.APP_RULE_BY_NAME); + return getStringProperty(substituteNumberInAPPElement(numOfAppRule, XMLConst.APP_DN)); + } + + public String getAppRule_SubjectVO(String nameOfAppRule) throws NamespaceException { + int numOfAppRule = retrieveNumberByName(nameOfAppRule, XMLConst.APP_RULE_BY_NAME); + return getStringProperty(substituteNumberInAPPElement(numOfAppRule, XMLConst.APP_VO_NAME)); + } + + public List getAppRule_AppFS(String nameOfAppRule) throws NamespaceException { + + int numOfAppRule = retrieveNumberByName(nameOfAppRule, XMLConst.APP_RULE_BY_NAME); + return getListValue(substituteNumberInAPPElement(numOfAppRule, XMLConst.APPROACHABLE_FS)); + } + + public String getAppRule_RelativePath(String nameOfAppRule) throws NamespaceException { + + int numOfAppRule = retrieveNumberByName(nameOfAppRule, XMLConst.APP_RULE_BY_NAME); + return getStringProperty( + substituteNumberInAPPElement(numOfAppRule, XMLConst.APP_SPACE_REL_PATH)); + } + + public String getAppRule_AnonymousHttpRead(String nameOfAppRule) throws NamespaceException { + + int numOfAppRule = retrieveNumberByName(nameOfAppRule, XMLConst.APP_RULE_BY_NAME); + return getStringProperty( + substituteNumberInAPPElement(numOfAppRule, XMLConst.APP_ANONYMOUS_HTTP_READ)); + } + + /** *************************************************************************** QUOTA METHODS */ + public boolean getQuotaDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_ENABLED))) { + result = true; + } + return result; + } + + public boolean getQuotaEnabled(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + result = getBooleanProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_ENABLED)); + return result; + } + + public boolean getQuotaDeviceDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_DEVICE))) { + result = true; + } + return result; + } + + public String getQuotaDevice(String nameOfFS) throws NamespaceException { + + String result = null; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_DEVICE))) { + result = 
getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_DEVICE)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.QUOTA_DEVICE + + "' for the VFS:'" + + nameOfFS + + "'"); + } + return result; + } + + public boolean getQuotaFilesetDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_FILE_SET_NAME))) { + result = true; + } + return result; + } + + public String getQuotaFileset(String nameOfFS) throws NamespaceException { + + String result = null; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_FILE_SET_NAME))) { + result = + getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_FILE_SET_NAME)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.QUOTA_FILE_SET_NAME + + "' for the VFS:'" + + nameOfFS + + "'"); + } + return result; + } + + public boolean getQuotaGroupIDDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_GROUP_NAME))) { + result = true; + } + return result; + } + + public String getQuotaGroupID(String nameOfFS) throws NamespaceException { + + String result = null; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_GROUP_NAME))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_GROUP_NAME)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.QUOTA_GROUP_NAME + + "' for the VFS:'" + + nameOfFS + + "'"); + } + return result; + } + + public boolean getQuotaUserIDDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_USER_NAME))) { + result = true; + } + return result; + } + + public String getQuotaUserID(String nameOfFS) throws NamespaceException { + + String result = null; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_USER_NAME))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.QUOTA_USER_NAME)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.QUOTA_USER_NAME + + "' for the VFS:'" + + nameOfFS + + "'"); + } + return result; + } + + /** + * *************************************************************************** STORAGE CLASS + * METHODs + */ + public String getStorageClass(String nameOfFS) throws NamespaceException { + + String result = XMLConst.DEFAULT_STORAGE_CLASS; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.FS_STORAGE_CLASS))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.FS_STORAGE_CLASS)); + } else { + log.debug( + "Storage Class for VFS(+'" + + nameOfFS + + "') is absent. 
Default value ('" + + result + + "') will be used."); + } + return result; + } + + /** + * *************************************************************************** PRIVATE METHOD + * *************************************************************************** + */ + private String substitutionNumber(String xpath, char patternChar, int number) { + + int startIndex = 0; + int pos = 0; + StringBuilder result = new StringBuilder(); + pos = xpath.indexOf(patternChar, startIndex); + String numStr = Integer.toString(number); + result.append(xpath.substring(startIndex, pos)); + result.append(numStr); + result.append(xpath.substring(pos + 1)); + return result.toString(); + } + + private String substituteNumberInFSElement(int numberOfFS, String element) + throws NamespaceException { + + int numFS = getNumberOfFS(); + if (numberOfFS > numFS) { + throw new NamespaceException("Invalid pointing of Virtual File system"); + } + String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, numberOfFS); + return new_element; + } + + private String substituteNumberInACLEntryElement( + String nameOfFS, int numberOfACLEntry, String element) throws NamespaceException { + + int numFS = getFSNumber(nameOfFS); + if (numFS == -1) { + throw new NamespaceException("Virtual File system (" + nameOfFS + ") does not exists"); + } + int numACL = getNumberOfACL(nameOfFS); + if (numberOfACLEntry > numACL) { + throw new NamespaceException("Invalid pointing of ACL Entry within VFS"); + } + String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, numFS); + new_element = substitutionNumber(new_element, XMLConst.ACL_ENTRY_SUB_PATTERN, numberOfACLEntry); + return new_element; + } + + private String substituteNumberInProtocolElement( + String nameOfFS, int numberOfProtocol, String element) throws NamespaceException { + + int numFS = getFSNumber(nameOfFS); + if (numFS == -1) { + throw new NamespaceException("Virtual File system (" + nameOfFS + ") does not exists"); + } + int numProt = getNumberOfProt(nameOfFS); + if (numberOfProtocol > numProt) { + throw new NamespaceException("Invalid pointing of Protocol within VFS"); + } + String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, numFS); + new_element = substitutionNumber(new_element, XMLConst.PROT_SUB_PATTERN, numberOfProtocol); + return new_element; + } + + private String substituteNumberInPoolElement(String nameOfFS, int numberOfPool, String element) + throws NamespaceException { + + int numFS = getFSNumber(nameOfFS); + if (numFS == -1) { + throw new NamespaceException("Virtual File system (" + nameOfFS + ") does not exists"); + } + int numPool = getNumberOfPool(nameOfFS); + if (numberOfPool > numPool) { + throw new NamespaceException("Invalid pointing of Pool within VFS"); + } + String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, numFS); + new_element = substitutionNumber(new_element, XMLConst.POOL_SUB_PATTERN, numberOfPool); + return new_element; + } + + private String substituteNumberInMembersElement( + String nameOfFS, int numOfPool, int numberOfMember, String element) + throws NamespaceException { + + int numFS = getFSNumber(nameOfFS); + if (numFS == -1) { + throw new NamespaceException("Virtual File system (" + nameOfFS + ") does not exists"); + } + int numMembers = getNumberOfPoolMembers(nameOfFS, numOfPool); + if (numberOfMember > numMembers) { + throw new NamespaceException("Invalid pointing of Member within VFS"); + } + String new_element = substitutionNumber(element, XMLConst.FS_SUB_PATTERN, 
numFS); + new_element = substitutionNumber(new_element, XMLConst.POOL_SUB_PATTERN, numOfPool); + new_element = substitutionNumber(new_element, XMLConst.MEMBER_SUB_PATTERN, numberOfMember); + return new_element; + } + + private String substituteNumberInMAPElement(int numberOfMapRule, String element) + throws NamespaceException { + + int numMapRule = getNumberOfMappingRule(); + + if (numberOfMapRule > numMapRule) { + throw new NamespaceException("Invalid pointing of Mapping Rule"); + } + String new_element = substitutionNumber(element, XMLConst.MAP_SUB_PATTERN, numberOfMapRule); + return new_element; + } + + private String substituteNumberInAPPElement(int numberOfAppRule, String element) + throws NamespaceException { + + int numAppRule = getNumberOfApproachRule(); + if (numberOfAppRule > numAppRule) { + throw new NamespaceException("Invalid pointing of Approachable Rule"); + } + String new_element = substitutionNumber(element, XMLConst.APPRULE_SUB_PATTERN, numberOfAppRule); + return new_element; + } + + private int retrieveNumberByName(String name, String collectionElement, boolean logging) { + + int result = -1; + int size = -1; + // log.debug(" NAME : "+name+" | Collection Element :"+collectionElement); + List prop = configuration.getList(collectionElement); + if (prop != null) { + size = prop.size(); + // log.debug("Size = "+size); + if (logging) { + for (int i = 0; i < size; i++) { + log.debug(prop.get(i).toString()); + } + } + result = prop.indexOf(name); + } else { + log.warn( + "[retrieveNumberByName_3] Element <" + + collectionElement + + "> does not exists in namespace configuration file"); + } + return result; + } + + private int retrieveNumberByName(String name, String collectionElement) { + + int result = -1; + int size = -1; + // log.debug(" NAME : "+name+" | Collection Element :"+collectionElement); + List prop = configuration.getList(collectionElement); + if (prop != null) { + size = prop.size(); + result = prop.indexOf(name); + } else { + log.warn( + "[retrieveNumberByName_2] Element <" + + collectionElement + + "> does not exists in namespace configuration file"); + } + return result; + } + + public Iterator getKeys() { + + return configuration.getKeys(); + } + + /** + * @param element String + * @return int + */ + private int getPropertyNumber(String element) { + + int result = -1; + Object prop = configuration.getProperty(element); + if (prop != null) { + result = 1; // If it is not null its value is atleast '1'! 
+ if (prop instanceof Collection) { + result = ((Collection) prop).size(); + } + } else { + log.warn( + "[getPropertyNumber] Element <" + + element + + "> does not exists in namespace configuration file"); + } + + return result; + } + + private boolean isPresent(String element) { + + boolean result = false; + result = configuration.containsKey(element); + // log.debug("XMLPArserUtil: isPresent('"+element+"')="+result); + return result; + } + + /** + * @param element String + * @return int + */ + private String getStringProperty(String element) throws NamespaceException { + + String prop = null; + try { + prop = configuration.getString(element); + // log.debug("ELEMENT = "+element+" VALUE = "+prop); + } catch (ConversionException ce) { + log.warn("[getStringProperty] Element <" + element + "> does not contains a String value"); + } catch (NoSuchElementException note) { + log.warn( + "[getStringProperty] Element <" + + element + + "> does not exists in namespace configuration file"); + } + return prop; + } + + /** + * @param element String + * @return boolean + */ + private boolean getBooleanProperty(String element) throws NamespaceException { + + boolean result = false; + try { + result = configuration.getBoolean(element); + } catch (ConversionException ce) { + log.warn("[getLongProperty] Element <" + element + "> does not contains a String value"); + } catch (NoSuchElementException note) { + log.warn( + "[getLongProperty] Element <" + + element + + "> does not exists in namespace configuration file"); + } + return result; + } + + /** + * @param element String + * @return int + */ + private long getLongProperty(String element) throws NamespaceException { + + long prop = -1L; + try { + prop = configuration.getLong(element); + } catch (ConversionException ce) { + log.warn("[getLongProperty] Element <" + element + "> does not contains a String value"); + } catch (NoSuchElementException note) { + log.warn( + "[getLongProperty] Element <" + + element + + "> does not exists in namespace configuration file"); + } + return prop; + } + + /** + * @param element String + * @return int + */ + private int getIntProperty(String element) { + + int prop = -1; + try { + prop = configuration.getInt(element); + } catch (ConversionException ce) { + log.warn("[getIntProperty] Element <" + element + "> does not contains a String value"); + } catch (NoSuchElementException note) { + log.warn( + "[getIntProperty] Element <" + + element + + "> does not exists in namespace configuration file"); + } + return prop; + } + + /** + * @param element String + * @return int + */ + private String[] getListProperty(String element) throws NamespaceException { + + String prop = null; + try { + prop = configuration.getString(element); + } catch (ConversionException ce) { + log.warn("[getListProperty] Element <" + element + "> does not contains a String value"); + } catch (NoSuchElementException note) { + log.warn( + "[getListProperty] Element <" + + element + + "> does not exists in namespace configuration file"); + } + // log.debug("LIST : "+prop); + String[] result = prop.split(","); + // log.debug(" LIST lenght :"+result.length); + return result; + } + + private List getListValue(String collectionElement) { + + List propList = configuration.getList(collectionElement); + List prop = Lists.newArrayList(); + // For a set or list + for (Object element2 : propList) { + String element = (String) element2; + prop.add(element.trim()); + } + + log.debug("LIST - prop : " + prop); + log.debug("Nr. 
of elements : " + prop.size()); + if (prop.size() == 0) { + log.warn( + "[retrieveNumberByName_2] Element <" + + collectionElement + + "> does not exists in namespace configuration file"); + } + return prop; + } + + public boolean getDefaultACLDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.GROUP_NAME))) { + result = true; + } + return result; + } + + public int getNumberOfACL(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (numOfFS == -1) { + throw new NamespaceException("FS named '" + nameOfFS + "' does not exist in config"); + } + String aclCount = + substitutionNumber(XMLConst.ACL_ENTRY_COUNTING, XMLConst.FS_SUB_PATTERN, numOfFS); + log.debug("ACL Count = " + aclCount); + return getPropertyNumber(aclCount); + } + + public String getGroupName(String nameOfFS, int aclEntryNumber) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String aclCount = substitutionNumber(XMLConst.GROUP_NAME, XMLConst.FS_SUB_PATTERN, numOfFS); + String result = null; + Object prop = configuration.getProperty(aclCount); + if (prop != null) { + if (prop instanceof Collection) { + ArrayList propList = new ArrayList((Collection) prop); + if (propList.size() > aclEntryNumber) { + result = propList.get(aclEntryNumber); + } + } else { + if (prop instanceof String) { + result = ((String) prop); + } + } + } else { + log.warn( + "[getPropertyNumber] Element <" + + aclCount + + "> does not exists in namespace configuration file"); + } + return result; + // return getStringProperty(substituteNumberInACLEntryElement(nameOfFS, + // aclEntryNumber, XMLConst.GROUP_NAME)); + } + + public String getPermissionString(String nameOfFS, int aclEntryNumber) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + String aclCount = substitutionNumber(XMLConst.PERMISSIONS, XMLConst.FS_SUB_PATTERN, numOfFS); + String result = null; + Object prop = configuration.getProperty(aclCount); + if (prop != null) { + if (prop instanceof Collection) { + ArrayList propList = new ArrayList((Collection) prop); + if (propList.size() > aclEntryNumber) { + result = propList.get(aclEntryNumber); + } + } else { + if (prop instanceof String) { + result = ((String) prop); + } + } + } else { + log.warn( + "[getPropertyNumber] Element <" + + aclCount + + "> does not exists in namespace configuration file"); + } + return result; + + // return getStringProperty(substituteNumberInACLEntryElement(nameOfFS, + // aclEntryNumber, XMLConst.PERMISSIONS)); + } + + /** ********************************** VERSION 1.4.0 ************************************* */ + public String getStorageAreaAuthz(String nameOfFS, SAAuthzType type) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (type.equals(SAAuthzType.FIXED)) { + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_FIXED)); + } else { + return getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_DB)); + } + } + + public SAAuthzType getStorageAreaAuthzType(String nameOfFS) throws NamespaceException { + + if (getStorageAreaAuthzFixedDefined(nameOfFS)) { + return SAAuthzType.FIXED; + } + if (getStorageAreaAuthzDBDefined(nameOfFS)) { + return SAAuthzType.AUTHZDB; + } + throw new 
NamespaceException("Unable to find the SAAuthzType in " + nameOfFS); + } + + public boolean getStorageAreaAuthzFixedDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_FIXED))) { + result = true; + } + return result; + } + + public boolean getStorageAreaAuthzDBDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.SA_AUTHZ_DB))) { + result = true; + } + return result; + } + + public int getProtId(String nameOfFS, int numOfProt) throws NamespaceException { + + // int numOfProt = getProtNumberByName(nameOfFS, protName); + String protId = substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROT_ID); + // log.debug("ProtID : "+protId); + if (isPresent(protId)) { + return getIntProperty( + substituteNumberInProtocolElement(nameOfFS, numOfProt, XMLConst.PROT_ID)); + } else { + return -1; + } + } + + public boolean getOnlineSpaceLimitedSize(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + result = getBooleanProperty(substituteNumberInFSElement(numOfFS, XMLConst.LIMITED_SIZE)); + return result; + } + + public int getNumberOfPool(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (numOfFS == -1) { + throw new NamespaceException("FS named '" + nameOfFS + "' does not exist in config"); + } + if (!getPoolDefined(nameOfFS)) return 0; + String protCount = substitutionNumber(XMLConst.POOL_COUNTING, XMLConst.FS_SUB_PATTERN, numOfFS); + return getPropertyNumber(protCount); + } + + public boolean getPoolDefined(String nameOfFS) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + boolean result = false; + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.POOL_COUNTING))) { + result = true; + } + return result; + } + + public String getBalancerStrategy(String nameOfFS) throws NamespaceException { + + String result = null; + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (isPresent(substituteNumberInFSElement(numOfFS, XMLConst.BALANCE_STRATEGY))) { + result = getStringProperty(substituteNumberInFSElement(numOfFS, XMLConst.BALANCE_STRATEGY)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.BALANCE_STRATEGY + + "' for the VFS:'" + + nameOfFS + + "'"); + } + return result; + } + + public int getNumberOfPoolMembers(String nameOfFS, int poolCounter) throws NamespaceException { + + int numOfFS = retrieveNumberByName(nameOfFS, XMLConst.FS_BY_NAME); + if (numOfFS == -1) { + throw new NamespaceException("FS named '" + nameOfFS + "' does not exist in config"); + } + String subTree = substituteNumberInPoolElement(nameOfFS, poolCounter, XMLConst.POOL); + HierarchicalConfiguration sub = configuration.configurationAt(subTree); + Object members = sub.getProperty("members.member[@member-id]"); + int numOfMembers = -1; + if (members != null) { + if (members instanceof Collection) { + numOfMembers = ((Collection) members).size(); + } else { + numOfMembers = 1; + } + } else { + log.error("Error during the retrieve of the number of pool member of " + nameOfFS); + } + return numOfMembers; + } + + public int 
getMemberID(String nameOfFS, int numOfPool, int memberNr) throws NamespaceException { + + return getIntProperty( + substituteNumberInMembersElement(nameOfFS, numOfPool, memberNr, XMLConst.POOL_MEMBER_ID)); + } + + public int getMemberWeight(String nameOfFS, int numOfPool, int memberNr) + throws NamespaceException { + + return getIntProperty( + substituteNumberInMembersElement( + nameOfFS, numOfPool, memberNr, XMLConst.POOL_MEMBER_WEIGHT)); + } + + public String getBalancerStrategy(String fsName, int poolCounter) throws NamespaceException { + + String poolId = substituteNumberInPoolElement(fsName, poolCounter, XMLConst.BALANCE_STRATEGY); + if (isPresent(poolId)) { + return getStringProperty( + substituteNumberInPoolElement(fsName, poolCounter, XMLConst.BALANCE_STRATEGY)); + } else { + throw new NamespaceException( + "Unable to find the element '" + + XMLConst.BALANCE_STRATEGY + + "' for the VFS:'" + + fsName + + "'"); + } + } } diff --git a/src/main/java/it/grid/storm/namespace/config/xml/XMLReloadingStrategy.java b/src/main/java/it/grid/storm/namespace/config/xml/XMLReloadingStrategy.java index 4529c022..3a4cbe2f 100644 --- a/src/main/java/it/grid/storm/namespace/config/xml/XMLReloadingStrategy.java +++ b/src/main/java/it/grid/storm/namespace/config/xml/XMLReloadingStrategy.java @@ -1,126 +1,112 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.config.xml; import java.io.File; - import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - *

- * Title:
- *
- * Description:
- *
- * Copyright: Copyright (c) 2006
- *
- * Company: INFN-CNAF and ICTP/eGrid project
- *
+ *
+ * Description:
+ *
+ * Copyright: Copyright (c) 2006
+ *
+ *
Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ public class XMLReloadingStrategy extends FileChangedReloadingStrategy { - private boolean notifing = false; - private Logger log = LoggerFactory.getLogger(XMLReloadingStrategy.class); - - private boolean verbosity; - private long reloadingTime; - - public void setVerbosity(boolean verbosity) { - - this.verbosity = verbosity; - } + private boolean notifing = false; + private Logger log = LoggerFactory.getLogger(XMLReloadingStrategy.class); - protected void notifingPerformed() { + private boolean verbosity; + private long reloadingTime; - // log.debug(" NOTIFING set to FALSE"); - this.notifing = false; - } + public void setVerbosity(boolean verbosity) { - protected boolean notifingRequired() { + this.verbosity = verbosity; + } - // log.debug(" NOTIFING is "+notifing); - return notifing; - } + protected void notifingPerformed() { - protected void notifyNeeded() { + // log.debug(" NOTIFING set to FALSE"); + this.notifing = false; + } - // log.debug(" NOTIFING set to TRUE"); - this.notifing = true; + protected boolean notifingRequired() { - } + // log.debug(" NOTIFING is "+notifing); + return notifing; + } - @Override - public boolean reloadingRequired() { + protected void notifyNeeded() { - boolean reloading = false; + // log.debug(" NOTIFING set to TRUE"); + this.notifing = true; + } - long now = System.currentTimeMillis(); + @Override + public boolean reloadingRequired() { - if (now > lastChecked + refreshDelay) { - lastChecked = now; - if (hasChanged()) { - reloading = true; - } - } - if (verbosity) { - log.debug(" ...RELOADING REQUIRED? " + reloading); - } + boolean reloading = false; - return reloading; - } + long now = System.currentTimeMillis(); - @Override - public void reloadingPerformed() { + if (now > lastChecked + refreshDelay) { + lastChecked = now; + if (hasChanged()) { + reloading = true; + } + } + if (verbosity) { + log.debug(" ...RELOADING REQUIRED? " + reloading); + } - updateLastModified(); - this.reloadingTime = System.currentTimeMillis(); + return reloading; + } - } + @Override + public void reloadingPerformed() { - /** - * Check if the configuration has changed since the last time it was loaded. - * - * @return a flag whether the configuration has changed - */ - @Override - protected boolean hasChanged() { + updateLastModified(); + this.reloadingTime = System.currentTimeMillis(); + } - // log.debug("Checking if Namespace Configuration is changed.."); - File file = getConfigurationFile(); - // File file = thigetFile(); - if (file == null || !file.exists()) { - return false; - } - boolean result = file.lastModified() > lastModified; - if (result) { - notifyNeeded(); - log - .debug(" <<<<< Namespace Configuration is CHANGED ---> Notify needed.."); - } - return result; - } + /** + * Check if the configuration has changed since the last time it was loaded. 
+ * + * @return a flag whether the configuration has changed + */ + @Override + protected boolean hasChanged() { - public File getConfigurationFile() { + // log.debug("Checking if Namespace Configuration is changed.."); + File file = getConfigurationFile(); + // File file = thigetFile(); + if (file == null || !file.exists()) { + return false; + } + boolean result = file.lastModified() > lastModified; + if (result) { + notifyNeeded(); + log.debug(" <<<<< Namespace Configuration is CHANGED ---> Notify needed.."); + } + return result; + } - return this.configuration.getFile(); - } + public File getConfigurationFile() { - public long getLastReload() { + return this.configuration.getFile(); + } - return this.reloadingTime; - } + public long getLastReload() { + return this.reloadingTime; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/ACLEntry.java b/src/main/java/it/grid/storm/namespace/model/ACLEntry.java index a22a326a..de5ffe08 100644 --- a/src/main/java/it/grid/storm/namespace/model/ACLEntry.java +++ b/src/main/java/it/grid/storm/namespace/model/ACLEntry.java @@ -1,104 +1,95 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import it.grid.storm.filesystem.FilesystemPermission; import it.grid.storm.namespace.util.userinfo.LocalGroups; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ACLEntry { - private static final Logger LOG = LoggerFactory.getLogger(ACLEntry.class); - - private final String groupName; - private int groupId = -1; - private final FilePermissionType permission; - - public ACLEntry(String groupName, String permissionString) - throws PermissionException { - - // Digest the permission - permission = FilePermissionType.getFilePermissionType(permissionString); - if (permission.equals(FilePermissionType.UNKNOWN)) { - LOG.error("Unble to understand permission '" + permissionString + "'"); - throw new PermissionException("Unble to understand permission '" - + permissionString + "'"); - } - - // Digest the GroupName and Retrieve the GroupId - this.groupName = groupName; - - boolean isDefined = LocalGroups.getInstance().isGroupDefined(groupName); - if (!isDefined) { - throw new PermissionException("The groupName '" + groupName - + "' does not exist!"); - } else { - LOG.debug("Checking if groupName '" + groupName + "' is defined: " - + isDefined); - groupId = LocalGroups.getInstance().getGroupId(groupName); - LOG.debug("GroupID of '" + groupName + "' = " + groupId); - } - } - - public boolean isValid() { - - boolean result = false; - boolean isDefined = LocalGroups.getInstance().isGroupDefined(groupName); - if (!isDefined) { - LOG.error("The groupName '" + groupName + "' does not exist!"); - result = false; - } else { - LOG.debug("Checking if groupName '" + groupName + "' is defined: " - + isDefined); - groupId = LocalGroups.getInstance().getGroupId(groupName); - LOG.debug("GroupID of '" + groupName + "' = " + groupId); - result = true; - } - return result; - } - - public int getGroupID() { - - return groupId; - } - - public String getGroupName() { - - return groupName; - } - - public String getFilePermissionString() { - - return permission.getPermissionString(); - } - - public FilesystemPermission getFilesystemPermission() { - - switch (permission.getOrdinalNumber()) { - // READ 0 : FilePermissionType.READ.getOrdinalNumber() - case 0: - return 
FilesystemPermission.Read; - // READWRITE 1 : FilePermissionType.READWRITE.getOrdinalNumber() - case 1: - return FilesystemPermission.ReadWrite; - // WRITE 2 : FilePermissionType.WRITE.getOrdinalNumber() - case 2: - return FilesystemPermission.Write; - // DEFAULT VALUE (is it possible this case?) == READ - default: - return FilesystemPermission.Read; - } - } - - @Override - public String toString() { - - return "group: " + groupId + " (" + groupName + ")" + " permission: " - + permission; - } - + private static final Logger LOG = LoggerFactory.getLogger(ACLEntry.class); + + private final String groupName; + private int groupId = -1; + private final FilePermissionType permission; + + public ACLEntry(String groupName, String permissionString) throws PermissionException { + + // Digest the permission + permission = FilePermissionType.getFilePermissionType(permissionString); + if (permission.equals(FilePermissionType.UNKNOWN)) { + LOG.error("Unble to understand permission '" + permissionString + "'"); + throw new PermissionException("Unble to understand permission '" + permissionString + "'"); + } + + // Digest the GroupName and Retrieve the GroupId + this.groupName = groupName; + + boolean isDefined = LocalGroups.getInstance().isGroupDefined(groupName); + if (!isDefined) { + throw new PermissionException("The groupName '" + groupName + "' does not exist!"); + } else { + LOG.debug("Checking if groupName '" + groupName + "' is defined: " + isDefined); + groupId = LocalGroups.getInstance().getGroupId(groupName); + LOG.debug("GroupID of '" + groupName + "' = " + groupId); + } + } + + public boolean isValid() { + + boolean result = false; + boolean isDefined = LocalGroups.getInstance().isGroupDefined(groupName); + if (!isDefined) { + LOG.error("The groupName '" + groupName + "' does not exist!"); + result = false; + } else { + LOG.debug("Checking if groupName '" + groupName + "' is defined: " + isDefined); + groupId = LocalGroups.getInstance().getGroupId(groupName); + LOG.debug("GroupID of '" + groupName + "' = " + groupId); + result = true; + } + return result; + } + + public int getGroupID() { + + return groupId; + } + + public String getGroupName() { + + return groupName; + } + + public String getFilePermissionString() { + + return permission.getPermissionString(); + } + + public FilesystemPermission getFilesystemPermission() { + + switch (permission.getOrdinalNumber()) { + // READ 0 : FilePermissionType.READ.getOrdinalNumber() + case 0: + return FilesystemPermission.Read; + // READWRITE 1 : FilePermissionType.READWRITE.getOrdinalNumber() + case 1: + return FilesystemPermission.ReadWrite; + // WRITE 2 : FilePermissionType.WRITE.getOrdinalNumber() + case 2: + return FilesystemPermission.Write; + // DEFAULT VALUE (is it possible this case?) == READ + default: + return FilesystemPermission.Read; + } + } + + @Override + public String toString() { + + return "group: " + groupId + " (" + groupName + ")" + " permission: " + permission; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/ACLMode.java b/src/main/java/it/grid/storm/namespace/model/ACLMode.java index 7e48312c..de82b475 100644 --- a/src/main/java/it/grid/storm/namespace/model/ACLMode.java +++ b/src/main/java/it/grid/storm/namespace/model/ACLMode.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; diff --git a/src/main/java/it/grid/storm/namespace/model/AccessLatency.java b/src/main/java/it/grid/storm/namespace/model/AccessLatency.java index 65f4a226..6826f466 100644 --- a/src/main/java/it/grid/storm/namespace/model/AccessLatency.java +++ b/src/main/java/it/grid/storm/namespace/model/AccessLatency.java @@ -1,56 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class AccessLatency { - /** - * - **/ + /** + * + * + * + */ + private String accessLatency; - private String accessLatency; - private String stringSchema; + private String stringSchema; - public final static AccessLatency ONLINE = new AccessLatency("ONLINE", - "online"); - public final static AccessLatency NEARLINE = new AccessLatency("NEARLINE", - "nearline"); - public final static AccessLatency OFFLINE = new AccessLatency("OFFLINE", - "offline"); - public final static AccessLatency UNKNOWN = new AccessLatency("UNKNOWN", - "Access Latency UNKNOWN!"); + public static final AccessLatency ONLINE = new AccessLatency("ONLINE", "online"); + public static final AccessLatency NEARLINE = new AccessLatency("NEARLINE", "nearline"); + public static final AccessLatency OFFLINE = new AccessLatency("OFFLINE", "offline"); + public static final AccessLatency UNKNOWN = + new AccessLatency("UNKNOWN", "Access Latency UNKNOWN!"); - private AccessLatency(String accessLatency, String stringSchema) { + private AccessLatency(String accessLatency, String stringSchema) { - this.accessLatency = accessLatency; - this.stringSchema = stringSchema; - } + this.accessLatency = accessLatency; + this.stringSchema = stringSchema; + } - // Only get method for Name - public String getAccessLatencyName() { + // Only get method for Name + public String getAccessLatencyName() { - return accessLatency; - } + return accessLatency; + } - // Only get method for Schema - public String toString() { + // Only get method for Schema + public String toString() { - return this.stringSchema; - } + return this.stringSchema; + } - public static AccessLatency getAccessLatency(String accessLatency) { - - if (accessLatency.equals(AccessLatency.ONLINE.toString())) - return AccessLatency.ONLINE; - if (accessLatency.equals(AccessLatency.NEARLINE.toString())) - return AccessLatency.NEARLINE; - if (accessLatency.equals(AccessLatency.OFFLINE.toString())) - return AccessLatency.OFFLINE; - return AccessLatency.UNKNOWN; - } + public static AccessLatency getAccessLatency(String accessLatency) { + if (accessLatency.equals(AccessLatency.ONLINE.toString())) return AccessLatency.ONLINE; + if (accessLatency.equals(AccessLatency.NEARLINE.toString())) return AccessLatency.NEARLINE; + if (accessLatency.equals(AccessLatency.OFFLINE.toString())) return AccessLatency.OFFLINE; + return AccessLatency.UNKNOWN; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/ApproachableRule.java b/src/main/java/it/grid/storm/namespace/model/ApproachableRule.java index a7760809..be4692b8 100644 --- a/src/main/java/it/grid/storm/namespace/model/ApproachableRule.java +++ b/src/main/java/it/grid/storm/namespace/model/ApproachableRule.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -8,242 +7,228 @@ import it.grid.storm.griduser.DistinguishedName; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.namespace.NamespaceDirector; - import java.util.LinkedList; import java.util.List; - import org.slf4j.Logger; public class ApproachableRule implements Comparable { - private Logger log = NamespaceDirector.getLogger(); + private Logger log = NamespaceDirector.getLogger(); + + private final String ruleName; + private final SubjectRules subjectRules; + + private String relativePath = null; + private LinkedList appFS = new LinkedList(); + + private final boolean anonymousHttpReadAccess; - private final String ruleName; - private final SubjectRules subjectRules; + public ApproachableRule( + String rulename, + SubjectRules subjectRules, + String relativePath, + boolean anonymousHttpReadAccess) { - private String relativePath = null; - private LinkedList appFS = new LinkedList(); - - private final boolean anonymousHttpReadAccess; - - public ApproachableRule(String rulename, SubjectRules subjectRules, - String relativePath, boolean anonymousHttpReadAccess) { - - this.ruleName = rulename; - this.subjectRules = subjectRules; - /** - * @todo : Check if relative Path is a path well formed. - */ - this.relativePath = relativePath; - this.anonymousHttpReadAccess = anonymousHttpReadAccess; - } - - public ApproachableRule(String rulename, SubjectRules subjectRules, - String relativePath) { - - this.ruleName = rulename; - this.subjectRules = subjectRules; - /** - * @todo : Check if relative Path is a path well formed. - */ - this.relativePath = relativePath; - this.anonymousHttpReadAccess = false; - } - - public boolean isAdmitAll() { - - return subjectRules.getDNMatchingRule().isMatchAll() - && subjectRules.getVONameMatchingRule().isMatchAll(); - } - - public void addApproachableVFS(VirtualFS vfs) { - - this.appFS.add(vfs); - } - - public List getApproachableVFS() { - - return this.appFS; - } - - /** - * getSpaceRelativePath - * - * @return String - */ - public String getSpaceRelativePath() { - - return relativePath; - } - - /** - * - * @return String - */ - public String getRuleName() { - - return this.ruleName; - } - - public boolean getAnonymousHttpReadAccess() { - - return this.anonymousHttpReadAccess; - } - - /** - * - * @return Subject - */ - public SubjectRules getSubjectRules() { - - return this.subjectRules; - } - - /** - * MAIN METHOD - * - * @param gUser - * GridUserInterface - * @return boolean - */ - public boolean match(GridUserInterface gUser) { - - return matchDN(gUser.getDn()) && matchVoms(gUser); - } - - private boolean matchVoms(GridUserInterface gUser) { - - // ---- Check if VOMS Attributes are required ---- - if (subjectRules.getVONameMatchingRule().isMatchAll()) { - return true; - } - // VOMS Attribute required. 
- if (gUser instanceof AbstractGridUser - && ((AbstractGridUser) gUser).hasVoms()) { - log.debug("Grid User Requestor : " - + ((AbstractGridUser) gUser).toString()); - if (subjectRules.getVONameMatchingRule().match( - ((AbstractGridUser) gUser).getVO().getValue())) { - return true; - } - } - return false; - } - - private boolean matchDN(String dnString) { - - if (dnString == null) { - return subjectRules.getDNMatchingRule().isMatchAll(); - } - DistinguishedName dn = new DistinguishedName(dnString); - return subjectRules.getDNMatchingRule().match(dn); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - String sep = System.getProperty("line.separator"); - sb.append(sep + " --- APPROACHABLE RULE NAME ---" + sep); - sb.append(" Approachable Rule Name : " + this.ruleName + sep); - sb.append(" SUBJECT - dn : " - + this.getSubjectRules().getDNMatchingRule() + sep); - if (!this.getSubjectRules().getVONameMatchingRule().isMatchAll()) { - sb.append(" -- VOMS cert IS MANDATORY!" + sep); - sb.append(" -- SUBJECT - vo_name : " - + this.getSubjectRules().getVONameMatchingRule() + sep); - } else { - sb.append(" -- VOMS cert is not mandatory" + sep); - } - sb.append(" Relative-Path for Space : " + this.getSpaceRelativePath() - + sep); - sb.append(" Approachable VFS : " + this.appFS + sep); - return sb.toString(); - } - - public int compareTo(Object o) { - - int result = 1; - if (o instanceof ApproachableRule) { - ApproachableRule other = (ApproachableRule) o; - result = (this.getRuleName()).compareTo(other.getRuleName()); - } - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + ((appFS == null) ? 0 : appFS.hashCode()); - result = prime * result + ((log == null) ? 0 : log.hashCode()); - result = prime * result - + ((relativePath == null) ? 0 : relativePath.hashCode()); - result = prime * result + ((ruleName == null) ? 0 : ruleName.hashCode()); - result = prime * result - + ((subjectRules == null) ? 0 : subjectRules.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - ApproachableRule other = (ApproachableRule) obj; - if (appFS == null) { - if (other.appFS != null) { - return false; - } - } else if (!appFS.equals(other.appFS)) { - return false; - } - if (log == null) { - if (other.log != null) { - return false; - } - } else if (!log.equals(other.log)) { - return false; - } - if (relativePath == null) { - if (other.relativePath != null) { - return false; - } - } else if (!relativePath.equals(other.relativePath)) { - return false; - } - if (ruleName == null) { - if (other.ruleName != null) { - return false; - } - } else if (!ruleName.equals(other.ruleName)) { - return false; - } - if (subjectRules == null) { - if (other.subjectRules != null) { - return false; - } - } else if (!subjectRules.equals(other.subjectRules)) { - return false; - } - return true; - } + this.ruleName = rulename; + this.subjectRules = subjectRules; + /** @todo : Check if relative Path is a path well formed. 
*/ + this.relativePath = relativePath; + this.anonymousHttpReadAccess = anonymousHttpReadAccess; + } + + public ApproachableRule(String rulename, SubjectRules subjectRules, String relativePath) { + + this.ruleName = rulename; + this.subjectRules = subjectRules; + /** @todo : Check if relative Path is a path well formed. */ + this.relativePath = relativePath; + this.anonymousHttpReadAccess = false; + } + + public boolean isAdmitAll() { + + return subjectRules.getDNMatchingRule().isMatchAll() + && subjectRules.getVONameMatchingRule().isMatchAll(); + } + + public void addApproachableVFS(VirtualFS vfs) { + + this.appFS.add(vfs); + } + + public List getApproachableVFS() { + + return this.appFS; + } + + /** + * getSpaceRelativePath + * + * @return String + */ + public String getSpaceRelativePath() { + + return relativePath; + } + + /** @return String */ + public String getRuleName() { + + return this.ruleName; + } + + public boolean getAnonymousHttpReadAccess() { + + return this.anonymousHttpReadAccess; + } + + /** @return Subject */ + public SubjectRules getSubjectRules() { + + return this.subjectRules; + } + + /** + * MAIN METHOD + * + * @param gUser GridUserInterface + * @return boolean + */ + public boolean match(GridUserInterface gUser) { + + return matchDN(gUser.getDn()) && matchVoms(gUser); + } + + private boolean matchVoms(GridUserInterface gUser) { + + // ---- Check if VOMS Attributes are required ---- + if (subjectRules.getVONameMatchingRule().isMatchAll()) { + return true; + } + // VOMS Attribute required. + if (gUser instanceof AbstractGridUser && ((AbstractGridUser) gUser).hasVoms()) { + log.debug("Grid User Requestor : " + ((AbstractGridUser) gUser).toString()); + if (subjectRules + .getVONameMatchingRule() + .match(((AbstractGridUser) gUser).getVO().getValue())) { + return true; + } + } + return false; + } + + private boolean matchDN(String dnString) { + + if (dnString == null) { + return subjectRules.getDNMatchingRule().isMatchAll(); + } + DistinguishedName dn = new DistinguishedName(dnString); + return subjectRules.getDNMatchingRule().match(dn); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + String sep = System.getProperty("line.separator"); + sb.append(sep + " --- APPROACHABLE RULE NAME ---" + sep); + sb.append(" Approachable Rule Name : " + this.ruleName + sep); + sb.append(" SUBJECT - dn : " + this.getSubjectRules().getDNMatchingRule() + sep); + if (!this.getSubjectRules().getVONameMatchingRule().isMatchAll()) { + sb.append(" -- VOMS cert IS MANDATORY!" + sep); + sb.append( + " -- SUBJECT - vo_name : " + + this.getSubjectRules().getVONameMatchingRule() + + sep); + } else { + sb.append(" -- VOMS cert is not mandatory" + sep); + } + sb.append(" Relative-Path for Space : " + this.getSpaceRelativePath() + sep); + sb.append(" Approachable VFS : " + this.appFS + sep); + return sb.toString(); + } + + public int compareTo(Object o) { + + int result = 1; + if (o instanceof ApproachableRule) { + ApproachableRule other = (ApproachableRule) o; + result = (this.getRuleName()).compareTo(other.getRuleName()); + } + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((appFS == null) ? 0 : appFS.hashCode()); + result = prime * result + ((log == null) ? 0 : log.hashCode()); + result = prime * result + ((relativePath == null) ? 
0 : relativePath.hashCode()); + result = prime * result + ((ruleName == null) ? 0 : ruleName.hashCode()); + result = prime * result + ((subjectRules == null) ? 0 : subjectRules.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ApproachableRule other = (ApproachableRule) obj; + if (appFS == null) { + if (other.appFS != null) { + return false; + } + } else if (!appFS.equals(other.appFS)) { + return false; + } + if (log == null) { + if (other.log != null) { + return false; + } + } else if (!log.equals(other.log)) { + return false; + } + if (relativePath == null) { + if (other.relativePath != null) { + return false; + } + } else if (!relativePath.equals(other.relativePath)) { + return false; + } + if (ruleName == null) { + if (other.ruleName != null) { + return false; + } + } else if (!ruleName.equals(other.ruleName)) { + return false; + } + if (subjectRules == null) { + if (other.subjectRules != null) { + return false; + } + } else if (!subjectRules.equals(other.subjectRules)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/Authority.java b/src/main/java/it/grid/storm/namespace/model/Authority.java index eab5a954..4d6d49d7 100644 --- a/src/main/java/it/grid/storm/namespace/model/Authority.java +++ b/src/main/java/it/grid/storm/namespace/model/Authority.java @@ -1,103 +1,98 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class Authority { - private String hostname; - private int port = -1; + private String hostname; + private int port = -1; - public final static Authority EMPTY = new Authority(""); + public static final Authority EMPTY = new Authority(""); - /** - * Complete constructor - * - * @param serviceHostname - * String - * @param servicePort - * int - */ - public Authority(String serviceHostname, int servicePort) { + /** + * Complete constructor + * + * @param serviceHostname String + * @param servicePort int + */ + public Authority(String serviceHostname, int servicePort) { - this.hostname = serviceHostname; - this.port = servicePort; - } + this.hostname = serviceHostname; + this.port = servicePort; + } - /** - * Cnstructor with default port - * - * @param serviceHostname - * String - */ - public Authority(String serviceHostname) { + /** + * Cnstructor with default port + * + * @param serviceHostname String + */ + public Authority(String serviceHostname) { - this.hostname = serviceHostname; - } + this.hostname = serviceHostname; + } - public String getServiceHostname() { + public String getServiceHostname() { - return this.hostname; - } + return this.hostname; + } - public void setServiceHostname(String hostname) { + public void setServiceHostname(String hostname) { - this.hostname = hostname; - } + this.hostname = hostname; + } - public int getServicePort() { + public int getServicePort() { - return this.port; - } + return this.port; + } - public void setServicePort(int port) { + public void setServicePort(int port) { - this.port = port; - } + this.port = port; + } - private String getHostnameAndPort() { + private String getHostnameAndPort() { - StringBuilder result = new StringBuilder(); - if (hostname != null) { - result.append(hostname); - if (port > 0) { - result.append(":"); - result.append(port); - } - } - return result.toString(); - } + StringBuilder result = new StringBuilder(); + if (hostname != null) { + result.append(hostname); + if (port > 0) { + result.append(":"); + result.append(port); + } + } + return result.toString(); + } - public String toString() { + public String toString() { - return getHostnameAndPort(); - } + return getHostnameAndPort(); + } - public boolean equals(Object other) { + public boolean equals(Object other) { - boolean result = false; - if (other instanceof Authority) { - Authority otherA = (Authority) other; - if (otherA.getServiceHostname().equals(this.getServiceHostname())) { // Hostname - // is - // equal - // Check if the Port is equal. - if (otherA.getServicePort() == this.getServicePort()) { - result = true; - } - } - } - return result; - } + boolean result = false; + if (other instanceof Authority) { + Authority otherA = (Authority) other; + if (otherA.getServiceHostname().equals(this.getServiceHostname())) { // Hostname + // is + // equal + // Check if the Port is equal. + if (otherA.getServicePort() == this.getServicePort()) { + result = true; + } + } + } + return result; + } - @Override - public int hashCode() { - - int result = 17; - result = 31 * result + (hostname != null ? hostname.hashCode() : 0); - result = 31 * result + port; - return result; - } + @Override + public int hashCode() { + int result = 17; + result = 31 * result + (hostname != null ? 
hostname.hashCode() : 0); + result = 31 * result + port; + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/Capability.java b/src/main/java/it/grid/storm/namespace/model/Capability.java index 78a09762..c538cd36 100644 --- a/src/main/java/it/grid/storm/namespace/model/Capability.java +++ b/src/main/java/it/grid/storm/namespace/model/Capability.java @@ -1,18 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import it.grid.storm.balancer.BalancingStrategy; import it.grid.storm.balancer.Node; import it.grid.storm.balancer.node.FTPNode; @@ -20,6 +12,10 @@ import it.grid.storm.balancer.node.HttpsNode; import it.grid.storm.balancer.strategy.BalancingStrategyFactory; import it.grid.storm.namespace.NamespaceException; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class Capability { @@ -41,10 +37,7 @@ public class Capability { private DefaultACL defaultACL; - /** - * Constructor - * - */ + /** Constructor */ public Capability(ACLMode aclMode) throws NamespaceException { setACLMode(aclMode); @@ -63,10 +56,10 @@ public Capability() throws NamespaceException { this(ACLMode.UNDEF); } - /***************************************************************************** - * BUILDING METHODs - ****************************************************************************/ - + /** + * *************************************************************************** BUILDING METHODs + * ************************************************************************** + */ public void setACLMode(ACLMode aclMode) throws NamespaceException { this.aclMode = aclMode; @@ -74,7 +67,7 @@ public void setACLMode(ACLMode aclMode) throws NamespaceException { /** * addProtocol - * + * * @param prot Protocol */ public void addTransportProtocolByScheme(Protocol protocol, TransportProtocol trasfProt) { @@ -121,9 +114,10 @@ public void addProtocolPoolBySchema(Protocol protocol, ProtocolPool protPool) protocolPoolsByScheme.put(protocol, protPool); // Building Balancer and put it into Map of Balancers - if (Protocol.GSIFTP.equals(protocol) || Protocol.HTTP.equals(protocol) + if (Protocol.GSIFTP.equals(protocol) + || Protocol.HTTP.equals(protocol) || Protocol.HTTPS.equals(protocol)) { - + List nodeList = Lists.newLinkedList(); Node node = null; boolean weighedPool = protPool.getBalanceStrategy().requireWeight(); @@ -154,11 +148,13 @@ public void addProtocolPoolBySchema(Protocol protocol, ProtocolPool protPool) BalancingStrategyFactory.getBalancingStrategy(protPool.getBalanceStrategy(), nodeList); balancerByScheme.put(protocol, balancingStrategy); } catch (IllegalArgumentException e) { - log.error("Unable to get {} balacing strategy for nodes {}", - protPool.getBalanceStrategy().toString(), nodeList.toString()); + log.error( + "Unable to get {} balacing strategy for nodes {}", + protPool.getBalanceStrategy().toString(), + nodeList.toString()); throw new NamespaceException("Unable to create a balancing schema from the protocol pool"); } - + } else { log.error("The current version manage only GSIFTP."); } @@ -193,13 +189,14 @@ private Node buildNode(Protocol 
protocol, int id, String hostname, int port, int throw new Exception("Unsupported protocol, no node type available: " + protocol); } - /***************************************************************************** - * READ METHODs - ****************************************************************************/ + /** + * *************************************************************************** READ METHODs + * ************************************************************************** + */ /** * getACLMode - * + * * @return String */ public ACLMode getACLMode() { @@ -217,16 +214,14 @@ public DefaultACL getDefaultACL() { return this.defaultACL; } - /***************************************************************************** - * BUSINESS METHODs - ****************************************************************************/ - + /** + * *************************************************************************** BUSINESS METHODs + * ************************************************************************** + */ public boolean isAllowedProtocol(String protocolScheme) { boolean result = false; - /** - * @todo IMPLEMENT THIS! - */ + /** @todo IMPLEMENT THIS! */ return result; } @@ -253,10 +248,10 @@ public String toString() { return sb.toString(); } - /****************************************** - * VERSION 1.4 * - *******************************************/ - + /** + * **************************************** VERSION 1.4 * + * ***************************************** + */ public ProtocolPool getPoolByScheme(Protocol protocol) { ProtocolPool poll = null; @@ -278,11 +273,12 @@ public BalancingStrategy getBalancingStrategyByScheme(Protocol protocol) { public List getManagedProtocolByScheme(Protocol protocol) { List result = Lists.newArrayList(); - transpProtocolsList.forEach(tp -> { - if (tp.getProtocol().equals(protocol)) { - result.add(tp); - } - }); + transpProtocolsList.forEach( + tp -> { + if (tp.getProtocol().equals(protocol)) { + result.add(tp); + } + }); return result; } @@ -303,5 +299,4 @@ public TransportProtocol getProtocolByID(int id) { } return null; } - } diff --git a/src/main/java/it/grid/storm/namespace/model/DefaultACL.java b/src/main/java/it/grid/storm/namespace/model/DefaultACL.java index 0cbb7f2a..f4775eda 100644 --- a/src/main/java/it/grid/storm/namespace/model/DefaultACL.java +++ b/src/main/java/it/grid/storm/namespace/model/DefaultACL.java @@ -1,69 +1,51 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import java.util.ArrayList; import java.util.List; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DefaultACL { - private final Logger log = LoggerFactory.getLogger(DefaultACL.class); - private List acl = new ArrayList(); - - /** - * - */ - public DefaultACL() { + private final Logger log = LoggerFactory.getLogger(DefaultACL.class); + private List acl = new ArrayList(); - super(); - } + /** */ + public DefaultACL() { - /** - * - * @param aclEntry - * ACLEntry - */ - public void addACLEntry(ACLEntry aclEntry) { + super(); + } - acl.add(aclEntry); - log.debug("Added to Default ACL : " + aclEntry); - } + /** @param aclEntry ACLEntry */ + public void addACLEntry(ACLEntry aclEntry) { - /** - * - * @return boolean - */ - public boolean isEmpty() { + acl.add(aclEntry); + log.debug("Added to Default ACL : " + aclEntry); + } - return acl.isEmpty(); - } + /** @return boolean */ + public boolean isEmpty() { - /** - * - * @return List - */ - public List getACL() { + return acl.isEmpty(); + } - return acl; - } + /** @return List */ + public List getACL() { - /** - * - * @return String - */ - @Override - public String toString() { + return acl; + } - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < acl.size(); i++) { - sb.append("ACL[" + i + "] = ( ").append(acl.get(i)).append(") \n"); - } - return sb.toString(); - } + /** @return String */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < acl.size(); i++) { + sb.append("ACL[" + i + "] = ( ").append(acl.get(i)).append(") \n"); + } + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/DefaultValues.java b/src/main/java/it/grid/storm/namespace/model/DefaultValues.java index 9ac98452..7cc294b6 100644 --- a/src/main/java/it/grid/storm/namespace/model/DefaultValues.java +++ b/src/main/java/it/grid/storm/namespace/model/DefaultValues.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -14,318 +13,273 @@ import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceType; - import org.slf4j.Logger; /** - *

 * Title:
- * </p>
- *
- * <p>
- * Description:
- * </p>
- *
- * <p>
- * Copyright: Copyright (c) 2006
- * </p>
- *
- * <p>
- * Company: INFN-CNAF and ICTP/eGrid project
- * </p>
- *
+ *
+ * <p>Description:
+ *
+ * <p>Copyright: Copyright (c) 2006
+ *
+ * <p>
Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ public class DefaultValues implements DefaultValuesInterface { - private Logger log = NamespaceDirector.getLogger(); - private SpaceDefault spaceDefault; - private FileDefault fileDefault; - - public DefaultValues(SpaceDefault spaceDefault, FileDefault fileDefault) { - - this.spaceDefault = spaceDefault; - this.fileDefault = fileDefault; - } - - public DefaultValues() { - - try { - this.spaceDefault = new SpaceDefault(); - } catch (NamespaceException ex) { - log.error("Something was wrong building default Space Default Values"); - } - try { - this.fileDefault = new FileDefault(); - } catch (NamespaceException ex1) { - log.error("Something was wrong building default File Default Values"); - } - } - - public void setSpaceDefaults(String type, long lifetime, long guarsize, - long totalsize) throws NamespaceException { - - this.spaceDefault = new SpaceDefault(type, lifetime, guarsize, totalsize); - } - - public void setFileDefaults(String type, long lifetime) - throws NamespaceException { - - this.fileDefault = new FileDefault(type, lifetime); - } - - public TLifeTimeInSeconds getDefaultSpaceLifetime() { - - return spaceDefault.lifetime; - } - - public TSpaceType getDefaultSpaceType() { - - return spaceDefault.type; - } - - public TSizeInBytes getDefaultGuaranteedSpaceSize() { - - return spaceDefault.guarsize; - } - - public TSizeInBytes getDefaultTotalSpaceSize() { - - return spaceDefault.totalsize; - } - - public TLifeTimeInSeconds getDefaultFileLifeTime() { - - return fileDefault.lifetime; - } - - public TFileStorageType getDefaultFileType() { - - return fileDefault.type; - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - String sep = System.getProperty("line.separator"); - sb.append(" DEF. Space Lifetime : " - + this.getDefaultSpaceLifetime() + sep); - sb.append(" DEF. Space Guar. size : " - + this.getDefaultGuaranteedSpaceSize() + sep); - sb.append(" DEF. Space Tot. size : " - + this.getDefaultTotalSpaceSize() + sep); - sb.append(" DEF. Space Type : " + this.getDefaultSpaceType() - + sep); - sb.append(" DEF. File Lifetime : " + this.getDefaultFileLifeTime() - + sep); - sb.append(" DEF. File Type : " + this.getDefaultFileType() - + sep); - return sb.toString(); - } - - /************************************************************************** - * INNER CLASS - **************************************************************************/ - - /** - * - *

- * Title:
- * </p>
- *
- * <p>
- * Description:
- * </p>
- *
- * <p>
- * Copyright: Copyright (c) 2006
- * </p>
- *
- * <p>
- * Company: INFN-CNAF and ICTP/eGrid project
- * </p>
- * - * @author Riccardo Zappi - * @version 1.0 - */ - public class SpaceDefault { - - private TSpaceType type = null; - private TLifeTimeInSeconds lifetime; - private TSizeInBytes guarsize; - private TSizeInBytes totalsize; - - public SpaceDefault() throws NamespaceException { - - // Build space type - this.type = TSpaceType.getTSpaceType(DefaultValues.DEFAULT_SPACE_TYPE); - // Build lifetime - try { - this.lifetime = TLifeTimeInSeconds.make(DefaultValues.DEFAULT_SPACE_LT, - TimeUnit.SECONDS); - } catch (IllegalArgumentException ex) { - log.error(" Default Space Lifetime was wrong "); - throw new NamespaceException( - "Space Lifetime invalid argument in Namespace configuration.", ex); - } - // Build of Guaranteed Space Size - try { - this.guarsize = TSizeInBytes.make( - DefaultValues.DEFAULT_SPACE_GUAR_SIZE, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException ex1) { - log.error(" Default Guaranteed Space Size was wrong "); - throw new NamespaceException( - " Guaranteed Space Size invalid argument in Namespace configuration.", - ex1); - } - - // Build of Total Space Size - try { - this.totalsize = TSizeInBytes.make( - DefaultValues.DEFAULT_SPACE_TOT_SIZE, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException ex2) { - log.error(" Default Total Space Size was wrong "); - throw new NamespaceException( - "Total Space Size invalid argument in Namespace configuration.", ex2); - } - } - - public SpaceDefault(String type, long lifetime, long guarsize, - long totalsize) throws NamespaceException { - - // Build space type - this.type = TSpaceType.getTSpaceType(type); - - // Build lifetime - try { - this.lifetime = TLifeTimeInSeconds.make(lifetime, TimeUnit.SECONDS); - } catch (IllegalArgumentException ex) { - log.error(" Default Space Lifetime was wrong "); - throw new NamespaceException( - "Space Lifetime invalid argument in Namespace configuration.", ex); - } - - // Checking of size - if (guarsize > totalsize) { - log - .error(" Default Space Guaranteed Size is greater of Space Total Size !"); - throw new NamespaceException( - "Space size (Guar and Total) are invalid in Namespace configuration."); - } - - // Build of Guaranteed Space Size - try { - this.guarsize = TSizeInBytes.make(guarsize, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException ex1) { - log.error(" Default Guaranteed Space Size was wrong "); - throw new NamespaceException( - " Guaranteed Space Size invalid argument in Namespace configuration.", - ex1); - } - - // Build of Total Space Size - try { - this.totalsize = TSizeInBytes.make(totalsize, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException ex2) { - log.error(" Default Total Space Size was wrong "); - throw new NamespaceException( - "Total Space Size invalid argument in Namespace configuration.", ex2); - } - } - - public TSpaceType getSpaceType() { - - return type; - } - - public TLifeTimeInSeconds getLifetime() { - - return lifetime; - } - - public TSizeInBytes guarsize() { - - return guarsize; - } - - public TSizeInBytes totalsize() { - - return totalsize; - } - - } - - /** - * - *

- * Title:
- * </p>
- *
- * <p>
- * Description:
- * </p>
- *
- * <p>
- * Copyright: Copyright (c) 2006
- * </p>
- *
- * <p>
- * Company: INFN-CNAF and ICTP/eGrid project
- * </p>
- * - * @author Riccardo Zappi - * @version 1.0 - */ - public class FileDefault { - - private TFileStorageType type = null; - private TLifeTimeInSeconds lifetime; - - public FileDefault() throws NamespaceException { - - // Build space type - this.type = TFileStorageType - .getTFileStorageType(DefaultValues.DEFAULT_FILE_TYPE); - - // Build lifetime - try { - this.lifetime = TLifeTimeInSeconds.make(DefaultValues.DEFAULT_FILE_LT, - TimeUnit.SECONDS); - } catch (IllegalArgumentException ex) { - log.error(" Default Space Lifetime was wrong "); - throw new NamespaceException( - "Space Lifetime invalid argument in Namespace configuration.", ex); - } - - } - - public FileDefault(String type, long lifetime) throws NamespaceException { - - // Build space type - this.type = TFileStorageType.getTFileStorageType(type); - - // Build lifetime - try { - this.lifetime = TLifeTimeInSeconds.make(lifetime, TimeUnit.SECONDS); - } catch (IllegalArgumentException ex) { - log.error(" Default Space Lifetime was wrong "); - throw new NamespaceException( - "Space Lifetime invalid argument in Namespace configuration.", ex); - } - } - - public TFileStorageType getFileStorageType() { - - return type; - } - - public TLifeTimeInSeconds getLifetime() { - - return lifetime; - } - - } + private Logger log = NamespaceDirector.getLogger(); + private SpaceDefault spaceDefault; + private FileDefault fileDefault; + + public DefaultValues(SpaceDefault spaceDefault, FileDefault fileDefault) { + + this.spaceDefault = spaceDefault; + this.fileDefault = fileDefault; + } + + public DefaultValues() { + try { + this.spaceDefault = new SpaceDefault(); + } catch (NamespaceException ex) { + log.error("Something was wrong building default Space Default Values"); + } + try { + this.fileDefault = new FileDefault(); + } catch (NamespaceException ex1) { + log.error("Something was wrong building default File Default Values"); + } + } + + public void setSpaceDefaults(String type, long lifetime, long guarsize, long totalsize) + throws NamespaceException { + + this.spaceDefault = new SpaceDefault(type, lifetime, guarsize, totalsize); + } + + public void setFileDefaults(String type, long lifetime) throws NamespaceException { + + this.fileDefault = new FileDefault(type, lifetime); + } + + public TLifeTimeInSeconds getDefaultSpaceLifetime() { + + return spaceDefault.lifetime; + } + + public TSpaceType getDefaultSpaceType() { + + return spaceDefault.type; + } + + public TSizeInBytes getDefaultGuaranteedSpaceSize() { + + return spaceDefault.guarsize; + } + + public TSizeInBytes getDefaultTotalSpaceSize() { + + return spaceDefault.totalsize; + } + + public TLifeTimeInSeconds getDefaultFileLifeTime() { + + return fileDefault.lifetime; + } + + public TFileStorageType getDefaultFileType() { + + return fileDefault.type; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + String sep = System.getProperty("line.separator"); + sb.append(" DEF. Space Lifetime : " + this.getDefaultSpaceLifetime() + sep); + sb.append(" DEF. Space Guar. size : " + this.getDefaultGuaranteedSpaceSize() + sep); + sb.append(" DEF. Space Tot. size : " + this.getDefaultTotalSpaceSize() + sep); + sb.append(" DEF. Space Type : " + this.getDefaultSpaceType() + sep); + sb.append(" DEF. File Lifetime : " + this.getDefaultFileLifeTime() + sep); + sb.append(" DEF. 
File Type : " + this.getDefaultFileType() + sep); + return sb.toString(); + } + + /** + * ************************************************************************ INNER CLASS + * ************************************************************************ + */ + + /** + * Title: + * + *

Description:
+ *
+ * <p>Copyright: Copyright (c) 2006
+ *
+ * <p>
Company: INFN-CNAF and ICTP/eGrid project + * + * @author Riccardo Zappi + * @version 1.0 + */ + public class SpaceDefault { + + private TSpaceType type = null; + private TLifeTimeInSeconds lifetime; + private TSizeInBytes guarsize; + private TSizeInBytes totalsize; + + public SpaceDefault() throws NamespaceException { + + // Build space type + this.type = TSpaceType.getTSpaceType(DefaultValues.DEFAULT_SPACE_TYPE); + // Build lifetime + try { + this.lifetime = TLifeTimeInSeconds.make(DefaultValues.DEFAULT_SPACE_LT, TimeUnit.SECONDS); + } catch (IllegalArgumentException ex) { + log.error(" Default Space Lifetime was wrong "); + throw new NamespaceException( + "Space Lifetime invalid argument in Namespace configuration.", ex); + } + // Build of Guaranteed Space Size + try { + this.guarsize = TSizeInBytes.make(DefaultValues.DEFAULT_SPACE_GUAR_SIZE, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException ex1) { + log.error(" Default Guaranteed Space Size was wrong "); + throw new NamespaceException( + " Guaranteed Space Size invalid argument in Namespace configuration.", ex1); + } + + // Build of Total Space Size + try { + this.totalsize = TSizeInBytes.make(DefaultValues.DEFAULT_SPACE_TOT_SIZE, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException ex2) { + log.error(" Default Total Space Size was wrong "); + throw new NamespaceException( + "Total Space Size invalid argument in Namespace configuration.", ex2); + } + } + + public SpaceDefault(String type, long lifetime, long guarsize, long totalsize) + throws NamespaceException { + + // Build space type + this.type = TSpaceType.getTSpaceType(type); + + // Build lifetime + try { + this.lifetime = TLifeTimeInSeconds.make(lifetime, TimeUnit.SECONDS); + } catch (IllegalArgumentException ex) { + log.error(" Default Space Lifetime was wrong "); + throw new NamespaceException( + "Space Lifetime invalid argument in Namespace configuration.", ex); + } + + // Checking of size + if (guarsize > totalsize) { + log.error(" Default Space Guaranteed Size is greater of Space Total Size !"); + throw new NamespaceException( + "Space size (Guar and Total) are invalid in Namespace configuration."); + } + + // Build of Guaranteed Space Size + try { + this.guarsize = TSizeInBytes.make(guarsize, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException ex1) { + log.error(" Default Guaranteed Space Size was wrong "); + throw new NamespaceException( + " Guaranteed Space Size invalid argument in Namespace configuration.", ex1); + } + + // Build of Total Space Size + try { + this.totalsize = TSizeInBytes.make(totalsize, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException ex2) { + log.error(" Default Total Space Size was wrong "); + throw new NamespaceException( + "Total Space Size invalid argument in Namespace configuration.", ex2); + } + } + + public TSpaceType getSpaceType() { + + return type; + } + + public TLifeTimeInSeconds getLifetime() { + + return lifetime; + } + + public TSizeInBytes guarsize() { + + return guarsize; + } + + public TSizeInBytes totalsize() { + + return totalsize; + } + } + + /** + * Title: + * + *

Description:
+ *
+ * <p>Copyright: Copyright (c) 2006
+ *
+ * <p>
Company: INFN-CNAF and ICTP/eGrid project + * + * @author Riccardo Zappi + * @version 1.0 + */ + public class FileDefault { + + private TFileStorageType type = null; + private TLifeTimeInSeconds lifetime; + + public FileDefault() throws NamespaceException { + + // Build space type + this.type = TFileStorageType.getTFileStorageType(DefaultValues.DEFAULT_FILE_TYPE); + + // Build lifetime + try { + this.lifetime = TLifeTimeInSeconds.make(DefaultValues.DEFAULT_FILE_LT, TimeUnit.SECONDS); + } catch (IllegalArgumentException ex) { + log.error(" Default Space Lifetime was wrong "); + throw new NamespaceException( + "Space Lifetime invalid argument in Namespace configuration.", ex); + } + } + + public FileDefault(String type, long lifetime) throws NamespaceException { + + // Build space type + this.type = TFileStorageType.getTFileStorageType(type); + + // Build lifetime + try { + this.lifetime = TLifeTimeInSeconds.make(lifetime, TimeUnit.SECONDS); + } catch (IllegalArgumentException ex) { + log.error(" Default Space Lifetime was wrong "); + throw new NamespaceException( + "Space Lifetime invalid argument in Namespace configuration.", ex); + } + } + + public TFileStorageType getFileStorageType() { + + return type; + } + + public TLifeTimeInSeconds getLifetime() { + + return lifetime; + } + } } diff --git a/src/main/java/it/grid/storm/namespace/model/ExpirationMode.java b/src/main/java/it/grid/storm/namespace/model/ExpirationMode.java index 588f9045..bf290f66 100644 --- a/src/main/java/it/grid/storm/namespace/model/ExpirationMode.java +++ b/src/main/java/it/grid/storm/namespace/model/ExpirationMode.java @@ -1,56 +1,51 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class ExpirationMode { - /** - * - * - * - **/ - - private String expirationMode; - private String stringSchema; - - public final static ExpirationMode NEVER_EXPIRE = new ExpirationMode( - "NEVER_EXPIRE", "neverExpire"); - public final static ExpirationMode WARN_WHEN_EXPIRE = new ExpirationMode( - "WARN_WHEN_EXPIRE", "warnWhenExpire"); - public final static ExpirationMode RELEASE_WHEN_EXPIRE = new ExpirationMode( - "RELEASE_WHEN_EXPIRE", "releaseWhenExpire"); - public final static ExpirationMode UNKNOWN = new ExpirationMode("UNKNOWN", - "Expiration mode UNKNOWN!"); - - private ExpirationMode(String expirationMode, String stringSchema) { - - this.expirationMode = expirationMode; - this.stringSchema = stringSchema; - } - - // Only get method for Name - public String getExpirationModeName() { - - return expirationMode; - } - - // Only get method for Schema - public String toString() { - - return this.stringSchema; - } - - public static ExpirationMode getExpirationMode(String expMode) { - - if (expMode.equals(NEVER_EXPIRE.toString())) - return ExpirationMode.NEVER_EXPIRE; - if (expMode.equals(WARN_WHEN_EXPIRE.toString())) - return ExpirationMode.WARN_WHEN_EXPIRE; - if (expMode.equals(RELEASE_WHEN_EXPIRE.toString())) - return ExpirationMode.RELEASE_WHEN_EXPIRE; - return ExpirationMode.UNKNOWN; - } + /** + * + * + * + */ + private String expirationMode; + + private String stringSchema; + + public static final ExpirationMode NEVER_EXPIRE = + new ExpirationMode("NEVER_EXPIRE", "neverExpire"); + public static final ExpirationMode WARN_WHEN_EXPIRE = + new ExpirationMode("WARN_WHEN_EXPIRE", "warnWhenExpire"); + public static final ExpirationMode RELEASE_WHEN_EXPIRE = + new ExpirationMode("RELEASE_WHEN_EXPIRE", "releaseWhenExpire"); + public static final ExpirationMode UNKNOWN = + new ExpirationMode("UNKNOWN", "Expiration mode UNKNOWN!"); + + private ExpirationMode(String expirationMode, String stringSchema) { + + this.expirationMode = expirationMode; + this.stringSchema = stringSchema; + } + + // Only get method for Name + public String getExpirationModeName() { + + return expirationMode; + } + + // Only get method for Schema + public String toString() { + + return this.stringSchema; + } + + public static ExpirationMode getExpirationMode(String expMode) { + + if (expMode.equals(NEVER_EXPIRE.toString())) return ExpirationMode.NEVER_EXPIRE; + if (expMode.equals(WARN_WHEN_EXPIRE.toString())) return ExpirationMode.WARN_WHEN_EXPIRE; + if (expMode.equals(RELEASE_WHEN_EXPIRE.toString())) return ExpirationMode.RELEASE_WHEN_EXPIRE; + return ExpirationMode.UNKNOWN; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/FilePermissionType.java b/src/main/java/it/grid/storm/namespace/model/FilePermissionType.java index 37844220..b6efeaa1 100644 --- a/src/main/java/it/grid/storm/namespace/model/FilePermissionType.java +++ b/src/main/java/it/grid/storm/namespace/model/FilePermissionType.java @@ -1,117 +1,99 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class FilePermissionType { - private int ordinalNumber; - private String permissionType; - private String permissionString; - - public final static FilePermissionType READ = new FilePermissionType("READ", - "R", 0); - public final static FilePermissionType READWRITE = new FilePermissionType( - "READWRITE", "RW", 1); - public final static FilePermissionType WRITE = new FilePermissionType( - "WRITE", "W", 2); - public final static FilePermissionType UNKNOWN = new FilePermissionType( - "UNKNOWN", "Permission Type UNKNOWN!", -1); - - private FilePermissionType(String permissionType, String permissionString, - int ord) { - - this.permissionType = permissionType; - this.permissionString = permissionString; - this.ordinalNumber = ord; - } - - // Only get method for Ordinal Number - public int getOrdinalNumber() { - - return this.ordinalNumber; - } - - public String getPermissionString() { - - return this.permissionString; - } - - /** - * - * @param quotaType - * String - * @return QuotaType - */ - public static FilePermissionType getFilePermissionType(String permission) { - - if (permission.equals(READ.getPermissionString())) { - return FilePermissionType.READ; - } - if (permission.equals(READWRITE.getPermissionString())) { - return FilePermissionType.READWRITE; - } - if (permission.equals(WRITE.getPermissionString())) { - return FilePermissionType.WRITE; - } - return FilePermissionType.UNKNOWN; - } - - /** - * - * @param quotaType - * String - * @return QuotaType - */ - public static FilePermissionType getFilePermissionType(int filetypeOrd) { - - if (filetypeOrd == 0) { - return FilePermissionType.READ; - } - if (filetypeOrd == 1) { - return FilePermissionType.READWRITE; - } - if (filetypeOrd == 2) { - return FilePermissionType.WRITE; - } - return FilePermissionType.UNKNOWN; - } - - /** - * - * @return int - */ - public int hashCode() { - - return this.ordinalNumber; - } - - /** - * - * @param other - * Object - * @return boolean - */ - public boolean equals(Object other) { - - boolean result = false; - if (other instanceof FilePermissionType) { - FilePermissionType ft = (FilePermissionType) other; - if (ft.ordinalNumber == this.ordinalNumber) { - result = true; - } - } - return result; - } - - /** - * - * @return String - */ - public String toString() { - - return this.permissionType; - } - + private int ordinalNumber; + private String permissionType; + private String permissionString; + + public static final FilePermissionType READ = new FilePermissionType("READ", "R", 0); + public static final FilePermissionType READWRITE = new FilePermissionType("READWRITE", "RW", 1); + public static final FilePermissionType WRITE = new FilePermissionType("WRITE", "W", 2); + public static final FilePermissionType UNKNOWN = + new FilePermissionType("UNKNOWN", "Permission Type UNKNOWN!", -1); + + private FilePermissionType(String permissionType, String permissionString, int ord) { + + this.permissionType = permissionType; + this.permissionString = permissionString; + this.ordinalNumber = ord; + } + + // Only get method for Ordinal Number + public int getOrdinalNumber() { + + return this.ordinalNumber; + } + + public String getPermissionString() { + + return this.permissionString; + } + + /** + * @param quotaType String + * @return QuotaType + */ + public static FilePermissionType getFilePermissionType(String permission) { + + if (permission.equals(READ.getPermissionString())) { + return FilePermissionType.READ; + } + if 
(permission.equals(READWRITE.getPermissionString())) { + return FilePermissionType.READWRITE; + } + if (permission.equals(WRITE.getPermissionString())) { + return FilePermissionType.WRITE; + } + return FilePermissionType.UNKNOWN; + } + + /** + * @param quotaType String + * @return QuotaType + */ + public static FilePermissionType getFilePermissionType(int filetypeOrd) { + + if (filetypeOrd == 0) { + return FilePermissionType.READ; + } + if (filetypeOrd == 1) { + return FilePermissionType.READWRITE; + } + if (filetypeOrd == 2) { + return FilePermissionType.WRITE; + } + return FilePermissionType.UNKNOWN; + } + + /** @return int */ + public int hashCode() { + + return this.ordinalNumber; + } + + /** + * @param other Object + * @return boolean + */ + public boolean equals(Object other) { + + boolean result = false; + if (other instanceof FilePermissionType) { + FilePermissionType ft = (FilePermissionType) other; + if (ft.ordinalNumber == this.ordinalNumber) { + result = true; + } + } + return result; + } + + /** @return String */ + public String toString() { + + return this.permissionType; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/MappingRule.java b/src/main/java/it/grid/storm/namespace/model/MappingRule.java index 3933d254..b97a8d31 100644 --- a/src/main/java/it/grid/storm/namespace/model/MappingRule.java +++ b/src/main/java/it/grid/storm/namespace/model/MappingRule.java @@ -1,47 +1,43 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; - public class MappingRule { - private final String ruleName; - private final String stfnRoot; - private final VirtualFS mappedFS; - - public MappingRule(String ruleName, String stfn_root/* , String mapped_fs */, - VirtualFS vfs) { + private final String ruleName; + private final String stfnRoot; + private final VirtualFS mappedFS; - this.ruleName = ruleName; - this.stfnRoot = stfn_root; - this.mappedFS = vfs; - } + public MappingRule(String ruleName, String stfn_root /* , String mapped_fs */, VirtualFS vfs) { - public String getRuleName() { + this.ruleName = ruleName; + this.stfnRoot = stfn_root; + this.mappedFS = vfs; + } - return this.ruleName; - } + public String getRuleName() { - public String getStFNRoot() { + return this.ruleName; + } - return this.stfnRoot; - } + public String getStFNRoot() { - public VirtualFS getMappedFS() { + return this.stfnRoot; + } - return this.mappedFS; - } + public VirtualFS getMappedFS() { - public String toString() { + return this.mappedFS; + } - StringBuilder sb = new StringBuilder(); - String sep = System.getProperty("line.separator"); - sb.append(sep + " Mapping rule name : " + this.ruleName + sep); - sb.append(" StFN-Root : " + this.stfnRoot + sep); - sb.append(" mapped-FS : " + this.mappedFS + sep); - return sb.toString(); - } + public String toString() { + StringBuilder sb = new StringBuilder(); + String sep = System.getProperty("line.separator"); + sb.append(sep + " Mapping rule name : " + this.ruleName + sep); + sb.append(" StFN-Root : " + this.stfnRoot + sep); + sb.append(" mapped-FS : " + this.mappedFS + sep); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/PathCreator.java b/src/main/java/it/grid/storm/namespace/model/PathCreator.java index b8fb0749..ff0b58a3 100644 --- a/src/main/java/it/grid/storm/namespace/model/PathCreator.java +++ 
b/src/main/java/it/grid/storm/namespace/model/PathCreator.java @@ -1,123 +1,121 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * Class for creating fo StoRI by file - * - * Edited by Michele Dibenedetto + * + *

Edited by Michele Dibenedetto */ - package it.grid.storm.namespace.model; import java.io.File; import java.util.ArrayList; import java.util.Collection; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PathCreator { - /** - * Logger. - */ - private static final Logger log = LoggerFactory.getLogger(PathCreator.class); - - private File file; - private boolean recursive; - private int level; - - public PathCreator(File file, boolean recursive, int level) { - - this.file = file; - log.debug(": New Path Created: " + file.toString()); - this.recursive = recursive; - this.level = level; - - } - - public Collection generateChildrenNoFolders() { - - return generateChildrenNoFolders(this.file, this.recursive, this.level); - } - - /** - * If file is a directory returns the list of files contained in the - * directory, then on each subfolder if recursive is true or level is greater - * than zero call itself recursively on it decreasing the level. If file is - * file returns the file itself - * - * @param file - * @param recursive - * @param level - * @return - */ - private Collection generateChildrenNoFolders(File file, - boolean recursive, int level) { - - ArrayList children = new ArrayList(); - log.debug("Generating children of = " + file + " with recursive option = " - + recursive + " and recursion level = " + level); - if (file.isDirectory()) { - log.debug("Is a directory"); - if (recursive || (level > 0)) { - log.debug("Recursion permitted. Inspectiong the content"); - String[] arrayOfNames = file.list(); - if (arrayOfNames != null) { - for (String filePath : arrayOfNames) { - log.debug("Analizing child = " + filePath); - if (!(filePath.startsWith("."))) { - File child = new File(file, filePath); - if (child.isDirectory()) { - log.debug("It's a directory, calling recursive procedure" - + " with level " + (level - 1)); - children.addAll(generateChildrenNoFolders(child, recursive, - level - 1)); - } else { - log.debug("It is a file. Reached a leaf"); - children.add(child.toString()); - } - } - } - } - } - } else { - log.debug("It's a file, adding to the return collection"); - children.add(file.toString()); - } - return children; - } - - /** - * @param list - * @return - */ - public Collection generateChildren() { - - ArrayList children = new ArrayList(); - String[] arrayOfNames = null; - log.debug("Generating children of = " + file + " with recursive option = " - + recursive + " and recursion level = " + level); - if (file.isDirectory() && (recursive || level > 0)) { - log - .debug("Is a directory with recursion permitted. Inspectiong the content"); - arrayOfNames = file.list(); - if (arrayOfNames != null) { - for (String filePath : arrayOfNames) { - log.debug("Analizing child = " + filePath); - if (!(filePath.startsWith("."))) { - PathCreator path = new PathCreator(new File(file, filePath), - recursive, level - 1); - children.addAll(path.generateChildren()); - } - } - } - - } else { - children.add(file.toString()); - } - return children; - } - + /** Logger. 
*/ + private static final Logger log = LoggerFactory.getLogger(PathCreator.class); + + private File file; + private boolean recursive; + private int level; + + public PathCreator(File file, boolean recursive, int level) { + + this.file = file; + log.debug(": New Path Created: " + file.toString()); + this.recursive = recursive; + this.level = level; + } + + public Collection generateChildrenNoFolders() { + + return generateChildrenNoFolders(this.file, this.recursive, this.level); + } + + /** + * If file is a directory returns the list of files contained in the directory, then on each + * subfolder if recursive is true or level is greater than zero call itself recursively on it + * decreasing the level. If file is file returns the file itself + * + * @param file + * @param recursive + * @param level + * @return + */ + private Collection generateChildrenNoFolders(File file, boolean recursive, int level) { + + ArrayList children = new ArrayList(); + log.debug( + "Generating children of = " + + file + + " with recursive option = " + + recursive + + " and recursion level = " + + level); + if (file.isDirectory()) { + log.debug("Is a directory"); + if (recursive || (level > 0)) { + log.debug("Recursion permitted. Inspectiong the content"); + String[] arrayOfNames = file.list(); + if (arrayOfNames != null) { + for (String filePath : arrayOfNames) { + log.debug("Analizing child = " + filePath); + if (!(filePath.startsWith("."))) { + File child = new File(file, filePath); + if (child.isDirectory()) { + log.debug( + "It's a directory, calling recursive procedure" + " with level " + (level - 1)); + children.addAll(generateChildrenNoFolders(child, recursive, level - 1)); + } else { + log.debug("It is a file. Reached a leaf"); + children.add(child.toString()); + } + } + } + } + } + } else { + log.debug("It's a file, adding to the return collection"); + children.add(file.toString()); + } + return children; + } + + /** + * @param list + * @return + */ + public Collection generateChildren() { + + ArrayList children = new ArrayList(); + String[] arrayOfNames = null; + log.debug( + "Generating children of = " + + file + + " with recursive option = " + + recursive + + " and recursion level = " + + level); + if (file.isDirectory() && (recursive || level > 0)) { + log.debug("Is a directory with recursion permitted. Inspectiong the content"); + arrayOfNames = file.list(); + if (arrayOfNames != null) { + for (String filePath : arrayOfNames) { + log.debug("Analizing child = " + filePath); + if (!(filePath.startsWith("."))) { + PathCreator path = new PathCreator(new File(file, filePath), recursive, level - 1); + children.addAll(path.generateChildren()); + } + } + } + + } else { + children.add(file.toString()); + } + return children; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/PermissionException.java b/src/main/java/it/grid/storm/namespace/model/PermissionException.java index 6011dec4..db890e8b 100644 --- a/src/main/java/it/grid/storm/namespace/model/PermissionException.java +++ b/src/main/java/it/grid/storm/namespace/model/PermissionException.java @@ -1,28 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class PermissionException extends RuntimeException { - public PermissionException() { + public PermissionException() { - super(); - } + super(); + } - public PermissionException(String message) { + public PermissionException(String message) { - super(message); - } + super(message); + } - public PermissionException(String message, Throwable cause) { + public PermissionException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public PermissionException(Throwable cause) { + public PermissionException(Throwable cause) { - super(cause); - } + super(cause); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/PoolMember.java b/src/main/java/it/grid/storm/namespace/model/PoolMember.java index 022522ac..c6468fd6 100644 --- a/src/main/java/it/grid/storm/namespace/model/PoolMember.java +++ b/src/main/java/it/grid/storm/namespace/model/PoolMember.java @@ -1,47 +1,45 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class PoolMember { - private final int memberID; - private final int memberWeight; // -1 means undefined - private final TransportProtocol memberProtocol; + private final int memberID; + private final int memberWeight; // -1 means undefined + private final TransportProtocol memberProtocol; - public PoolMember(int memberID, TransportProtocol protocol) { + public PoolMember(int memberID, TransportProtocol protocol) { - this(memberID, protocol, -1); - } + this(memberID, protocol, -1); + } - public PoolMember(int memberID, TransportProtocol protocol, int weight) { + public PoolMember(int memberID, TransportProtocol protocol, int weight) { - this.memberID = memberID; - this.memberProtocol = protocol; - this.memberWeight = weight; - } + this.memberID = memberID; + this.memberProtocol = protocol; + this.memberWeight = weight; + } - public int getMemberID() { + public int getMemberID() { - return this.memberID; - } + return this.memberID; + } - public int getMemberWeight() { + public int getMemberWeight() { - return this.memberWeight; - } + return this.memberWeight; + } - public TransportProtocol getMemberProtocol() { + public TransportProtocol getMemberProtocol() { - return this.memberProtocol; - } + return this.memberProtocol; + } - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append(memberProtocol + " --> Weight: " + this.memberWeight); - return sb.toString(); - } + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(memberProtocol + " --> Weight: " + this.memberWeight); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/Property.java b/src/main/java/it/grid/storm/namespace/model/Property.java index bf71db1a..75ef4089 100644 --- a/src/main/java/it/grid/storm/namespace/model/Property.java +++ b/src/main/java/it/grid/storm/namespace/model/Property.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -10,218 +9,198 @@ import it.grid.storm.namespace.PropertyInterface; import it.grid.storm.srm.types.InvalidTSizeAttributesException; import it.grid.storm.srm.types.TSizeInBytes; - import org.slf4j.Logger; public class Property implements PropertyInterface { - private Logger log = NamespaceDirector.getLogger(); - private TSizeInBytes totalOnlineSize = TSizeInBytes.makeEmpty(); - private TSizeInBytes totalNearlineSize = TSizeInBytes.makeEmpty(); - private RetentionPolicy retentionPolicy = RetentionPolicy.UNKNOWN; - private ExpirationMode expirationMode = ExpirationMode.UNKNOWN; - private AccessLatency accessLatency = AccessLatency.UNKNOWN; - private boolean hasLimitedSize = false; + private Logger log = NamespaceDirector.getLogger(); + private TSizeInBytes totalOnlineSize = TSizeInBytes.makeEmpty(); + private TSizeInBytes totalNearlineSize = TSizeInBytes.makeEmpty(); + private RetentionPolicy retentionPolicy = RetentionPolicy.UNKNOWN; + private ExpirationMode expirationMode = ExpirationMode.UNKNOWN; + private AccessLatency accessLatency = AccessLatency.UNKNOWN; + private boolean hasLimitedSize = false; + + public static Property from(PropertyInterface other) { + + Property property = new Property(); + property.accessLatency = other.getAccessLatency(); + property.expirationMode = other.getExpirationMode(); + property.hasLimitedSize = other.hasLimitedSize(); + property.retentionPolicy = other.getRetentionPolicy(); + property.totalNearlineSize = other.getTotalNearlineSize(); + property.totalOnlineSize = other.getTotalOnlineSize(); + return property; + } + + public TSizeInBytes getTotalOnlineSize() { + + return totalOnlineSize; + } + + public TSizeInBytes getTotalNearlineSize() { + + return totalNearlineSize; + } - public static Property from(PropertyInterface other) { + public RetentionPolicy getRetentionPolicy() { - Property property = new Property(); - property.accessLatency = other.getAccessLatency(); - property.expirationMode = other.getExpirationMode(); - property.hasLimitedSize = other.hasLimitedSize(); - property.retentionPolicy = other.getRetentionPolicy(); - property.totalNearlineSize = other.getTotalNearlineSize(); - property.totalOnlineSize = other.getTotalOnlineSize(); - return property; - } + return retentionPolicy; + } - public TSizeInBytes getTotalOnlineSize() { + public ExpirationMode getExpirationMode() { - return totalOnlineSize; - } + return expirationMode; + } - public TSizeInBytes getTotalNearlineSize() { - - return totalNearlineSize; - } + public AccessLatency getAccessLatency() { - public RetentionPolicy getRetentionPolicy() { + return accessLatency; + } - return retentionPolicy; - } - - public ExpirationMode getExpirationMode() { + @Override + public boolean hasLimitedSize() { - return expirationMode; - } - - public AccessLatency getAccessLatency() { + return hasLimitedSize; + } - return accessLatency; - } - - @Override - public boolean hasLimitedSize() { - - return hasLimitedSize; - } - - public void setTotalOnlineSize(String unitType, long onlineSize) - throws NamespaceException { - - try { - this.totalOnlineSize = SizeUnitType.getInBytes(unitType, onlineSize); - } catch (InvalidTSizeAttributesException ex1) { - log.error("TotalOnlineSize parameter is wrong "); - throw new NamespaceException( - "'TotalOnlineSize' invalid argument in Namespace configuration.", ex1); - } - } - - public void setTotalNearlineSize(String unitType, long nearlineSize) - throws NamespaceException { + public void 
setTotalOnlineSize(String unitType, long onlineSize) throws NamespaceException { - try { - this.totalNearlineSize = SizeUnitType.getInBytes(unitType, nearlineSize); - } catch (InvalidTSizeAttributesException ex1) { - log.error("TotalOnlineSize parameter is wrong "); - throw new NamespaceException( - "'TotalOnlineSize' invalid argument in Namespace configuration.", ex1); - } - } - - public void setRetentionPolicy(String retentionPolicy) - throws NamespaceException { - - this.retentionPolicy = RetentionPolicy.getRetentionPolicy(retentionPolicy); - } - - public void setAccessLatency(String accessLatency) throws NamespaceException { - - this.accessLatency = AccessLatency.getAccessLatency(accessLatency); - } - - public void setExpirationMode(String expirationMode) - throws NamespaceException { - - this.expirationMode = ExpirationMode.getExpirationMode(expirationMode); - } - - public void setLimitedSize(boolean limitedSize) throws NamespaceException { - - this.hasLimitedSize = limitedSize; - } - - /****************************************** - * VERSION 1.4 * - *******************************************/ - - public boolean isOnlineSpaceLimited() { - - return hasLimitedSize; - } - - /** - * - *

- * Title:
- * </p>
- *
- * <p>
- * Description:
- * </p>
- *
- * <p>
- * Copyright: Copyright (c) 2007
- * </p>
- *
- * <p>
- * Company:
- * </p>
- * - * @author not attributable - * @version 1.0 - */ - public static class SizeUnitType { - - private Logger log = NamespaceDirector.getLogger(); - - /** - * - **/ - - private String sizeTypeName; - private int ordinal; - private long size; - - public final static SizeUnitType BYTE = new SizeUnitType("Byte", 0, 1); - public final static SizeUnitType KB = new SizeUnitType("KB", 1, 1000); - public final static SizeUnitType MB = new SizeUnitType("MB", 2, 1000000); - public final static SizeUnitType GB = new SizeUnitType("GB", 3, 1000000000); - public final static SizeUnitType TB = new SizeUnitType("TB", 4, - 1000000000000L); - public final static SizeUnitType UNKNOWN = new SizeUnitType("UNKNOWN", -1, - -1); - - private SizeUnitType(String sizeTypeName, int ordinal, long size) { - - this.sizeTypeName = sizeTypeName; - this.size = size; - this.ordinal = ordinal; - } - - public String getTypeName() { - - return this.sizeTypeName; - } - - private static SizeUnitType makeUnitType(String unitType) { - - SizeUnitType result = SizeUnitType.UNKNOWN; - if (unitType.equals(SizeUnitType.BYTE.sizeTypeName)) { - result = SizeUnitType.BYTE; - } - if (unitType.equals(SizeUnitType.KB.sizeTypeName)) { - result = SizeUnitType.KB; - } - if (unitType.equals(SizeUnitType.MB.sizeTypeName)) { - result = SizeUnitType.MB; - } - if (unitType.equals(SizeUnitType.GB.sizeTypeName)) { - result = SizeUnitType.GB; - } - if (unitType.equals(SizeUnitType.TB.sizeTypeName)) { - result = SizeUnitType.TB; - } - return result; - } - - public static TSizeInBytes getInBytes(String unitType, long value) - throws InvalidTSizeAttributesException { - - TSizeInBytes result = TSizeInBytes.makeEmpty(); - SizeUnitType sizeUnitType = makeUnitType(unitType); - if (!(sizeUnitType.getTypeName().equals(SizeUnitType.UNKNOWN - .getTypeName()))) { - result = TSizeInBytes.make(value * sizeUnitType.size, SizeUnit.BYTES); - } - return result; - } - - public TSizeInBytes getInBytes() { - - TSizeInBytes result = TSizeInBytes.makeEmpty(); - try { - result = TSizeInBytes.make(this.size, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException ex) { - log.error("Size '" + this.size + "'are invalid. Use empty size: '" - + result + "'." 
+ ex); - } - return result; - } - - } + try { + this.totalOnlineSize = SizeUnitType.getInBytes(unitType, onlineSize); + } catch (InvalidTSizeAttributesException ex1) { + log.error("TotalOnlineSize parameter is wrong "); + throw new NamespaceException( + "'TotalOnlineSize' invalid argument in Namespace configuration.", ex1); + } + } + public void setTotalNearlineSize(String unitType, long nearlineSize) throws NamespaceException { + + try { + this.totalNearlineSize = SizeUnitType.getInBytes(unitType, nearlineSize); + } catch (InvalidTSizeAttributesException ex1) { + log.error("TotalOnlineSize parameter is wrong "); + throw new NamespaceException( + "'TotalOnlineSize' invalid argument in Namespace configuration.", ex1); + } + } + + public void setRetentionPolicy(String retentionPolicy) throws NamespaceException { + + this.retentionPolicy = RetentionPolicy.getRetentionPolicy(retentionPolicy); + } + + public void setAccessLatency(String accessLatency) throws NamespaceException { + + this.accessLatency = AccessLatency.getAccessLatency(accessLatency); + } + + public void setExpirationMode(String expirationMode) throws NamespaceException { + + this.expirationMode = ExpirationMode.getExpirationMode(expirationMode); + } + + public void setLimitedSize(boolean limitedSize) throws NamespaceException { + + this.hasLimitedSize = limitedSize; + } + + /** + * **************************************** VERSION 1.4 * + * ***************************************** + */ + public boolean isOnlineSpaceLimited() { + + return hasLimitedSize; + } + + /** + * Title: + * + *

Description:
+ *
+ * <p>Copyright: Copyright (c) 2007
+ *
+ * <p>
Company: + * + * @author not attributable + * @version 1.0 + */ + public static class SizeUnitType { + + private Logger log = NamespaceDirector.getLogger(); + + /** + * + * + * + */ + private String sizeTypeName; + + private int ordinal; + private long size; + + public static final SizeUnitType BYTE = new SizeUnitType("Byte", 0, 1); + public static final SizeUnitType KB = new SizeUnitType("KB", 1, 1000); + public static final SizeUnitType MB = new SizeUnitType("MB", 2, 1000000); + public static final SizeUnitType GB = new SizeUnitType("GB", 3, 1000000000); + public static final SizeUnitType TB = new SizeUnitType("TB", 4, 1000000000000L); + public static final SizeUnitType UNKNOWN = new SizeUnitType("UNKNOWN", -1, -1); + + private SizeUnitType(String sizeTypeName, int ordinal, long size) { + + this.sizeTypeName = sizeTypeName; + this.size = size; + this.ordinal = ordinal; + } + + public String getTypeName() { + + return this.sizeTypeName; + } + + private static SizeUnitType makeUnitType(String unitType) { + + SizeUnitType result = SizeUnitType.UNKNOWN; + if (unitType.equals(SizeUnitType.BYTE.sizeTypeName)) { + result = SizeUnitType.BYTE; + } + if (unitType.equals(SizeUnitType.KB.sizeTypeName)) { + result = SizeUnitType.KB; + } + if (unitType.equals(SizeUnitType.MB.sizeTypeName)) { + result = SizeUnitType.MB; + } + if (unitType.equals(SizeUnitType.GB.sizeTypeName)) { + result = SizeUnitType.GB; + } + if (unitType.equals(SizeUnitType.TB.sizeTypeName)) { + result = SizeUnitType.TB; + } + return result; + } + + public static TSizeInBytes getInBytes(String unitType, long value) + throws InvalidTSizeAttributesException { + + TSizeInBytes result = TSizeInBytes.makeEmpty(); + SizeUnitType sizeUnitType = makeUnitType(unitType); + if (!(sizeUnitType.getTypeName().equals(SizeUnitType.UNKNOWN.getTypeName()))) { + result = TSizeInBytes.make(value * sizeUnitType.size, SizeUnit.BYTES); + } + return result; + } + + public TSizeInBytes getInBytes() { + + TSizeInBytes result = TSizeInBytes.makeEmpty(); + try { + result = TSizeInBytes.make(this.size, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException ex) { + log.error("Size '" + this.size + "'are invalid. Use empty size: '" + result + "'." + ex); + } + return result; + } + } } diff --git a/src/main/java/it/grid/storm/namespace/model/Protocol.java b/src/main/java/it/grid/storm/namespace/model/Protocol.java index 37075610..12b8a302 100644 --- a/src/main/java/it/grid/storm/namespace/model/Protocol.java +++ b/src/main/java/it/grid/storm/namespace/model/Protocol.java @@ -1,174 +1,151 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2006 - *

- * - *

- * Company: - *

- * + * + *

<p>Description: + * + *
<p>Copyright: Copyright (c) 2006 + * + *
<p>
Company: + * * @author not attributable * @version 1.0 */ public class Protocol { - private int protocolIndex = -1; - - private String protocolServiceName; - private String protocolName; - private String schema; - private int defaultPort = -1; - - public final static Protocol FILE = new Protocol(1, "FILE", "file", -1); - public final static Protocol GSIFTP = new Protocol(2, "GSIFTP", "gsiftp", - 2811); - public final static Protocol RFIO = new Protocol(3, "RFIO", "rfio", 5001); - public final static Protocol SRM = new Protocol(4, "SRM", "srm", 8444); - public final static Protocol ROOT = new Protocol(5, "ROOT", "root", 1094); - // TODO HTTPS TURL - public final static Protocol HTTP = new Protocol(6, "HTTP", "http", 8080); - public final static Protocol HTTPS = new Protocol(7, "HTTPS", "https", 443); - - public final static Protocol XROOT = new Protocol(8, "XROOT", "xroot", 1094); - - public final static Protocol EMPTY = new Protocol(0, "EMPTY", "", -1); - public final static Protocol UNKNOWN = new Protocol(-1, "UNKNOWN", "", -1); - - /** - * Constructor - * - * @param protocolName - * String - * @param protocolSchema - * String - */ - private Protocol(int protocolIndex, String protocolName, - String protocolScheme, int defaultPort) { - - this.protocolIndex = protocolIndex; - this.protocolName = protocolName; - this.schema = protocolScheme; - this.defaultPort = defaultPort; - } - - // Return internal index for equals method and to use in a switch statement - public int getProtocolIndex() { - - return protocolIndex; - } - - // Only get method for Name - public String getProtocolName() { - - return protocolName; - } - - // Only get method for Schema - public String getSchema() { - - return schema; - } - - // Only get method for Schema - public String getProtocolPrefix() { - - return this.schema + "://"; - } - - public void setProtocolServiceName(String serviceName) { - - this.protocolServiceName = serviceName; - } - - public String getProtocolServiceName() { - - return this.protocolServiceName; - } - - public int getDefaultPort() { - - return this.defaultPort; - } - - public static Protocol getProtocol(String scheme) { - - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(FILE.getSchema().toLowerCase())) { - return FILE; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(GSIFTP.getSchema().toLowerCase())) { - return GSIFTP; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(RFIO.getSchema().toLowerCase())) { - return RFIO; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(ROOT.getSchema().toLowerCase())) { - return ROOT; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(SRM.getSchema().toLowerCase())) { - return SRM; - } - // TODO HTTPS TURL - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(HTTP.getSchema().toLowerCase())) { - return HTTP; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(HTTPS.getSchema().toLowerCase())) { - return HTTPS; - } - - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(EMPTY.getSchema().toLowerCase())) { - return EMPTY; - } - if (scheme.toLowerCase().replaceAll(" ", "") - .equals(XROOT.getSchema().toLowerCase())) { - return XROOT; - } - return UNKNOWN; - } - - public int hashCode() { - - return protocolIndex; - } - - public boolean equals(Object o) { - - boolean result = false; - if (o instanceof Protocol) { - Protocol other = (Protocol) o; - if (other.getProtocolIndex() == this.getProtocolIndex()) { - result = true; - } - } - return result; - } - - public String toString() { - - StringBuilder 
buf = new StringBuilder(); - buf.append(this.protocolName + " = " + this.getSchema() + "://"); - return buf.toString(); - } - + private int protocolIndex = -1; + + private String protocolServiceName; + private String protocolName; + private String schema; + private int defaultPort = -1; + + public static final Protocol FILE = new Protocol(1, "FILE", "file", -1); + public static final Protocol GSIFTP = new Protocol(2, "GSIFTP", "gsiftp", 2811); + public static final Protocol RFIO = new Protocol(3, "RFIO", "rfio", 5001); + public static final Protocol SRM = new Protocol(4, "SRM", "srm", 8444); + public static final Protocol ROOT = new Protocol(5, "ROOT", "root", 1094); + // TODO HTTPS TURL + public static final Protocol HTTP = new Protocol(6, "HTTP", "http", 8080); + public static final Protocol HTTPS = new Protocol(7, "HTTPS", "https", 443); + + public static final Protocol XROOT = new Protocol(8, "XROOT", "xroot", 1094); + + public static final Protocol EMPTY = new Protocol(0, "EMPTY", "", -1); + public static final Protocol UNKNOWN = new Protocol(-1, "UNKNOWN", "", -1); + + /** + * Constructor + * + * @param protocolName String + * @param protocolSchema String + */ + private Protocol(int protocolIndex, String protocolName, String protocolScheme, int defaultPort) { + + this.protocolIndex = protocolIndex; + this.protocolName = protocolName; + this.schema = protocolScheme; + this.defaultPort = defaultPort; + } + + // Return internal index for equals method and to use in a switch statement + public int getProtocolIndex() { + + return protocolIndex; + } + + // Only get method for Name + public String getProtocolName() { + + return protocolName; + } + + // Only get method for Schema + public String getSchema() { + + return schema; + } + + // Only get method for Schema + public String getProtocolPrefix() { + + return this.schema + "://"; + } + + public void setProtocolServiceName(String serviceName) { + + this.protocolServiceName = serviceName; + } + + public String getProtocolServiceName() { + + return this.protocolServiceName; + } + + public int getDefaultPort() { + + return this.defaultPort; + } + + public static Protocol getProtocol(String scheme) { + + if (scheme.toLowerCase().replaceAll(" ", "").equals(FILE.getSchema().toLowerCase())) { + return FILE; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(GSIFTP.getSchema().toLowerCase())) { + return GSIFTP; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(RFIO.getSchema().toLowerCase())) { + return RFIO; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(ROOT.getSchema().toLowerCase())) { + return ROOT; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(SRM.getSchema().toLowerCase())) { + return SRM; + } + // TODO HTTPS TURL + if (scheme.toLowerCase().replaceAll(" ", "").equals(HTTP.getSchema().toLowerCase())) { + return HTTP; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(HTTPS.getSchema().toLowerCase())) { + return HTTPS; + } + + if (scheme.toLowerCase().replaceAll(" ", "").equals(EMPTY.getSchema().toLowerCase())) { + return EMPTY; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(XROOT.getSchema().toLowerCase())) { + return XROOT; + } + return UNKNOWN; + } + + public int hashCode() { + + return protocolIndex; + } + + public boolean equals(Object o) { + + boolean result = false; + if (o instanceof Protocol) { + Protocol other = (Protocol) o; + if (other.getProtocolIndex() == this.getProtocolIndex()) { + result = true; + } + } + return result; + } + + public String toString() { + + 
StringBuilder buf = new StringBuilder(); + buf.append(this.protocolName + " = " + this.getSchema() + "://"); + return buf.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/ProtocolPool.java b/src/main/java/it/grid/storm/namespace/model/ProtocolPool.java index 17f97bfe..c16b6e9f 100644 --- a/src/main/java/it/grid/storm/namespace/model/ProtocolPool.java +++ b/src/main/java/it/grid/storm/namespace/model/ProtocolPool.java @@ -1,66 +1,61 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; -import java.util.List; - import com.google.common.collect.Lists; - import it.grid.storm.balancer.BalancingStrategyType; +import java.util.List; public class ProtocolPool { - private final Protocol poolType; - private final BalancingStrategyType balanceStrategy; - private final List poolMembers = Lists.newArrayList(); - - public ProtocolPool(Protocol protocol, BalancingStrategyType strategy, - List members) { + private final Protocol poolType; + private final BalancingStrategyType balanceStrategy; + private final List poolMembers = Lists.newArrayList(); - this.poolType = protocol; - this.balanceStrategy = strategy; - this.poolMembers.addAll(members); - } + public ProtocolPool(Protocol protocol, BalancingStrategyType strategy, List members) { - public ProtocolPool(BalancingStrategyType strategy, List members) { + this.poolType = protocol; + this.balanceStrategy = strategy; + this.poolMembers.addAll(members); + } - this(members.get(0).getMemberProtocol().getProtocol(), strategy, members); - } + public ProtocolPool(BalancingStrategyType strategy, List members) { - public BalancingStrategyType getBalanceStrategy() { + this(members.get(0).getMemberProtocol().getProtocol(), strategy, members); + } - return this.balanceStrategy; - } + public BalancingStrategyType getBalanceStrategy() { - public Protocol getPoolType() { + return this.balanceStrategy; + } - return this.poolType; - } + public Protocol getPoolType() { - public List getPoolMembers() { + return this.poolType; + } - return this.poolMembers; - } + public List getPoolMembers() { - public void addPoolMember(PoolMember member) { + return this.poolMembers; + } - poolMembers.add(member); - } + public void addPoolMember(PoolMember member) { - @Override - public String toString() { + poolMembers.add(member); + } - StringBuilder builder = new StringBuilder(); - builder.append("ProtocolPool [poolType="); - builder.append(poolType); - builder.append(", balanceStrategy="); - builder.append(balanceStrategy); - builder.append(", poolMembers="); - builder.append(poolMembers); - builder.append("]"); - return builder.toString(); - } + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("ProtocolPool [poolType="); + builder.append(poolType); + builder.append(", balanceStrategy="); + builder.append(balanceStrategy); + builder.append(", poolMembers="); + builder.append(poolMembers); + builder.append("]"); + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/Quota.java b/src/main/java/it/grid/storm/namespace/model/Quota.java index 6bb22d29..47018e76 100644 --- a/src/main/java/it/grid/storm/namespace/model/Quota.java +++ b/src/main/java/it/grid/storm/namespace/model/Quota.java @@ -1,96 +1,93 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import it.grid.storm.namespace.NamespaceDirector; - import org.slf4j.Logger; public class Quota { - private final Logger log = NamespaceDirector.getLogger(); - - private boolean defined = false; - private boolean enabled = false; - private String device = null; - private QuotaType quotaType = null; + private final Logger log = NamespaceDirector.getLogger(); - public Quota() { + private boolean defined = false; + private boolean enabled = false; + private String device = null; + private QuotaType quotaType = null; - super(); - } + public Quota() { - public Quota(boolean enabled, String device, QuotaType quotaType) { + super(); + } - defined = true; - this.enabled = enabled; - this.device = device; - this.quotaType = quotaType; - } + public Quota(boolean enabled, String device, QuotaType quotaType) { - /** - * Read only attribute - * - * @return boolean - */ - public boolean getDefined() { + defined = true; + this.enabled = enabled; + this.device = device; + this.quotaType = quotaType; + } - return defined; - } + /** + * Read only attribute + * + * @return boolean + */ + public boolean getDefined() { - public boolean getEnabled() { + return defined; + } - return enabled; - } + public boolean getEnabled() { - public void setEnabled(boolean enabled) { + return enabled; + } - this.enabled = enabled; - } + public void setEnabled(boolean enabled) { - public String getDevice() { + this.enabled = enabled; + } - return device; - } + public String getDevice() { - public void setDevice(String device) { + return device; + } - this.device = device; - } + public void setDevice(String device) { - public QuotaType getQuotaType() { + this.device = device; + } - return quotaType; - } + public QuotaType getQuotaType() { - public void setQuotaType(QuotaType quotaType) { + return quotaType; + } - this.quotaType = quotaType; - } + public void setQuotaType(QuotaType quotaType) { - /** - * Return the value of UserName or GroupName or FileSetName. The meaning of - * the value depends on QuotaType. - * - * @return the quotaElementName - */ - public String getQuotaElementName() { + this.quotaType = quotaType; + } - return quotaType.getValue(); - } + /** + * Return the value of UserName or GroupName or FileSetName. The meaning of the value depends on + * QuotaType. 
+ * + * @return the quotaElementName + */ + public String getQuotaElementName() { - @Override - public String toString() { + return quotaType.getValue(); + } - StringBuilder result = new StringBuilder(); - result.append("Quota : [ Defined:'" + defined + "' "); - result.append("Enabled:'" + enabled + "' "); - result.append("device:'" + device + "', "); - result.append("quotaType:'" + quotaType + " "); - result.append("]"); - return result.toString(); - } + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append("Quota : [ Defined:'" + defined + "' "); + result.append("Enabled:'" + enabled + "' "); + result.append("device:'" + device + "', "); + result.append("quotaType:'" + quotaType + " "); + result.append("]"); + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/QuotaInfo.java b/src/main/java/it/grid/storm/namespace/model/QuotaInfo.java index 15513484..22903c27 100644 --- a/src/main/java/it/grid/storm/namespace/model/QuotaInfo.java +++ b/src/main/java/it/grid/storm/namespace/model/QuotaInfo.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -8,81 +7,58 @@ import it.grid.storm.common.types.TimeUnit; /** - * - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2008 - *

- * - *

- * Company: - *

- * + * + *

<p>Description: + * + *
<p>Copyright: Copyright (c) 2008 + * + *
<p>
Company: + * * @author not attributable * @version 1.0 - * - * - * typedef struct gpfs_quotaInfo { gpfs_off64_t blockUsage; /* current - * block count * gpfs_off64_t blockHardLimit; * absolute limit on disk - * blks alloc * gpfs_off64_t blockSoftLimit; /* preferred limit on disk - * blks * gpfs_off64_t blockInDoubt; /* distributed shares + "lost" - * usage for blks * int inodeUsage; /* current # allocated inodes * int - * inodeHardLimit; /* absolute limit on allocated inodes * int - * inodeSoftLimit; /* preferred inode limit * int inodeInDoubt; /* - * distributed shares + "lost" usage for inodes * gpfs_uid_t quoId; /* - * uid, gid or fileset id int entryType; /* entry type, not used * - * unsigned int blockGraceTime; /* time limit for excessive disk use * - * unsigned int inodeGraceTime; /* time limit for excessive inode use * - * } gpfs_quotaInfo_t; - * - * Block Limits | File Limits Filesystem type KB quota limit in_doubt - * grace | files quota limit in_doubt grace Remarks gpfs_storm FILESET - * 110010268672 126953000960 126953125888 492384 none | 1796915 0 0 197 - * none - * - * blockUsage Current block count in 1 KB units. blockHardLimit - * Absolute limit on disk block allocation. blockSoftLimit Preferred - * limit on disk block allocation. blockInDoubt Distributed shares and - * block usage that have not been not accounted for. inodeUsage Current - * number of allocated inodes. inodeHardLimit Absolute limit on - * allocated inodes. inodeSoftLimit Preferred inode limit. inodeInDoubt - * Distributed inode share and inode usage that have not been accounted - * for. quoId user ID, group ID, or fileset ID. entryType Not used - * blockGraceTime Time limit (in seconds since the Epoch) for excessive - * disk use. inodeGraceTime Time limit (in seconds since the Epoch) for - * excessive inode use. - * - **/ - + *
<p>
typedef struct gpfs_quotaInfo { gpfs_off64_t blockUsage; /* current block count * + * gpfs_off64_t blockHardLimit; * absolute limit on disk blks alloc * gpfs_off64_t + * blockSoftLimit; /* preferred limit on disk blks * gpfs_off64_t blockInDoubt; /* distributed + * shares + "lost" usage for blks * int inodeUsage; /* current # allocated inodes * int + * inodeHardLimit; /* absolute limit on allocated inodes * int inodeSoftLimit; /* preferred + * inode limit * int inodeInDoubt; /* distributed shares + "lost" usage for inodes * gpfs_uid_t + * quoId; /* uid, gid or fileset id int entryType; /* entry type, not used * unsigned int + * blockGraceTime; /* time limit for excessive disk use * unsigned int inodeGraceTime; /* time + * limit for excessive inode use * } gpfs_quotaInfo_t; + *
<p>
Block Limits | File Limits Filesystem type KB quota limit in_doubt grace | files quota + * limit in_doubt grace Remarks gpfs_storm FILESET 110010268672 126953000960 126953125888 492384 + * none | 1796915 0 0 197 none + *
<p>
blockUsage Current block count in 1 KB units. blockHardLimit Absolute limit on disk block + * allocation. blockSoftLimit Preferred limit on disk block allocation. blockInDoubt Distributed + * shares and block usage that have not been not accounted for. inodeUsage Current number of + * allocated inodes. inodeHardLimit Absolute limit on allocated inodes. inodeSoftLimit Preferred + * inode limit. inodeInDoubt Distributed inode share and inode usage that have not been + * accounted for. quoId user ID, group ID, or fileset ID. entryType Not used blockGraceTime Time + * limit (in seconds since the Epoch) for excessive disk use. inodeGraceTime Time limit (in + * seconds since the Epoch) for excessive inode use. + */ public class QuotaInfo { - private String filesystemName = null; - private QuotaType quotaType = null; - private long blockUsage = -1L; - private long blockHardLimit = -1L; - private long blockSoftLimit = -1L; - private long blockInDoubt = -1L; - private long blockGraceTime = -1L; - private long inodeUsage = -1L; - private long inodeHardLimit = -1L; - private long inodeSoftLimit = -1L; - private long inodeInDoubt = -1L; - private long inodeGraceTime = -1L; - private String remarks = null; - private SizeUnit sizeUnit = SizeUnit.KILOBYTES; // Default values for Blocks - private TimeUnit timeUnit = TimeUnit.HOURS; // Default values is one week = 7 - // days = 168 hours - - public QuotaInfo() { + private String filesystemName = null; + private QuotaType quotaType = null; + private long blockUsage = -1L; + private long blockHardLimit = -1L; + private long blockSoftLimit = -1L; + private long blockInDoubt = -1L; + private long blockGraceTime = -1L; + private long inodeUsage = -1L; + private long inodeHardLimit = -1L; + private long inodeSoftLimit = -1L; + private long inodeInDoubt = -1L; + private long inodeGraceTime = -1L; + private String remarks = null; + private SizeUnit sizeUnit = SizeUnit.KILOBYTES; // Default values for Blocks + private TimeUnit timeUnit = TimeUnit.HOURS; // Default values is one week = 7 + // days = 168 hours - super(); - } + public QuotaInfo() { + super(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/QuotaType.java b/src/main/java/it/grid/storm/namespace/model/QuotaType.java index dd2a702d..dba37d05 100644 --- a/src/main/java/it/grid/storm/namespace/model/QuotaType.java +++ b/src/main/java/it/grid/storm/namespace/model/QuotaType.java @@ -1,142 +1,129 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class QuotaType { - private final int ordinalNumber; - private final String quotaType; - private final String stringSchema; - - private String value; - - public final static QuotaType FILESET = new QuotaType("FILESET", - "filesetName", 0); - public final static QuotaType USR = new QuotaType("USR", "userName", 1); - public final static QuotaType GRP = new QuotaType("GRP", "groupName", 2); - public final static QuotaType UNKNOWN = new QuotaType("UNKNOWN", - "Quota Type UNKNOWN!", -1); - - private QuotaType(String quotaType, String stringSchema, int ord) { - - this.quotaType = quotaType; - this.stringSchema = stringSchema; - ordinalNumber = ord; - } - - public static QuotaType buildQuotaType(QuotaType quotaType) { - - String schema = quotaType.stringSchema; - int ord = quotaType.getOrdinalNumber(); - String type = quotaType.quotaType; - return new QuotaType(type, schema, ord); - } - - /*************************************** - * WRITE METHODS - */ - public void setValue(String value) { - - this.value = value; - } - - /*************************************** - * READ METHODS - */ - - // Only get method for Name - public String getQuotaType() { - - return quotaType; - } - - // Only get method for Schema - public static String string(int ord) { - - return QuotaType.getQuotaType(ord).toString(); - } - - // Only get method for Ordinal Number - public int getOrdinalNumber() { - - return ordinalNumber; - } - - // Only get method for value (that is filesetName or userName or groupName) - public String getValue() { - - return value; - } - - // Only get method for Schema - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("QuotaType:").append(quotaType).append("=<").append(value) - .append(">"); - return sb.toString(); - } - - /** - * - * @param quotaType - * String - * @return QuotaType - */ - public static QuotaType getQuotaType(String quotaType) { - - if (quotaType.equals(FILESET.toString())) { - return QuotaType.FILESET; - } - if (quotaType.equals(USR.toString())) { - return QuotaType.USR; - } - if (quotaType.equals(GRP.toString())) { - return QuotaType.GRP; - } - return QuotaType.UNKNOWN; - } - - /** - * - * @param quotaType - * String - * @return QuotaType - */ - public static QuotaType getQuotaType(int quotaOrd) { - - if (quotaOrd == 0) { - return QuotaType.FILESET; - } - if (quotaOrd == 1) { - return QuotaType.USR; - } - if (quotaOrd == 2) { - return QuotaType.GRP; - } - return QuotaType.UNKNOWN; - } - - @Override - public int hashCode() { - - return ordinalNumber; - } - - @Override - public boolean equals(Object other) { - - boolean result = false; - if (other instanceof QuotaType) { - QuotaType qt = (QuotaType) other; - if (qt.ordinalNumber == ordinalNumber) { - result = true; - } - } - return result; - } + private final int ordinalNumber; + private final String quotaType; + private final String stringSchema; + + private String value; + public static final QuotaType FILESET = new QuotaType("FILESET", "filesetName", 0); + public static final QuotaType USR = new QuotaType("USR", "userName", 1); + public static final QuotaType GRP = new QuotaType("GRP", "groupName", 2); + public static final QuotaType UNKNOWN = new QuotaType("UNKNOWN", "Quota Type UNKNOWN!", -1); + + private QuotaType(String quotaType, String stringSchema, int ord) { + + this.quotaType = quotaType; + this.stringSchema = stringSchema; + ordinalNumber = ord; + } + + public static QuotaType 
buildQuotaType(QuotaType quotaType) { + + String schema = quotaType.stringSchema; + int ord = quotaType.getOrdinalNumber(); + String type = quotaType.quotaType; + return new QuotaType(type, schema, ord); + } + + /** ************************************* WRITE METHODS */ + public void setValue(String value) { + + this.value = value; + } + + /** ************************************* READ METHODS */ + + // Only get method for Name + public String getQuotaType() { + + return quotaType; + } + + // Only get method for Schema + public static String string(int ord) { + + return QuotaType.getQuotaType(ord).toString(); + } + + // Only get method for Ordinal Number + public int getOrdinalNumber() { + + return ordinalNumber; + } + + // Only get method for value (that is filesetName or userName or groupName) + public String getValue() { + + return value; + } + + // Only get method for Schema + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("QuotaType:").append(quotaType).append("=<").append(value).append(">"); + return sb.toString(); + } + + /** + * @param quotaType String + * @return QuotaType + */ + public static QuotaType getQuotaType(String quotaType) { + + if (quotaType.equals(FILESET.toString())) { + return QuotaType.FILESET; + } + if (quotaType.equals(USR.toString())) { + return QuotaType.USR; + } + if (quotaType.equals(GRP.toString())) { + return QuotaType.GRP; + } + return QuotaType.UNKNOWN; + } + + /** + * @param quotaType String + * @return QuotaType + */ + public static QuotaType getQuotaType(int quotaOrd) { + + if (quotaOrd == 0) { + return QuotaType.FILESET; + } + if (quotaOrd == 1) { + return QuotaType.USR; + } + if (quotaOrd == 2) { + return QuotaType.GRP; + } + return QuotaType.UNKNOWN; + } + + @Override + public int hashCode() { + + return ordinalNumber; + } + + @Override + public boolean equals(Object other) { + + boolean result = false; + if (other instanceof QuotaType) { + QuotaType qt = (QuotaType) other; + if (qt.ordinalNumber == ordinalNumber) { + result = true; + } + } + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/RetentionPolicy.java b/src/main/java/it/grid/storm/namespace/model/RetentionPolicy.java index b2ad381c..6dd157ea 100644 --- a/src/main/java/it/grid/storm/namespace/model/RetentionPolicy.java +++ b/src/main/java/it/grid/storm/namespace/model/RetentionPolicy.java @@ -1,55 +1,49 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class RetentionPolicy { - /** - * - **/ - private String retentionPolicy; - private String stringSchema; + /** + * + * + * + */ + private String retentionPolicy; - public final static RetentionPolicy CUSTODIAL = new RetentionPolicy( - "CUSTODIAL", "custodial"); - public final static RetentionPolicy OUTPUT = new RetentionPolicy("OUTPUT", - "output"); - public final static RetentionPolicy REPLICA = new RetentionPolicy("REPLICA", - "replica"); - public final static RetentionPolicy UNKNOWN = new RetentionPolicy("UNKNOWN", - "Retention policy UNKNOWN!"); + private String stringSchema; - private RetentionPolicy(String retentionPolicy, String stringSchema) { + public static final RetentionPolicy CUSTODIAL = new RetentionPolicy("CUSTODIAL", "custodial"); + public static final RetentionPolicy OUTPUT = new RetentionPolicy("OUTPUT", "output"); + public static final RetentionPolicy REPLICA = new RetentionPolicy("REPLICA", "replica"); + public static final RetentionPolicy UNKNOWN = + new RetentionPolicy("UNKNOWN", "Retention policy UNKNOWN!"); - this.retentionPolicy = retentionPolicy; - this.stringSchema = stringSchema; - } + private RetentionPolicy(String retentionPolicy, String stringSchema) { - // Only get method for Name - public String getRetentionPolicyName() { + this.retentionPolicy = retentionPolicy; + this.stringSchema = stringSchema; + } - return retentionPolicy; - } + // Only get method for Name + public String getRetentionPolicyName() { - // Only get method for Schema - public String toString() { + return retentionPolicy; + } - return this.stringSchema; - } + // Only get method for Schema + public String toString() { - public static RetentionPolicy getRetentionPolicy(String retentionPolicy) { + return this.stringSchema; + } - if (retentionPolicy.equals(RetentionPolicy.CUSTODIAL.toString())) - return RetentionPolicy.CUSTODIAL; - if (retentionPolicy.equals(RetentionPolicy.OUTPUT.toString())) - return RetentionPolicy.OUTPUT; - if (retentionPolicy.equals(RetentionPolicy.REPLICA.toString())) - return RetentionPolicy.REPLICA; - return RetentionPolicy.UNKNOWN; - } + public static RetentionPolicy getRetentionPolicy(String retentionPolicy) { + if (retentionPolicy.equals(RetentionPolicy.CUSTODIAL.toString())) + return RetentionPolicy.CUSTODIAL; + if (retentionPolicy.equals(RetentionPolicy.OUTPUT.toString())) return RetentionPolicy.OUTPUT; + if (retentionPolicy.equals(RetentionPolicy.REPLICA.toString())) return RetentionPolicy.REPLICA; + return RetentionPolicy.UNKNOWN; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/SAAuthzType.java b/src/main/java/it/grid/storm/namespace/model/SAAuthzType.java index 8e9051a0..c48a4cda 100644 --- a/src/main/java/it/grid/storm/namespace/model/SAAuthzType.java +++ b/src/main/java/it/grid/storm/namespace/model/SAAuthzType.java @@ -1,69 +1,68 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public class SAAuthzType { - private int authzTypeIndex = -1; - private String authzTypeName = "UNKNOWN"; + private int authzTypeIndex = -1; + private String authzTypeName = "UNKNOWN"; - public final static SAAuthzType FIXED = new SAAuthzType(1, "FIXED"); - public final static SAAuthzType AUTHZDB = new SAAuthzType(2, "AUTHZDB"); - public final static SAAuthzType UNKNOWN = new SAAuthzType(-1, "UNKNOWN"); + public static final SAAuthzType FIXED = new SAAuthzType(1, "FIXED"); + public static final SAAuthzType AUTHZDB = new SAAuthzType(2, "AUTHZDB"); + public static final SAAuthzType UNKNOWN = new SAAuthzType(-1, "UNKNOWN"); - private SAAuthzType(int authzTypeIndex, String authzTypeName) { + private SAAuthzType(int authzTypeIndex, String authzTypeName) { - this.authzTypeIndex = authzTypeIndex; - this.authzTypeName = authzTypeName; - } + this.authzTypeIndex = authzTypeIndex; + this.authzTypeName = authzTypeName; + } - public String getSAAuthzTypeName() { + public String getSAAuthzTypeName() { - return this.authzTypeName; - } + return this.authzTypeName; + } - public static SAAuthzType getSAType(String saType) { + public static SAAuthzType getSAType(String saType) { - if (saType.toLowerCase().replaceAll(" ", "") - .equals(FIXED.getSAAuthzTypeName().toLowerCase())) { - return FIXED; - } - if (saType.toLowerCase().replaceAll(" ", "") - .equals(AUTHZDB.getSAAuthzTypeName().toLowerCase())) { - return AUTHZDB; - } - return UNKNOWN; - } + if (saType.toLowerCase().replaceAll(" ", "").equals(FIXED.getSAAuthzTypeName().toLowerCase())) { + return FIXED; + } + if (saType + .toLowerCase() + .replaceAll(" ", "") + .equals(AUTHZDB.getSAAuthzTypeName().toLowerCase())) { + return AUTHZDB; + } + return UNKNOWN; + } - public int getSAAuthzTypeIndex() { + public int getSAAuthzTypeIndex() { - return authzTypeIndex; - } + return authzTypeIndex; + } - public int hashCode() { + public int hashCode() { - return authzTypeIndex; - } + return authzTypeIndex; + } - public boolean equals(Object o) { + public boolean equals(Object o) { - boolean result = false; - if (o instanceof SAAuthzType) { - SAAuthzType other = (SAAuthzType) o; - if (other.getSAAuthzTypeIndex() == this.authzTypeIndex) { - result = true; - } - } - return result; - } + boolean result = false; + if (o instanceof SAAuthzType) { + SAAuthzType other = (SAAuthzType) o; + if (other.getSAAuthzTypeIndex() == this.authzTypeIndex) { + result = true; + } + } + return result; + } - public String toString() { - - StringBuilder buf = new StringBuilder(); - buf.append(this.getSAAuthzTypeName()); - return buf.toString(); - } + public String toString() { + StringBuilder buf = new StringBuilder(); + buf.append(this.getSAAuthzTypeName()); + return buf.toString(); + } } diff --git a/src/main/java/it/grid/storm/namespace/model/SAInfo.java b/src/main/java/it/grid/storm/namespace/model/SAInfo.java index 33864d73..3c1617b2 100644 --- a/src/main/java/it/grid/storm/namespace/model/SAInfo.java +++ b/src/main/java/it/grid/storm/namespace/model/SAInfo.java @@ -1,18 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; +import com.google.common.collect.Lists; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.remote.Constants.HttpPerms; - import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import com.google.common.collect.Lists; - public class SAInfo { private String name; @@ -29,9 +26,7 @@ public class SAInfo { private List approachableRules; // Must have no-argument constructor - public SAInfo() { - - } + public SAInfo() {} public String getName() { @@ -159,9 +154,11 @@ public static SAInfo buildFromVFS(VirtualFS vfs) throws NamespaceException { sa.setName(vfs.getAliasName()); sa.setToken(vfs.getSpaceTokenDescription()); List vos = Lists.newArrayList(); - vfs.getApproachableRules().forEach(ar -> { - vos.add(ar.getSubjectRules().getVONameMatchingRule().getVOName()); - }); + vfs.getApproachableRules() + .forEach( + ar -> { + vos.add(ar.getSubjectRules().getVONameMatchingRule().getVOName()); + }); sa.setVos(vos); sa.setRoot(vfs.getRootPath()); sa.setStfnRoot(new ArrayList()); @@ -195,11 +192,11 @@ public static SAInfo buildFromVFS(VirtualFS vfs) throws NamespaceException { } if (!rule.getSubjectRules().getDNMatchingRule().isMatchAll()) { sa.getApproachableRules() - .add(rule.getSubjectRules().getDNMatchingRule().toShortSlashSeparatedString()); + .add(rule.getSubjectRules().getDNMatchingRule().toShortSlashSeparatedString()); } if (!rule.getSubjectRules().getVONameMatchingRule().isMatchAll()) { sa.getApproachableRules() - .add("vo:" + rule.getSubjectRules().getVONameMatchingRule().getVOName()); + .add("vo:" + rule.getSubjectRules().getVONameMatchingRule().getVOName()); } } if (sa.getApproachableRules().size() == 0) { @@ -239,5 +236,4 @@ public String toString() { builder.append("]"); return builder.toString(); } - } diff --git a/src/main/java/it/grid/storm/namespace/model/SAInfoV13.java b/src/main/java/it/grid/storm/namespace/model/SAInfoV13.java index af8687bc..e6eb9b57 100644 --- a/src/main/java/it/grid/storm/namespace/model/SAInfoV13.java +++ b/src/main/java/it/grid/storm/namespace/model/SAInfoV13.java @@ -1,210 +1,203 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.remote.Constants.HttpPerms; - import java.util.ArrayList; import java.util.Iterator; import java.util.List; public class SAInfoV13 { - private String name; - private String token; - private String voname; - private String root; - private String storageclass; - private List stfnRoot; - private String retentionPolicy; - private String accessLatency; - private List protocols; - private HttpPerms anonymous; - private long availableNearlineSpace; - private List approachableRules; - - // Must have no-argument constructor - public SAInfoV13() { + private String name; + private String token; + private String voname; + private String root; + private String storageclass; + private List stfnRoot; + private String retentionPolicy; + private String accessLatency; + private List protocols; + private HttpPerms anonymous; + private long availableNearlineSpace; + private List approachableRules; - } + // Must have no-argument constructor + public SAInfoV13() {} - public String getName() { + public String getName() { - return name; - } + return name; + } - public void setName(String name) { + public void setName(String name) { - this.name = name; - } + this.name = name; + } - public String getToken() { + public String getToken() { - return token; - } + return token; + } - public void setToken(String token) { + public void setToken(String token) { - this.token = token; - } + this.token = token; + } - public String getVoname() { + public String getVoname() { - return voname; - } + return voname; + } - public void setVoname(String voname) { + public void setVoname(String voname) { - this.voname = voname; - } + this.voname = voname; + } - public String getRoot() { + public String getRoot() { - return root; - } + return root; + } - public void setRoot(String root) { + public void setRoot(String root) { - this.root = root; - } + this.root = root; + } - public String getStorageclass() { + public String getStorageclass() { - return storageclass; - } + return storageclass; + } - public void setStorageclass(String storageclass) { + public void setStorageclass(String storageclass) { - this.storageclass = storageclass; - } + this.storageclass = storageclass; + } - public List getStfnRoot() { + public List getStfnRoot() { - return stfnRoot; - } + return stfnRoot; + } - public void setStfnRoot(List stfnRoot) { + public void setStfnRoot(List stfnRoot) { - this.stfnRoot = stfnRoot; - } + this.stfnRoot = stfnRoot; + } - public String getRetentionPolicy() { + public String getRetentionPolicy() { - return retentionPolicy; - } + return retentionPolicy; + } - public void setRetentionPolicy(String retentionPolicy) { + public void setRetentionPolicy(String retentionPolicy) { - this.retentionPolicy = retentionPolicy; - } + this.retentionPolicy = retentionPolicy; + } - public String getAccessLatency() { + public String getAccessLatency() { - return accessLatency; - } + return accessLatency; + } - public void setAccessLatency(String accessLatency) { + public void setAccessLatency(String accessLatency) { - this.accessLatency = accessLatency; - } + this.accessLatency = accessLatency; + } - public List getProtocols() { + public List getProtocols() { - return protocols; - } + return protocols; + } - public void setProtocols(List protocols) { + public void setProtocols(List protocols) { - this.protocols = protocols; - } + this.protocols = protocols; + } - public HttpPerms 
getAnonymous() { + public HttpPerms getAnonymous() { - return anonymous; - } + return anonymous; + } - public void setAnonymous(HttpPerms anonymous) { + public void setAnonymous(HttpPerms anonymous) { - this.anonymous = anonymous; - } + this.anonymous = anonymous; + } - public long getAvailableNearlineSpace() { + public long getAvailableNearlineSpace() { - return availableNearlineSpace; - } + return availableNearlineSpace; + } - public void setAvailableNearlineSpace(long availableNearlineSpace) { + public void setAvailableNearlineSpace(long availableNearlineSpace) { - this.availableNearlineSpace = availableNearlineSpace; - } + this.availableNearlineSpace = availableNearlineSpace; + } - public List getApproachableRules() { + public List getApproachableRules() { - return approachableRules; - } + return approachableRules; + } - public void setApproachableRules(List approachableRules) { + public void setApproachableRules(List approachableRules) { - this.approachableRules = approachableRules; - } + this.approachableRules = approachableRules; + } - public static SAInfoV13 buildFromVFS(VirtualFS vfs) throws NamespaceException { + public static SAInfoV13 buildFromVFS(VirtualFS vfs) throws NamespaceException { - SAInfoV13 sa = new SAInfoV13(); + SAInfoV13 sa = new SAInfoV13(); - sa.setName(vfs.getAliasName()); - sa.setToken(vfs.getSpaceTokenDescription()); - sa.setVoname(vfs.getApproachableRules().get(0).getSubjectRules() - .getVONameMatchingRule().getVOName()); - sa.setRoot(vfs.getRootPath()); - sa.setStfnRoot(new ArrayList()); - for (MappingRule rule : vfs.getMappingRules()) { - sa.getStfnRoot().add(rule.getStFNRoot()); - } - sa.setProtocols(new ArrayList()); - Iterator protocolsIterator = vfs.getCapabilities() - .getAllManagedProtocols().iterator(); - while (protocolsIterator.hasNext()) { - sa.getProtocols().add(protocolsIterator.next().getSchema()); - } - if (vfs.isHttpWorldReadable()) { - if (vfs.isApproachableByAnonymous()) { - sa.setAnonymous(HttpPerms.READWRITE); - } else { - sa.setAnonymous(HttpPerms.READ); - } - } else { - sa.setAnonymous(HttpPerms.NOREAD); - } - sa.setStorageclass(vfs.getStorageClassType().getStorageClassTypeString()); - sa.setRetentionPolicy(vfs.getProperties().getRetentionPolicy() - .getRetentionPolicyName()); - sa.setAccessLatency(vfs.getProperties().getAccessLatency() - .getAccessLatencyName()); - sa.setAvailableNearlineSpace(vfs.getAvailableNearlineSpace().value()); - sa.setApproachableRules(new ArrayList()); - for (ApproachableRule rule : vfs.getApproachableRules()) { - if (rule.getSubjectRules().getDNMatchingRule().isMatchAll() - && rule.getSubjectRules().getVONameMatchingRule().isMatchAll()) { - continue; - } - if (!rule.getSubjectRules().getDNMatchingRule().isMatchAll()) { - sa.getApproachableRules().add( - rule.getSubjectRules().getDNMatchingRule() - .toShortSlashSeparatedString()); - } - if (!rule.getSubjectRules().getVONameMatchingRule().isMatchAll()) { - sa.getApproachableRules().add( - "vo:" + rule.getSubjectRules().getVONameMatchingRule().getVOName()); - } - } - if (sa.getApproachableRules().size() == 0) { - sa.getApproachableRules().add("'ALL'"); - } - - return sa; - } -} \ No newline at end of file + sa.setName(vfs.getAliasName()); + sa.setToken(vfs.getSpaceTokenDescription()); + sa.setVoname( + vfs.getApproachableRules().get(0).getSubjectRules().getVONameMatchingRule().getVOName()); + sa.setRoot(vfs.getRootPath()); + sa.setStfnRoot(new ArrayList()); + for (MappingRule rule : vfs.getMappingRules()) { + sa.getStfnRoot().add(rule.getStFNRoot()); + } + 
sa.setProtocols(new ArrayList()); + Iterator protocolsIterator = + vfs.getCapabilities().getAllManagedProtocols().iterator(); + while (protocolsIterator.hasNext()) { + sa.getProtocols().add(protocolsIterator.next().getSchema()); + } + if (vfs.isHttpWorldReadable()) { + if (vfs.isApproachableByAnonymous()) { + sa.setAnonymous(HttpPerms.READWRITE); + } else { + sa.setAnonymous(HttpPerms.READ); + } + } else { + sa.setAnonymous(HttpPerms.NOREAD); + } + sa.setStorageclass(vfs.getStorageClassType().getStorageClassTypeString()); + sa.setRetentionPolicy(vfs.getProperties().getRetentionPolicy().getRetentionPolicyName()); + sa.setAccessLatency(vfs.getProperties().getAccessLatency().getAccessLatencyName()); + sa.setAvailableNearlineSpace(vfs.getAvailableNearlineSpace().value()); + sa.setApproachableRules(new ArrayList()); + for (ApproachableRule rule : vfs.getApproachableRules()) { + if (rule.getSubjectRules().getDNMatchingRule().isMatchAll() + && rule.getSubjectRules().getVONameMatchingRule().isMatchAll()) { + continue; + } + if (!rule.getSubjectRules().getDNMatchingRule().isMatchAll()) { + sa.getApproachableRules() + .add(rule.getSubjectRules().getDNMatchingRule().toShortSlashSeparatedString()); + } + if (!rule.getSubjectRules().getVONameMatchingRule().isMatchAll()) { + sa.getApproachableRules() + .add("vo:" + rule.getSubjectRules().getVONameMatchingRule().getVOName()); + } + } + if (sa.getApproachableRules().size() == 0) { + sa.getApproachableRules().add("'ALL'"); + } + + return sa; + } +} diff --git a/src/main/java/it/grid/storm/namespace/model/StoRIType.java b/src/main/java/it/grid/storm/namespace/model/StoRIType.java index 55fc9676..636ef355 100644 --- a/src/main/java/it/grid/storm/namespace/model/StoRIType.java +++ b/src/main/java/it/grid/storm/namespace/model/StoRIType.java @@ -1,132 +1,110 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; /** - *

* Title: - *

- * - *

- * Description: - *

- * - * - * data children attr physical FILE T F T T FOLDER F T T T LINK F F T T SPACE F - * F T T/F SPACE_BOUND T F T T IMAGINARY F F F F - * - * - *

- * Copyright: Copyright (c) 2006 - *

- * - *

- * Company: - *

- * + * + *

<p>Description: data children attr physical FILE T F T T FOLDER F T T T LINK F F T T SPACE F F T + * T/F SPACE_BOUND T F T T IMAGINARY F F F F + * + *
<p>Copyright: Copyright (c) 2006 + * + *
<p>
Company: + * * @author not attributable * @version 1.0 */ public class StoRIType { - private final String typeName; - private boolean dataContent; - private boolean children; - private boolean attributes; - private boolean physical; - - // Means that StoRI has the correspondent FILE in underlying file system. - public static final StoRIType FILE = new StoRIType("file", true, false, true, - true); - // Means that StoRI corresponds to a FOLDER in underlying file system. - public static final StoRIType FOLDER = new StoRIType("folder", false, true, - true, true); - // Means that StoRI corresponds to a LINK in underlying file system. - public static final StoRIType LINK = new StoRIType("link", false, false, - true, true); - // Means that StoRI corresponds to a FILE representing a SPACE in virtual file - // system. - // and the SPACE is not alloted for a Logical File, that is could not exists - // the physical file for it. - public static final StoRIType SPACE = new StoRIType("space", false, false, - true, false); - // Means that StoRI corresponds to a FILE representing a SPACE in underlying - // file system. - // and the SPACE is alloted for a Logical File, that is exists a physical file - // for it. - public static final StoRIType SPACE_BOUND = new StoRIType("spaceBound", true, - false, true, true); - // Means that StoRI has the correspondent FILE in underlying file system. - public static final StoRIType IMAGINARY = new StoRIType("imaginary", false, - false, false, false); - - public static final StoRIType UNKNOWN = new StoRIType("unknown", false, - false, false, false); - - private StoRIType(String type, boolean dataContent, boolean children, - boolean attributes, boolean physical) { - - this.typeName = type; - this.dataContent = dataContent; - this.children = children; - this.attributes = attributes; - this.physical = physical; - } - - public boolean holdsChildren() { - - return this.children; - } - - public boolean containData() { - - return this.dataContent; - } - - public boolean holdsAttributes() { - - return this.attributes; - } - - public boolean isPhysical() { - - return this.physical; - } - - public String toString() { - - return typeName; - } - - public boolean equals(Object obj) { - - if (obj == null) { - return false; - } - if (obj instanceof StoRIType) { - StoRIType storyType = (StoRIType) obj; - if (storyType.toString().toLowerCase() - .equals(this.toString().toLowerCase())) { - return true; - } - } else { - return false; - } - return false; - } - - @Override - public int hashCode() { - - int result = 17; - result = 31 * result + (typeName != null ? typeName.hashCode() : 0); - result = 31 * result + (dataContent ? 1 : 0); - result = 31 * result + (children ? 1 : 0); - result = 31 * result + (attributes ? 1 : 0); - result = 31 * result + (physical ? 1 : 0); - return result; - } - + private final String typeName; + private boolean dataContent; + private boolean children; + private boolean attributes; + private boolean physical; + + // Means that StoRI has the correspondent FILE in underlying file system. + public static final StoRIType FILE = new StoRIType("file", true, false, true, true); + // Means that StoRI corresponds to a FOLDER in underlying file system. + public static final StoRIType FOLDER = new StoRIType("folder", false, true, true, true); + // Means that StoRI corresponds to a LINK in underlying file system. 
+ public static final StoRIType LINK = new StoRIType("link", false, false, true, true); + // Means that StoRI corresponds to a FILE representing a SPACE in virtual file + // system. + // and the SPACE is not alloted for a Logical File, that is could not exists + // the physical file for it. + public static final StoRIType SPACE = new StoRIType("space", false, false, true, false); + // Means that StoRI corresponds to a FILE representing a SPACE in underlying + // file system. + // and the SPACE is alloted for a Logical File, that is exists a physical file + // for it. + public static final StoRIType SPACE_BOUND = new StoRIType("spaceBound", true, false, true, true); + // Means that StoRI has the correspondent FILE in underlying file system. + public static final StoRIType IMAGINARY = new StoRIType("imaginary", false, false, false, false); + + public static final StoRIType UNKNOWN = new StoRIType("unknown", false, false, false, false); + + private StoRIType( + String type, boolean dataContent, boolean children, boolean attributes, boolean physical) { + + this.typeName = type; + this.dataContent = dataContent; + this.children = children; + this.attributes = attributes; + this.physical = physical; + } + + public boolean holdsChildren() { + + return this.children; + } + + public boolean containData() { + + return this.dataContent; + } + + public boolean holdsAttributes() { + + return this.attributes; + } + + public boolean isPhysical() { + + return this.physical; + } + + public String toString() { + + return typeName; + } + + public boolean equals(Object obj) { + + if (obj == null) { + return false; + } + if (obj instanceof StoRIType) { + StoRIType storyType = (StoRIType) obj; + if (storyType.toString().toLowerCase().equals(this.toString().toLowerCase())) { + return true; + } + } else { + return false; + } + return false; + } + + @Override + public int hashCode() { + + int result = 17; + result = 31 * result + (typeName != null ? typeName.hashCode() : 0); + result = 31 * result + (dataContent ? 1 : 0); + result = 31 * result + (children ? 1 : 0); + result = 31 * result + (attributes ? 1 : 0); + result = 31 * result + (physical ? 1 : 0); + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/StorageClassType.java b/src/main/java/it/grid/storm/namespace/model/StorageClassType.java index a293aebb..221bd8ed 100644 --- a/src/main/java/it/grid/storm/namespace/model/StorageClassType.java +++ b/src/main/java/it/grid/storm/namespace/model/StorageClassType.java @@ -1,69 +1,66 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; public enum StorageClassType { + T0D0("T0D0", "T0D0"), + T0D1("T0D1", "T0D1"), + T1D0("T1D0", "T1D0"), + T1D1("T1D1", "T1D1"), + UNKNOWN("UNKNOWN", "Storage Class Type UNKNOWN!"); - T0D0("T0D0", "T0D0"), T0D1("T0D1", "T0D1"), T1D0("T1D0", "T1D0"), T1D1( - "T1D1", "T1D1"), UNKNOWN("UNKNOWN", "Storage Class Type UNKNOWN!"); + private String storageClassTypeString; + private String stringSchema; - private String storageClassTypeString; - private String stringSchema; + private StorageClassType(String storageClassTypeString, String stringSchema) { - private StorageClassType(String storageClassTypeString, String stringSchema) { + this.storageClassTypeString = storageClassTypeString; + this.stringSchema = stringSchema; + } - this.storageClassTypeString = storageClassTypeString; - this.stringSchema = stringSchema; + /** + * @param storageClassTypeString String + * @return StorageClassType + */ + public static StorageClassType getStorageClassType(String storageClassTypeString) { - } + for (StorageClassType sct : StorageClassType.values()) { + if (sct.getStorageClassTypeString().equals(storageClassTypeString)) { + return sct; + } + } - /** - * - * @param storageClassTypeString - * String - * @return StorageClassType - */ - public static StorageClassType getStorageClassType( - String storageClassTypeString) { + return UNKNOWN; + } - for (StorageClassType sct : StorageClassType.values()) { - if (sct.getStorageClassTypeString().equals(storageClassTypeString)) { - return sct; - } - } + /** + * Returns the String representation of this storage class type instance. + * + * @return the String representation of this storage class type instance. + */ + public String getStorageClassTypeString() { - return UNKNOWN; - } + return storageClassTypeString; + } - /** - * Returns the String representation of this storage class type instance. - * - * @return the String representation of this storage class type instance. - */ - public String getStorageClassTypeString() { + public String getStringSchema() { - return storageClassTypeString; - } + return stringSchema; + } - public String getStringSchema() { + public boolean isTapeEnabled() { - return stringSchema; - } + if (this.equals(T1D0) || this.equals(T1D1)) { + return true; + } - public boolean isTapeEnabled() { + return false; + } - if (this.equals(T1D0) || this.equals(T1D1)) { - return true; - } + // Only get method for Schema + public String toString() { - return false; - } - - // Only get method for Schema - public String toString() { - - return this.stringSchema; - } + return this.stringSchema; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/SubjectRules.java b/src/main/java/it/grid/storm/namespace/model/SubjectRules.java index 6a2c89ce..2130df8f 100644 --- a/src/main/java/it/grid/storm/namespace/model/SubjectRules.java +++ b/src/main/java/it/grid/storm/namespace/model/SubjectRules.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -8,58 +7,47 @@ import it.grid.storm.griduser.VONameMatchingRule; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2006 - *

- * - *

- * Company: INFN-CNAF - *

- * + * + *

<p>Description: + * + *
<p>Copyright: Copyright (c) 2006 + * + *
<p>
Company: INFN-CNAF + * * @author R.Zappi * @version 1.0 */ public class SubjectRules { - private DNMatchingRule dnMatchingRule = null; - private VONameMatchingRule voNameMatchingRule = null; + private DNMatchingRule dnMatchingRule = null; + private VONameMatchingRule voNameMatchingRule = null; - public SubjectRules() { + public SubjectRules() { - this.dnMatchingRule = DNMatchingRule.buildMatchAllDNMatchingRule(); - this.voNameMatchingRule = VONameMatchingRule - .buildMatchAllVONameMatchingRule(); - } + this.dnMatchingRule = DNMatchingRule.buildMatchAllDNMatchingRule(); + this.voNameMatchingRule = VONameMatchingRule.buildMatchAllVONameMatchingRule(); + } - public SubjectRules(String dn) { + public SubjectRules(String dn) { - this.dnMatchingRule = new DNMatchingRule(dn); - this.voNameMatchingRule = VONameMatchingRule - .buildMatchAllVONameMatchingRule(); - } + this.dnMatchingRule = new DNMatchingRule(dn); + this.voNameMatchingRule = VONameMatchingRule.buildMatchAllVONameMatchingRule(); + } - public SubjectRules(String dn, String voName) { + public SubjectRules(String dn, String voName) { - this.dnMatchingRule = new DNMatchingRule(dn); - this.voNameMatchingRule = new VONameMatchingRule(voName); - } + this.dnMatchingRule = new DNMatchingRule(dn); + this.voNameMatchingRule = new VONameMatchingRule(voName); + } - public DNMatchingRule getDNMatchingRule() { + public DNMatchingRule getDNMatchingRule() { - return this.dnMatchingRule; - } + return this.dnMatchingRule; + } - public VONameMatchingRule getVONameMatchingRule() { - - return this.voNameMatchingRule; - } + public VONameMatchingRule getVONameMatchingRule() { + return this.voNameMatchingRule; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/TransportProtocol.java b/src/main/java/it/grid/storm/namespace/model/TransportProtocol.java index 535613b6..1e1b200f 100644 --- a/src/main/java/it/grid/storm/namespace/model/TransportProtocol.java +++ b/src/main/java/it/grid/storm/namespace/model/TransportProtocol.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; @@ -8,101 +7,99 @@ public class TransportProtocol { - private int protocolID = -1; - private Protocol protocol = null; - private Authority service = null; + private int protocolID = -1; + private Protocol protocol = null; + private Authority service = null; - public TransportProtocol(Protocol protocol, Authority service) { + public TransportProtocol(Protocol protocol, Authority service) { - this.protocol = protocol; - this.service = service; - } + this.protocol = protocol; + this.service = service; + } - public TransportProtocol(Protocol protocol) { + public TransportProtocol(Protocol protocol) { - this.protocol = protocol; - } + this.protocol = protocol; + } - public Protocol getProtocol() { + public Protocol getProtocol() { - return this.protocol; - } + return this.protocol; + } - // Used in Protocol Pool definition - public void setProtocolID(int id) { + // Used in Protocol Pool definition + public void setProtocolID(int id) { - this.protocolID = id; - } + this.protocolID = id; + } - // Used in Protocol Pool definition - public int getProtocolID() { + // Used in Protocol Pool definition + public int getProtocolID() { - return this.protocolID; - } + return this.protocolID; + } - public Authority getAuthority() { + public Authority getAuthority() { - if (this.protocol.equals(Protocol.FILE)) { - return Authority.EMPTY; - } else { - return this.service; - } - } + if (this.protocol.equals(Protocol.FILE)) { + return Authority.EMPTY; + } else { + return this.service; + } + } - public void setLocalAuthority() { + public void setLocalAuthority() { - if (!this.protocol.equals(Protocol.FILE)) { - this.service = new Authority(NamingConst.getServiceDefaultHost()); - } - } + if (!this.protocol.equals(Protocol.FILE)) { + this.service = new Authority(NamingConst.getServiceDefaultHost()); + } + } - public void setAuthority(Authority service) { + public void setAuthority(Authority service) { - this.service = service; - } + this.service = service; + } - private String getURIRoot() { + private String getURIRoot() { - StringBuilder sb = new StringBuilder(); - if (protocolID != -1) - sb.append("[id:" + this.protocolID + "] "); - sb.append(protocol.getSchema()); - sb.append("://"); - if (service != null) { - sb.append(service); - } - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + if (protocolID != -1) sb.append("[id:" + this.protocolID + "] "); + sb.append(protocol.getSchema()); + sb.append("://"); + if (service != null) { + sb.append(service); + } + return sb.toString(); + } - public String toString() { + public String toString() { - return getURIRoot(); - } + return getURIRoot(); + } - public boolean equals(Object other) { + public boolean equals(Object other) { - boolean result = false; - if (other instanceof TransportProtocol) { - TransportProtocol otherTP = (TransportProtocol) other; - if (otherTP.getProtocol().equals(this.getProtocol())) { // Protocol is - // equal - // Check if the Authority is equal. - if (otherTP.getAuthority().equals(this.getAuthority())) { - result = true; - } - } - } - return result; - } + boolean result = false; + if (other instanceof TransportProtocol) { + TransportProtocol otherTP = (TransportProtocol) other; + if (otherTP.getProtocol().equals(this.getProtocol())) { // Protocol is + // equal + // Check if the Authority is equal. 
+ if (otherTP.getAuthority().equals(this.getAuthority())) { + result = true; + } + } + } + return result; + } - @Override - public int hashCode() { - - int result = 17; - result = 31 * result + protocolID; - result = 31 * result + ((protocol == null) ? 0 : protocol.hashCode()); - result = 31 * result + ((service == null) ? 0 : service.hashCode()); - return result; - } + @Override + public int hashCode() { + int result = 17; + result = 31 * result + protocolID; + result = 31 * result + ((protocol == null) ? 0 : protocol.hashCode()); + result = 31 * result + ((service == null) ? 0 : service.hashCode()); + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/model/VirtualFS.java b/src/main/java/it/grid/storm/namespace/model/VirtualFS.java index b4f8c68d..90930765 100644 --- a/src/main/java/it/grid/storm/namespace/model/VirtualFS.java +++ b/src/main/java/it/grid/storm/namespace/model/VirtualFS.java @@ -1,25 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import static it.grid.storm.metrics.StormMetricRegistry.METRIC_REGISTRY; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Hashtable; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; - import it.grid.storm.balancer.BalancingStrategy; -import it.grid.storm.balancer.Node; import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.common.GUID; import it.grid.storm.common.types.PFN; @@ -57,6 +44,14 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TSpaceType; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Hashtable; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class VirtualFS { @@ -89,10 +84,10 @@ public class VirtualFS { // For debug purpose only public long creationTime = System.currentTimeMillis(); - /***************************************************************************** - * BUILDING METHODs - ****************************************************************************/ - + /** + * *************************************************************************** BUILDING METHODs + * ************************************************************************** + */ public void setAliasName(String name) { this.aliasName = name; @@ -110,9 +105,9 @@ public void setFSDriver(Class fsDriver) throws NamespaceException { this.genericFS = makeFSInstance(); fsWrapper = RandomWaitFilesystemAdapter.maybeWrapFilesystem(fsWrapper); - this.fsWrapper = new MetricsFilesystemAdapter( - new Filesystem(getFSDriverInstance()), - METRIC_REGISTRY.getRegistry()); + this.fsWrapper = + new MetricsFilesystemAdapter( + new Filesystem(getFSDriverInstance()), METRIC_REGISTRY.getRegistry()); } public void setSpaceTokenDescription(String spaceTokenDescription) { @@ -135,8 +130,7 @@ public void setProperties(PropertyInterface prop) { this.properties = prop; } - public void setSpaceSystemDriver(Class spaceDriver) - throws NamespaceException { + public void setSpaceSystemDriver(Class spaceDriver) throws 
NamespaceException { if (spaceDriver == null) { throw new NamespaceException("NULL space driver"); @@ -193,25 +187,22 @@ private String buildRootPath(String rootPath) throws NamespaceException { rootPathUri = new URI(rootPath); } catch (URISyntaxException e) { throw new NamespaceException( - "Unable to set rootPath. Invalid string. URISyntaxException: " - + e.getMessage()); + "Unable to set rootPath. Invalid string. URISyntaxException: " + e.getMessage()); } return rootPathUri.normalize().toString(); } private void buildStoRIRoot(String rootPath) throws NamespaceException { - /** - * @todo - */ + /** @todo */ // storiRoot = new StoRIImpl(this, rootPath, StoRIType.FOLDER); } - /***************************************************************************** - * READ METHODs - ****************************************************************************/ - + /** + * *************************************************************************** READ METHODs + * ************************************************************************** + */ public String getFSType() { return this.type; @@ -236,11 +227,10 @@ public TSizeInBytes getUsedOnlineSpace() throws NamespaceException { TSizeInBytes result = TSizeInBytes.makeEmpty(); /** - * @todo : This method must contact Space Manager (or who for him) to - * retrieve the real situation - * - * @todo : Contact Space Catalog to retrieve the logical space occupied. - * This space must to be equal to space occupied in underlying FS. + * @todo : This method must contact Space Manager (or who for him) to retrieve the real + * situation + * @todo : Contact Space Catalog to retrieve the logical space occupied. This space must to be + * equal to space occupied in underlying FS. */ return result; } @@ -249,14 +239,12 @@ public TSizeInBytes getAvailableOnlineSpace() throws NamespaceException { TSizeInBytes result = TSizeInBytes.makeEmpty(); /** - * @todo : This method must contact Space Manager (or who for him) to - * retrieve the real situation - * - * @todo : Contact Space Catalog to retrieve the logical space occupied. - * This space must to be equal to space occupied in underlying FS. + * @todo : This method must contact Space Manager (or who for him) to retrieve the real + * situation + * @todo : Contact Space Catalog to retrieve the logical space occupied. This space must to be + * equal to space occupied in underlying FS. */ return result; - } public TSizeInBytes getUsedNearlineSpace() throws NamespaceException { @@ -302,51 +290,46 @@ public List getMappingRules() throws NamespaceException { if (this.mappingRules.isEmpty()) { throw new NamespaceException( - "No one MAPPING RULES bound with this VFS (" + aliasName + "). "); + "No one MAPPING RULES bound with this VFS (" + aliasName + "). "); } return this.mappingRules; } - public List getApproachableRules() - throws NamespaceException { + public List getApproachableRules() throws NamespaceException { if (this.approachableRules.isEmpty()) { throw new NamespaceException( - "No one APPROACHABLE RULES bound with this VFS (" + aliasName + "). "); + "No one APPROACHABLE RULES bound with this VFS (" + aliasName + "). 
"); } return this.approachableRules; } /** * makeFSInstance - * + * * @return genericfs */ private genericfs makeFSInstance() throws NamespaceException { genericfs fs = null; if (fsDriver == null) { - throw new NamespaceException( - "Cannot build FS Driver istance without a valid Driver Class!"); + throw new NamespaceException("Cannot build FS Driver istance without a valid Driver Class!"); } Class fsArgumentsClass[] = new Class[1]; fsArgumentsClass[0] = String.class; - Object[] fsArguments = new Object[] { this.rootPath }; + Object[] fsArguments = new Object[] {this.rootPath}; Constructor fsConstructor = null; try { fsConstructor = fsDriver.getConstructor(fsArgumentsClass); } catch (SecurityException ex) { - log.error( - "Unable to retrieve the FS Driver Constructor. Security problem.", ex); + log.error("Unable to retrieve the FS Driver Constructor. Security problem.", ex); throw new NamespaceException( - "Unable to retrieve the FS Driver Constructor. Security problem.", ex); + "Unable to retrieve the FS Driver Constructor. Security problem.", ex); } catch (NoSuchMethodException ex) { - log.error( - "Unable to retrieve the FS Driver Constructor. Security problem.", ex); + log.error("Unable to retrieve the FS Driver Constructor. Security problem.", ex); throw new NamespaceException( - "Unable to retrieve the FS Driver Constructor. No such constructor.", - ex); + "Unable to retrieve the FS Driver Constructor. No such constructor.", ex); } try { fs = (genericfs) fsConstructor.newInstance(fsArguments); @@ -357,21 +340,17 @@ private genericfs makeFSInstance() throws NamespaceException { log.debug("VFS Ex Stack: "); ex1.printStackTrace(); - throw new NamespaceException("Unable to instantiate the FS Driver. ", - ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. ", ex1); } catch (IllegalArgumentException ex1) { - log.error("Unable to instantiate the FS Driver. Using wrong argument.", - ex1); + log.error("Unable to instantiate the FS Driver. Using wrong argument.", ex1); throw new NamespaceException( - "Unable to instantiate the FS Driver. Using wrong argument.", ex1); + "Unable to instantiate the FS Driver. Using wrong argument.", ex1); } catch (IllegalAccessException ex1) { log.error("Unable to instantiate the FS Driver. Illegal Access.", ex1); - throw new NamespaceException( - "Unable to instantiate the FS Driver. Illegal Access.", ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. Illegal Access.", ex1); } catch (InstantiationException ex1) { log.error("Unable to instantiate the FS Driver. Generic problem..", ex1); - throw new NamespaceException( - "Unable to instantiate the FS Driver. Generic problem..", ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. 
Generic problem..", ex1); } return fs; @@ -384,10 +363,8 @@ public FilesystemIF getFilesystem() throws NamespaceException { FilesystemIF fs = new Filesystem(getFSDriverInstance()); fs = RandomWaitFilesystemAdapter.maybeWrapFilesystem(fs); - - fsWrapper = new MetricsFilesystemAdapter(fs, - METRIC_REGISTRY.getRegistry()); + fsWrapper = new MetricsFilesystemAdapter(fs, METRIC_REGISTRY.getRegistry()); } return this.fsWrapper; } @@ -407,7 +384,7 @@ public SpaceSystem getSpaceSystemDriverInstance() throws NamespaceException { /** * makeSpaceSystemInstance - * + * * @return SpaceSystem */ private SpaceSystem makeSpaceSystemInstance() throws NamespaceException { @@ -416,72 +393,52 @@ private SpaceSystem makeSpaceSystemInstance() throws NamespaceException { if (spaceSystemDriver == null) { throw new NamespaceException( - "Cannot build Space Driver istance without a valid Driver Class!"); + "Cannot build Space Driver istance without a valid Driver Class!"); } // Check if SpaceSystem is GPFSSpaceSystem used for GPFS FS // Check if SpaceSystem is MockSpaceSystem used for Posix FS - if ((this.spaceSystemDriver.getName() - .equals(GPFSSpaceSystem.class.getName())) - || (this.spaceSystemDriver.getName() - .equals(MockSpaceSystem.class.getName()))) { + if ((this.spaceSystemDriver.getName().equals(GPFSSpaceSystem.class.getName())) + || (this.spaceSystemDriver.getName().equals(MockSpaceSystem.class.getName()))) { // The class type argument is the mount point of GPFS file system Class ssArgumentsClass[] = new Class[1]; ssArgumentsClass[0] = String.class; - Object[] ssArguments = new Object[] { this.rootPath }; + Object[] ssArguments = new Object[] {this.rootPath}; Constructor ssConstructor = null; try { ssConstructor = spaceSystemDriver.getConstructor(ssArgumentsClass); } catch (SecurityException ex) { - log.error( - "Unable to retrieve the FS Driver Constructor. Security problem.", - ex); + log.error("Unable to retrieve the FS Driver Constructor. Security problem.", ex); throw new NamespaceException( - "Unable to retrieve the FS Driver Constructor. Security problem.", - ex); + "Unable to retrieve the FS Driver Constructor. Security problem.", ex); } catch (NoSuchMethodException ex) { - log.error( - "Unable to retrieve the FS Driver Constructor. Security problem.", - ex); + log.error("Unable to retrieve the FS Driver Constructor. Security problem.", ex); throw new NamespaceException( - "Unable to retrieve the FS Driver Constructor. No such constructor.", - ex); + "Unable to retrieve the FS Driver Constructor. No such constructor.", ex); } try { ss = (SpaceSystem) ssConstructor.newInstance(ssArguments); } catch (InvocationTargetException ex1) { - log.error("Unable to instantiate the SpaceSystem Driver. Wrong target.", - ex1); - throw new NamespaceException("Unable to instantiate the FS Driver. ", - ex1); + log.error("Unable to instantiate the SpaceSystem Driver. Wrong target.", ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. ", ex1); } catch (IllegalArgumentException ex1) { - log.error( - "Unable to instantiate the SpaceSystem Driver. Using wrong argument.", - ex1); + log.error("Unable to instantiate the SpaceSystem Driver. Using wrong argument.", ex1); throw new NamespaceException( - "Unable to instantiate the FS Driver. Using wrong argument.", ex1); + "Unable to instantiate the FS Driver. Using wrong argument.", ex1); } catch (IllegalAccessException ex1) { - log.error( - "Unable to instantiate the SpaceSystem Driver. 
Illegal Access.", ex1); - throw new NamespaceException( - "Unable to instantiate the FS Driver. Illegal Access.", ex1); + log.error("Unable to instantiate the SpaceSystem Driver. Illegal Access.", ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. Illegal Access.", ex1); } catch (InstantiationException ex1) { - log.error( - "Unable to instantiate the SpaceSystem Driver. Generic problem..", - ex1); - throw new NamespaceException( - "Unable to instantiate the FS Driver. Generic problem..", ex1); + log.error("Unable to instantiate the SpaceSystem Driver. Generic problem..", ex1); + throw new NamespaceException("Unable to instantiate the FS Driver. Generic problem..", ex1); } } else { log.error("None Space System Driver built"); - /** - * @todo : Perhaps a "genericSpaceSystem" could be more disederable rather - * than NULL - */ + /** @todo : Perhaps a "genericSpaceSystem" could be more disederable rather than NULL */ ss = null; } @@ -508,10 +465,10 @@ public StoRI getRoot() throws NamespaceException { return storiRoot; } - /***************************************************************************** - * BUSINESS METHODs - ****************************************************************************/ - + /** + * *************************************************************************** BUSINESS METHODs + * ************************************************************************** + */ public boolean isApproachableByUser(GridUserInterface user) { for (ApproachableRule approachableRule : this.approachableRules) { @@ -534,9 +491,7 @@ public boolean isApproachableByAnonymous() { public StoRI createFile(String relativePath) throws NamespaceException { - /** - * @todo Check if relativePath is a valid path for a file. - */ + /** @todo Check if relativePath is a valid path for a file. */ StoRIType type = StoRIType.UNKNOWN; // log.debug("CREATING STORI BY RELATIVE PATH : "+relativePath); StoRI stori = new StoRIImpl(this, mappingRules.get(0), relativePath, type); @@ -545,36 +500,32 @@ public StoRI createFile(String relativePath) throws NamespaceException { public StoRI createFile(String relativePath, StoRIType type) { - /** - * @todo Check if relativePath is a valid path for a file. - */ + /** @todo Check if relativePath is a valid path for a file. */ log.debug("VFS Class - Relative Path : " + relativePath); StoRI stori = new StoRIImpl(this, mappingRules.get(0), relativePath, type); return stori; } - public StoRI createFile(String relativePath, StoRIType type, MappingRule rule) - throws NamespaceException { + public StoRI createFile(String relativePath, StoRIType type, MappingRule rule) + throws NamespaceException { - return new StoRIImpl(this, rule, relativePath, type); - } + return new StoRIImpl(this, rule, relativePath, type); + } - /**************************************************************** - * Methods used by StoRI to perform IMPLICIT SPACE RESERVATION - *****************************************************************/ + /** + * ************************************************************** Methods used by StoRI to perform + * IMPLICIT SPACE RESERVATION *************************************************************** + */ /** - * Workaround to manage the DEFAULT SPACE TOKEN defined per Storage Area. This - * workaround simply give the possibility to define a list of DEFAULT SPACE - * TOKENs by the StoRM configuration file. 
If the token specified into the - * PrepareToPut request belongs to the list of default space token, the space - * file is not used (since it does not exists into the space catalog) and a + * Workaround to manage the DEFAULT SPACE TOKEN defined per Storage Area. This workaround simply + * give the possibility to define a list of DEFAULT SPACE TOKENs by the StoRM configuration file. + * If the token specified into the PrepareToPut request belongs to the list of default space + * token, the space file is not used (since it does not exists into the space catalog) and a * simple allocation of blocks is performed for the file - * - * Return true if the space token specified is a DEAFULT SPACE TOKENS. - * + * + *

Return true if the space token specified is a DEAFULT SPACE TOKENS. */ - private Boolean isVOSAToken(TSpaceToken token) throws NamespaceException { ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); @@ -584,18 +535,16 @@ private Boolean isVOSAToken(TSpaceToken token) throws NamespaceException { ssd = catalog.getStorageSpace(token); } catch (TransferObjectDecodingException e) { log.error( - "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " - + e.getMessage()); + "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " + + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area space information. TransferObjectDecodingException : " - + e.getMessage()); + "Error retrieving Storage Area space information. TransferObjectDecodingException : " + + e.getMessage()); } catch (DataAccessException e) { - log - .error("Unable to get StorageSpaceTO from the DB. DataAccessException: " - + e.getMessage()); + log.error("Unable to get StorageSpaceTO from the DB. DataAccessException: " + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area space information. DataAccessException : " - + e.getMessage()); + "Error retrieving Storage Area space information. DataAccessException : " + + e.getMessage()); } if ((ssd != null) && (ssd.getSpaceType().equals(TSpaceType.VOSPACE))) { @@ -606,15 +555,13 @@ private Boolean isVOSAToken(TSpaceToken token) throws NamespaceException { } public void makeSilhouetteForFile(StoRI stori, TSizeInBytes presumedSize) - throws NamespaceException { + throws NamespaceException { // Check if StoRI is a file if (!(stori.getStoRIType().equals(StoRIType.FILE))) { - log.error("Unable to associate a Space to the StoRI with type: " - + stori.getStoRIType()); + log.error("Unable to associate a Space to the StoRI with type: " + stori.getStoRIType()); throw new NamespaceException( - "Unable to associate a Space to the StoRI with type: " - + stori.getStoRIType()); + "Unable to associate a Space to the StoRI with type: " + stori.getStoRIType()); } // Retrieve the instance of the right Space System @@ -625,7 +572,6 @@ public void makeSilhouetteForFile(StoRI stori, TSizeInBytes presumedSize) Space space = createSpace(presumedSize, presumedSize, localFile, spaceSystem); stori.setSpace(space); - } /* @@ -635,17 +581,14 @@ public void makeSilhouetteForFile(StoRI stori, TSizeInBytes presumedSize) * update will be overwritten from the other thread! */ - public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, - TSizeInBytes sizePresumed) + public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, TSizeInBytes sizePresumed) throws NamespaceException, ExpiredSpaceTokenException { // Check if StoRI is a file if (!(file.getStoRIType().equals(StoRIType.FILE))) { - log.error("Unable to associate a Space to the StoRI with type: " - + file.getStoRIType()); + log.error("Unable to associate a Space to the StoRI with type: " + file.getStoRIType()); throw new NamespaceException( - "Unable to associate a Space to the StoRI with type: " - + file.getStoRIType()); + "Unable to associate a Space to the StoRI with type: " + file.getStoRIType()); } if (isVOSAToken(token)) { @@ -658,8 +601,8 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, } /** - * Token for Dynamic space reservation specified. Go ahead in the old way, - * look into the space reservation catalog, ... + * Token for Dynamic space reservation specified. 
Go ahead in the old way, look into the space + * reservation catalog, ... */ // Use of Reserve Space Manager @@ -668,22 +611,20 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, spaceData = new ReservedSpaceCatalog().getStorageSpace(token); } catch (TransferObjectDecodingException e) { log.error( - "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " - + e.getMessage()); + "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " + + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area information from Token. TransferObjectDecodingException : " - + e.getMessage()); + "Error retrieving Storage Area information from Token. TransferObjectDecodingException : " + + e.getMessage()); } catch (DataAccessException e) { - log.error("Unable to build get StorageSpaceTO. DataAccessException: " - + e.getMessage()); + log.error("Unable to build get StorageSpaceTO. DataAccessException: " + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area information from Token. DataAccessException : " - + e.getMessage()); + "Error retrieving Storage Area information from Token. DataAccessException : " + + e.getMessage()); } if (spaceData == null) { - throw new NamespaceException( - "No Storage Space stored with this token :" + token); + throw new NamespaceException("No Storage Space stored with this token :" + token); } // Check here if Space Reservation is expired @@ -702,9 +643,7 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, log.debug("Available Space : " + availableSpaceSize); // The unusedSpaceSize should have the same size of Space File - /** - * @todo : compare Real size and Available Space size. - */ + /** @todo : compare Real size and Available Space size. */ // Verify that Size retrieved from DB are not null. if (availableSpaceSize == null) { @@ -739,26 +678,22 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, // Create Space StoRI StoRI spaceFile = retrieveSpaceFileByPFN(pfn, totalSize); - if ((!(spaceFile.getLocalFile().exists())) - || (spaceFile.getLocalFile().isDirectory())) { + if ((!(spaceFile.getLocalFile().exists())) || (spaceFile.getLocalFile().isDirectory())) { log.error( - "Unable to get the correct space file!spaceFile does not exsists or it is a directory."); + "Unable to get the correct space file!spaceFile does not exsists or it is a directory."); return; } /** - * Splitting the Space File. In this first version the original space file - * is truncated at the original size minus the new ptp file size presumed, - * and a new space pre_allocation, bound with the new ptp file, is done. - * - * @todo In the final version, if the new size requested is greater then - * the half of the original space file, the original spacefile is renamed - * to the desired ptp file name and then truncated to the requested size. - * A new space pre_allocation is perfored and bound with the old original - * space file name. - * + * Splitting the Space File. In this first version the original space file is truncated at the + * original size minus the new ptp file size presumed, and a new space pre_allocation, bound + * with the new ptp file, is done. + * + * @todo In the final version, if the new size requested is greater then the half of the + * original space file, the original spacefile is renamed to the desired ptp file name and + * then truncated to the requested size. 
A new space pre_allocation is perfored and bound + * with the old original space file name. */ - spaceFile.setStoRIType(StoRIType.SPACE_BOUND); file.setSpace(spaceFile.getSpace()); @@ -766,10 +701,9 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, TSizeInBytes newUsedSpaceSize = TSizeInBytes.makeEmpty(); TSizeInBytes newAvailableSpaceSize = TSizeInBytes.makeEmpty(); try { - newUsedSpaceSize = TSizeInBytes - .make(totalSpaceSize.value() - remainingSize, SizeUnit.BYTES); - newAvailableSpaceSize = TSizeInBytes.make(remainingSize, - SizeUnit.BYTES); + newUsedSpaceSize = + TSizeInBytes.make(totalSpaceSize.value() - remainingSize, SizeUnit.BYTES); + newAvailableSpaceSize = TSizeInBytes.make(remainingSize, SizeUnit.BYTES); } catch (InvalidTSizeAttributesException ex) { log.error("Unable to create Used Space Size, so use EMPTY size ", ex); } @@ -780,14 +714,11 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, spaceData.setUsedSpaceSize(newAvailableSpaceSize); // Update the catalogs storeSpaceByToken(spaceData); - } - - else { // Case presumedSize is empty + } else { // Case presumedSize is empty log.warn(" --- Here there is a call with a empty presumed size!--- "); useAllSpaceForFile(token, file); } - } /* @@ -800,22 +731,20 @@ public synchronized void useSpaceForFile(TSpaceToken token, StoRI file, * the half size of the available spaceFile. TODO */ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) - throws NamespaceException, ExpiredSpaceTokenException { + throws NamespaceException, ExpiredSpaceTokenException { // Check if StoRI is a file if (!(file.getStoRIType().equals(StoRIType.FILE))) { - log.error("Unable to associate a Space to the StoRI with type: " - + file.getStoRIType()); + log.error("Unable to associate a Space to the StoRI with type: " + file.getStoRIType()); throw new NamespaceException( - "Unable to associate a Space to the StoRI with type: " - + file.getStoRIType()); + "Unable to associate a Space to the StoRI with type: " + file.getStoRIType()); } // Get the default space size TSizeInBytes defaultFileSize = null; try { - defaultFileSize = TSizeInBytes - .make(Configuration.getInstance().getFileDefaultSize(), SizeUnit.BYTES); + defaultFileSize = + TSizeInBytes.make(Configuration.getInstance().getFileDefaultSize(), SizeUnit.BYTES); } catch (it.grid.storm.srm.types.InvalidTSizeAttributesException e) { log.debug("Invalid size created."); } @@ -826,22 +755,20 @@ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) spaceData = new ReservedSpaceCatalog().getStorageSpace(token); } catch (TransferObjectDecodingException e) { log.error( - "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " - + e.getMessage()); + "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: " + + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area information from Token. TransferObjectDecodingException : " - + e.getMessage()); + "Error retrieving Storage Area information from Token. TransferObjectDecodingException : " + + e.getMessage()); } catch (DataAccessException e) { - log.error("Unable to build get StorageSpaceTO. DataAccessException: " - + e.getMessage()); + log.error("Unable to build get StorageSpaceTO. DataAccessException: " + e.getMessage()); throw new NamespaceException( - "Error retrieving Storage Area information from Token. 
DataAccessException : " - + e.getMessage()); + "Error retrieving Storage Area information from Token. DataAccessException : " + + e.getMessage()); } if (spaceData == null) { - throw new NamespaceException( - "No Storage Space stored with this token :" + token); + throw new NamespaceException("No Storage Space stored with this token :" + token); } // Check here if Space Reservation is expired @@ -849,23 +776,18 @@ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) throw new ExpiredSpaceTokenException("Space Token Expired :" + token); } - /** - * First of all, Check if it's default or not - */ + /** First of all, Check if it's default or not */ // if(spaceData.getSpaceType()== StorageSpaceData.DEFAULT) { if (isVOSAToken(token)) { // ADD HERE THE LOGIC TO MANAGE DEFAULT SPACE RESERVATION /** - * Check if a DEFAULT SPACE TOKEN is specified. IN that case do nothing - * and create a simple silhouette for the file... - * - * - * TOREMOVE. The space data will contains this information!!! i METADATA - * non venfgono agrgiornati, sara fatta una funzionalita' nella - * getspacemetadatacatalog che in caso di query sul defaulr space token - * vada a vedre la quota sul file system. - * + * Check if a DEFAULT SPACE TOKEN is specified. IN that case do nothing and create a simple + * silhouette for the file... + * + *
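     * A rough sketch of what this branch amounts to (the statements themselves are outside this
     * hunk, so this is an assumption rather than the committed code): having recognised a default
     * token via isVOSAToken(token), the method would simply do something like
     *   makeSilhouetteForFile(file, defaultFileSize);
     * and return, without consuming any space file from the reservation catalog.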

TOREMOVE. The space data will contains this information!!! i METADATA non venfgono + * agrgiornati, sara fatta una funzionalita' nella getspacemetadatacatalog che in caso di + * query sul defaulr space token vada a vedre la quota sul file system. */ // WARNING, This double check have to be removed, the firs should be fdone // on teh space type @@ -882,8 +804,8 @@ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) } else { /** - * Token for Dynamic space reservation specified. Go ahead in the old way, - * look into the space reservation catalog, ... + * Token for Dynamic space reservation specified. Go ahead in the old way, look into the space + * reservation catalog, ... */ // Check here if Space Reservation is expired @@ -909,8 +831,7 @@ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) } else { TSizeInBytes fileSizeToUse = null; try { - fileSizeToUse = TSizeInBytes.make(availableSpaceSize.value() / 2, - SizeUnit.BYTES); + fileSizeToUse = TSizeInBytes.make(availableSpaceSize.value() / 2, SizeUnit.BYTES); } catch (it.grid.storm.srm.types.InvalidTSizeAttributesException e) { log.debug("Invalid size created."); } @@ -918,15 +839,14 @@ public synchronized void useAllSpaceForFile(TSpaceToken token, StoRI file) useSpaceForFile(token, file, fileSizeToUse); } } - } - /**************************************************************** - * Methods used by StoRI to perform EXPLICIT SPACE RESERVATION - *****************************************************************/ - - public StoRI createSpace(String relativePath, long guaranteedSize, - long totalSize) throws NamespaceException { + /** + * ************************************************************** Methods used by StoRI to perform + * EXPLICIT SPACE RESERVATION *************************************************************** + */ + public StoRI createSpace(String relativePath, long guaranteedSize, long totalSize) + throws NamespaceException { StoRIType type = StoRIType.SPACE; /* @@ -948,46 +868,38 @@ public StoRI createSpace(String relativePath, long guaranteedSize, TSizeInBytes totSize = TSizeInBytes.makeEmpty(); try { totSize = TSizeInBytes.make(totalSize, SizeUnit.BYTES); - } - - catch (InvalidTSizeAttributesException ex2) { + } catch (InvalidTSizeAttributesException ex2) { log.error("Unable to create Total Size, so use EMPTY size", ex2); } - Space space = createSpace(guarSize, totSize, stori.getLocalFile(), - spaceSystem); + Space space = createSpace(guarSize, totSize, stori.getLocalFile(), spaceSystem); stori.setSpace(space); return stori; } - public StoRI createSpace(String relativePath, long totalsize) - throws NamespaceException { + public StoRI createSpace(String relativePath, long totalsize) throws NamespaceException { StoRI stori = createSpace(relativePath, totalsize, totalsize); return stori; } /** - * This method is used to split the specified spaceFile to the desired PtP - * file. The operations performed depends on the input parameters. If the - * desired new size is minor then the half of the total reserved space size, - * the original space file is truncated to new size : (original size - new PtP - * file presumed size), then a new space_preallocation, of the new PtP file + * This method is used to split the specified spaceFile to the desired PtP file. The operations + * performed depends on the input parameters. 
If the desired new size is minor then the half of + * the total reserved space size, the original space file is truncated to new size : (original + * size - new PtP file presumed size), then a new space_preallocation, of the new PtP file * presumed size, is bound to the requested file. - * - * If the presumed size is greater then the half fo the global space - * available, the original space file is renamed to the new PtP file and - * truncated to the presumed size. A new space_preallocation is done to - * recreate the remaining original space file - * - * @param spaceOrig StoRI bounds to the original space file. @param file StoRI - * bounds to the desired new PtP file. @param long new PtP file size - * presumed. @returns new Size + * + *

If the presumed size is greater then the half fo the global space available, the original + * space file is renamed to the new PtP file and truncated to the presumed size. A new + * space_preallocation is done to recreate the remaining original space file + * + * @param spaceOrig StoRI bounds to the original space file. @param file StoRI bounds to the + * desired new PtP file. @param long new PtP file size presumed. @returns new Size */ - public TSizeInBytes splitSpace(StoRI spaceOrig, StoRI file, long sizePresumed) - throws NamespaceException { + throws NamespaceException { // Update Storage Space to new values of size TSizeInBytes newSize = TSizeInBytes.makeEmpty(); @@ -995,14 +907,11 @@ public TSizeInBytes splitSpace(StoRI spaceOrig, StoRI file, long sizePresumed) // Save the name of the current Space File String spacePFN = spaceOrig.getAbsolutePath(); log.debug("VFS Split: spaceFileName:" + spacePFN); - String relativeSpacePFN = NamespaceUtil - .extractRelativePath(this.getRootPath(), spacePFN); + String relativeSpacePFN = NamespaceUtil.extractRelativePath(this.getRootPath(), spacePFN); /** * extractRelativePath seems not working in this case! WHY? - * - * @todo Because the mapping rule choosen is always the same, for all - * StFNRoot...BUG to FIX.. - * + * + * @todo Because the mapping rule choosen is always the same, for all StFNRoot...BUG to FIX.. */ log.debug("Looking for root:" + this.getRootPath()); int index = spacePFN.indexOf(this.getRootPath()); @@ -1017,12 +926,13 @@ public TSizeInBytes splitSpace(StoRI spaceOrig, StoRI file, long sizePresumed) log.debug("VFS Split: relativeSpacePFN:" + relativeSpacePFN); if (failure) { - log.warn( - "SpacePFN does not refer to this VFS root! Something goes wrong in app-rule?"); + log.warn("SpacePFN does not refer to this VFS root! Something goes wrong in app-rule?"); try { newSize = TSizeInBytes.make(sizePresumed, SizeUnit.BYTES); - file = createSpace(NamespaceUtil.extractRelativePath(this.getRootPath(), - file.getAbsolutePath()), sizePresumed); + file = + createSpace( + NamespaceUtil.extractRelativePath(this.getRootPath(), file.getAbsolutePath()), + sizePresumed); file.getSpace().allot(); } catch (InvalidTSizeAttributesException ex) { log.error("Unable to create UNUsed Space Size, so use EMPTY size ", ex); @@ -1036,34 +946,34 @@ public TSizeInBytes splitSpace(StoRI spaceOrig, StoRI file, long sizePresumed) long realSize = spaceOrig.getLocalFile().getSize(); /** - * The next steps depends on the input parameters. Case (1) : new PtP file - * size minor than the half of the available space file. In this case the - * spaceFile is truncated, and a new file is created with the desired - * amount of preallocated blocks. Case(2) : new PtP file size greater than - * the half of the available space file. The spaceFile is renamed to the - * new PtP file, truncated to the presumed size and a new preallocation is - * done bound to the original space file name. - * + * The next steps depends on the input parameters. Case (1) : new PtP file size minor than the + * half of the available space file. In this case the spaceFile is truncated, and a new file + * is created with the desired amount of preallocated blocks. Case(2) : new PtP file size + * greater than the half of the available space file. The spaceFile is renamed to the new PtP + * file, truncated to the presumed size and a new preallocation is done bound to the original + * space file name. 
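     * A worked example of the two cases (all sizes hypothetical): with a 100 GB space file, a
     * 20 GB PtP request falls in Case (1), so the space file is truncated to 80 GB and a fresh
     * 20 GB preallocation is bound to the new file; a 70 GB request falls in Case (2), so the
     * space file is renamed to the PtP file, truncated to 70 GB, and a new 30 GB preallocation
     * is recreated under the original space file name.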
*/ - if (sizePresumed <= (realSize / 2)) { log.debug("SplitSpace Case (1)"); // Truncate - log.debug("SplitSpace: " + spaceOrig.getAbsolutePath() - + " truncating file to size:" + (realSize - sizePresumed)); - spaceOrig.getSpace().getSpaceFile() - .truncateFile((realSize - sizePresumed)); + log.debug( + "SplitSpace: " + + spaceOrig.getAbsolutePath() + + " truncating file to size:" + + (realSize - sizePresumed)); + spaceOrig.getSpace().getSpaceFile().truncateFile((realSize - sizePresumed)); // Allocate space for file try { newSize = TSizeInBytes.make(sizePresumed, SizeUnit.BYTES); - file = createSpace(NamespaceUtil.extractRelativePath( - this.getRootPath(), file.getAbsolutePath()), sizePresumed); + file = + createSpace( + NamespaceUtil.extractRelativePath(this.getRootPath(), file.getAbsolutePath()), + sizePresumed); file.getSpace().allot(); } catch (InvalidTSizeAttributesException ex) { - log.error("Unable to create UNUsed Space Size, so use EMPTY size ", - ex); + log.error("Unable to create UNUsed Space Size, so use EMPTY size ", ex); } catch (it.grid.storm.filesystem.ReservationException e2) { log.error("Unable to create space into File System"); } @@ -1087,31 +997,27 @@ public TSizeInBytes splitSpace(StoRI spaceOrig, StoRI file, long sizePresumed) newSize = TSizeInBytes.make(remainingSize, SizeUnit.BYTES); // Create a new Space file with the old name and with the size // computed. - spaceOrig = createSpace( - NamespaceUtil.extractRelativePath(this.getRootPath(), spacePFN), - newSize.value()); + spaceOrig = + createSpace( + NamespaceUtil.extractRelativePath(this.getRootPath(), spacePFN), newSize.value()); // Create the new SpaceFile into the file system spaceOrig.getSpace().allot(); } catch (InvalidTSizeAttributesException ex) { - log.error("Unable to create UNUsed Space Size, so use EMPTY size ", - ex); + log.error("Unable to create UNUsed Space Size, so use EMPTY size ", ex); } catch (it.grid.storm.filesystem.ReservationException e2) { log.error("Unable to create space into File System"); } - } - } // failure else return newSize; } - /************************************************** - * Methods used by Space Reservation Manager - *************************************************/ - - public StoRI createSpace(long guarSize, long totalSize) - throws NamespaceException { + /** + * ************************************************ Methods used by Space Reservation Manager + * *********************************************** + */ + public StoRI createSpace(long guarSize, long totalSize) throws NamespaceException { // retrieve SPACE FILE NAME String relativePath = makeSpaceFilePath(); @@ -1127,7 +1033,6 @@ public StoRI createSpace(long totalSize) throws NamespaceException { TSizeInBytes guarSize = defValue.getDefaultGuaranteedSpaceSize(); StoRI stori = createSpace(relativePath, guarSize.value(), totalSize); return stori; - } public StoRI createSpace() throws NamespaceException { @@ -1141,16 +1046,13 @@ public StoRI createSpace() throws NamespaceException { TSizeInBytes guarSize = defValue.getDefaultGuaranteedSpaceSize(); // retrieve DEFAULT TOTAL size TSizeInBytes totalSize = defValue.getDefaultTotalSpaceSize(); - StoRI stori = createSpace(relativePath, guarSize.value(), - totalSize.value()); + StoRI stori = createSpace(relativePath, guarSize.value(), totalSize.value()); return stori; } public StoRI createDefaultStoRI() throws NamespaceException { - /** - * @todo: When is used this method? - */ + /** @todo: When is used this method? 
*/ return null; } @@ -1163,8 +1065,7 @@ public String toString() { sb.append(" VFS Name : '" + this.aliasName + "'" + sep); sb.append(" VFS root : '" + this.rootPath + "'" + sep); sb.append(" VFS FS driver : '" + this.fsDriver.getName() + "'" + sep); - sb.append( - " VFS Space driver : '" + this.spaceSystemDriver.getName() + "'" + sep); + sb.append(" VFS Space driver : '" + this.spaceSystemDriver.getName() + "'" + sep); sb.append(" -- DEFAULT VALUES --" + sep); sb.append(this.defValue); sb.append(" -- CAPABILITY --" + sep); @@ -1173,14 +1074,15 @@ public String toString() { return sb.toString(); } - /**************************************************************************** - * UTILITY METHODS - ****************************************************************************/ - - /***************************************** - * Methods used for manage SPACE - *****************************************/ + /** + * ************************************************************************** UTILITY METHODS + * ************************************************************************** + */ + /** + * *************************************** Methods used for manage SPACE + * *************************************** + */ private String makeSpaceFilePath() throws NamespaceException { String result = ""; @@ -1191,22 +1093,21 @@ private String makeSpaceFilePath() throws NamespaceException { return result; } - private Space createSpace(TSizeInBytes guarSize, TSizeInBytes totalSize, - LocalFile file, SpaceSystem spaceSystem) throws NamespaceException { + private Space createSpace( + TSizeInBytes guarSize, TSizeInBytes totalSize, LocalFile file, SpaceSystem spaceSystem) + throws NamespaceException { Space space = null; try { space = new Space(guarSize, totalSize, file, spaceSystem); } catch (InvalidSpaceAttributesException ex3) { log.error("Error while retrieving Space System Driver for VFS ", ex3); - throw new NamespaceException( - "Error while retrieving Space System Driver for VFS ", ex3); + throw new NamespaceException("Error while retrieving Space System Driver for VFS ", ex3); } return space; } - public StorageSpaceData getSpaceByAlias(String desc) - throws NamespaceException { + public StorageSpaceData getSpaceByAlias(String desc) throws NamespaceException { // Retrieve Storage Space from Persistence ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); @@ -1214,8 +1115,7 @@ public StorageSpaceData getSpaceByAlias(String desc) return spaceData; } - public void storeSpaceByToken(StorageSpaceData spaceData) - throws NamespaceException { + public void storeSpaceByToken(StorageSpaceData spaceData) throws NamespaceException { // Retrieve Storage Space from Persistence ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); @@ -1226,8 +1126,7 @@ public void storeSpaceByToken(StorageSpaceData spaceData) } } - public StoRI retrieveSpaceFileByPFN(PFN pfn, long totalSize) - throws NamespaceException { + public StoRI retrieveSpaceFileByPFN(PFN pfn, long totalSize) throws NamespaceException { NamespaceInterface namespace = NamespaceDirector.getNamespace(); StoRI stori = namespace.resolveStoRIbyPFN(pfn); @@ -1245,10 +1144,10 @@ public long getCreationTime() { return creationTime; } - /****************************************** - * VERSION 1.4 * - *******************************************/ - + /** + * **************************************** VERSION 1.4 * + * ***************************************** + */ public TSpaceToken getSpaceToken() { return this.spaceToken; @@ -1283,8 +1182,7 @@ public String 
getStorageAreaAuthzFixed() throws NamespaceException { if (getStorageAreaAuthzType().equals(SAAuthzType.FIXED)) { return saAuthzSourceName; } else { - throw new NamespaceException( - "Required FIXED-AUTHZ, but it is UNDEFINED."); + throw new NamespaceException("Required FIXED-AUTHZ, but it is UNDEFINED."); } } diff --git a/src/main/java/it/grid/storm/namespace/naming/NameParser.java b/src/main/java/it/grid/storm/namespace/naming/NameParser.java index d13f1651..f0699f80 100644 --- a/src/main/java/it/grid/storm/namespace/naming/NameParser.java +++ b/src/main/java/it/grid/storm/namespace/naming/NameParser.java @@ -1,172 +1,159 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; public class NameParser { - public NameParser() { - - } - - /** - * Get the basename of an URI. It's possibly an empty string. - * - * @param uri - * a string regarded an URI - * @return the basename string; an empty string if the path ends with slash - */ - public String getName(String uri) { - - if (uri == null || uri.length() == 0) { - return uri; - } - String path = this.getPath(uri); - int at = path.lastIndexOf("/"); - int to = path.length(); - return (at >= 0) ? path.substring(at + 1, to) : path; - } - - /** - * Get the path of an URI. - * - * @param uri - * a string regarded an URI - * @return the path string - */ - public String getPath(String uri) { - - if (uri == null) { - return null; - } - // consider of net_path - int at = uri.indexOf("//"); - int from = uri.indexOf("/", - at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); - // the authority part of URI ignored - int to = uri.length(); - // check the query - if (uri.indexOf('?', from) != -1) { - to = uri.indexOf('?', from); - } - // check the fragment - if (uri.lastIndexOf("#") > from && uri.lastIndexOf("#") < to) { - to = uri.lastIndexOf("#"); - } - // get only the path. - return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from, to); - } - - /** - * Get the query of an URI. - * - * @param uri - * a string regarded an URI - * @return the query string; null if empty or undefined - */ - public String getQuery(String uri) { - - if (uri == null || uri.length() == 0) { - return null; - } - // consider of net_path - int at = uri.indexOf("//"); - int from = uri.indexOf("/", - at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); - // the authority part of URI ignored - int to = uri.length(); - // reuse the at and from variables to consider the query - at = uri.indexOf("?", from); - if (at >= 0) { - from = at + 1; - } else { - return null; - } - // check the fragment - if (uri.lastIndexOf("#") > from) { - to = uri.lastIndexOf("#"); - } - // get the path and query. - return (from < 0 || from == to) ? null : uri.substring(from, to); - } - - /** - * Get the path and query of an URI. - * - * @param uri - * a string regarded an URI - * @return the path and query string - */ - public String getPathQuery(String uri) { - - if (uri == null) { - return null; - } - // consider of net_path - int at = uri.indexOf("//"); - int from = uri.indexOf("/", - at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); - // the authority part of URI ignored - int to = uri.length(); - // Ignore the '?' mark so to ignore the query. - // check the fragment - if (uri.lastIndexOf("#") > from) { - to = uri.lastIndexOf("#"); - } - // get the path and query. 
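    // A small worked example of what these parsing helpers return (the URI below is hypothetical):
    //   String uri = "srm://storage.example.org:8444/dteam/file1?SFN=/castor/file1";
    //   new NameParser().getPath(uri);  // "/dteam/file1"  (the authority host:port is skipped)
    //   new NameParser().getQuery(uri); // "SFN=/castor/file1"
    //   new NameParser().getName(uri);  // "file1"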
- return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from, to); - } - - /** - * Get the path of an URI and its rest part. - * - * @param uri - * a string regarded an URI - * @return the string from the path part - */ - public String getFromPath(String uri) { - - if (uri == null) { - return null; - } - // consider of net_path - int at = uri.indexOf("//"); - int from = uri.indexOf("/", - at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); - // get the path and its rest. - return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from); - } - - /** - * This method counts the slashes after the scheme. - * - * @param filename - * @return nuof slashes - */ - protected int countSlashes(final String filename) { - - int state = 0; - int nuofSlash = 0; - for (int pos = 0; pos < filename.length(); pos++) { - char c = filename.charAt(pos); - if (state == 0) { - if (c >= 'a' && c <= 'z') { - continue; - } - if (c == ':') { - state++; - continue; - } - } else if (state == 1) { - if (c == '/') { - nuofSlash++; - } else { - return nuofSlash; - } - } - } - return nuofSlash; - } - + public NameParser() {} + + /** + * Get the basename of an URI. It's possibly an empty string. + * + * @param uri a string regarded an URI + * @return the basename string; an empty string if the path ends with slash + */ + public String getName(String uri) { + + if (uri == null || uri.length() == 0) { + return uri; + } + String path = this.getPath(uri); + int at = path.lastIndexOf("/"); + int to = path.length(); + return (at >= 0) ? path.substring(at + 1, to) : path; + } + + /** + * Get the path of an URI. + * + * @param uri a string regarded an URI + * @return the path string + */ + public String getPath(String uri) { + + if (uri == null) { + return null; + } + // consider of net_path + int at = uri.indexOf("//"); + int from = uri.indexOf("/", at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); + // the authority part of URI ignored + int to = uri.length(); + // check the query + if (uri.indexOf('?', from) != -1) { + to = uri.indexOf('?', from); + } + // check the fragment + if (uri.lastIndexOf("#") > from && uri.lastIndexOf("#") < to) { + to = uri.lastIndexOf("#"); + } + // get only the path. + return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from, to); + } + + /** + * Get the query of an URI. + * + * @param uri a string regarded an URI + * @return the query string; null if empty or undefined + */ + public String getQuery(String uri) { + + if (uri == null || uri.length() == 0) { + return null; + } + // consider of net_path + int at = uri.indexOf("//"); + int from = uri.indexOf("/", at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); + // the authority part of URI ignored + int to = uri.length(); + // reuse the at and from variables to consider the query + at = uri.indexOf("?", from); + if (at >= 0) { + from = at + 1; + } else { + return null; + } + // check the fragment + if (uri.lastIndexOf("#") > from) { + to = uri.lastIndexOf("#"); + } + // get the path and query. + return (from < 0 || from == to) ? null : uri.substring(from, to); + } + + /** + * Get the path and query of an URI. + * + * @param uri a string regarded an URI + * @return the path and query string + */ + public String getPathQuery(String uri) { + + if (uri == null) { + return null; + } + // consider of net_path + int at = uri.indexOf("//"); + int from = uri.indexOf("/", at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 
0 : at + 2) : 0); + // the authority part of URI ignored + int to = uri.length(); + // Ignore the '?' mark so to ignore the query. + // check the fragment + if (uri.lastIndexOf("#") > from) { + to = uri.lastIndexOf("#"); + } + // get the path and query. + return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from, to); + } + + /** + * Get the path of an URI and its rest part. + * + * @param uri a string regarded an URI + * @return the string from the path part + */ + public String getFromPath(String uri) { + + if (uri == null) { + return null; + } + // consider of net_path + int at = uri.indexOf("//"); + int from = uri.indexOf("/", at >= 0 ? (uri.lastIndexOf("/", at - 1) >= 0 ? 0 : at + 2) : 0); + // get the path and its rest. + return (from < 0) ? (at >= 0 ? "/" : uri) : uri.substring(from); + } + + /** + * This method counts the slashes after the scheme. + * + * @param filename + * @return nuof slashes + */ + protected int countSlashes(final String filename) { + + int state = 0; + int nuofSlash = 0; + for (int pos = 0; pos < filename.length(); pos++) { + char c = filename.charAt(pos); + if (state == 0) { + if (c >= 'a' && c <= 'z') { + continue; + } + if (c == ':') { + state++; + continue; + } + } else if (state == 1) { + if (c == '/') { + nuofSlash++; + } else { + return nuofSlash; + } + } + } + return nuofSlash; + } } diff --git a/src/main/java/it/grid/storm/namespace/naming/NamespaceUtil.java b/src/main/java/it/grid/storm/namespace/naming/NamespaceUtil.java index e2ffcaa2..507fbc1b 100644 --- a/src/main/java/it/grid/storm/namespace/naming/NamespaceUtil.java +++ b/src/main/java/it/grid/storm/namespace/naming/NamespaceUtil.java @@ -1,580 +1,547 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.StringTokenizer; -import java.util.Vector; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.base.Preconditions; import com.google.common.collect.Lists; - import it.grid.storm.griduser.VONameMatchingRule; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.MappingRule; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.srm.types.TSURL; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.StringTokenizer; +import java.util.Vector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class NamespaceUtil { - private static final Logger log = LoggerFactory.getLogger(NamespaceUtil.class); - - /** - * PRIVATE Constructor - */ - private NamespaceUtil() { - - } - - /** - * Compute the distance between two path. Return -1 when the two path are different completely. 
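   * As an illustration of how this distance is used (paths are hypothetical): getWinnerRule() and
   * getWinnerVFS() keep, among the roots that enclose a requested path, the one with the smallest
   * distance, so for "/storage/dteam/2023/run1/f.dat" a root of "/storage/dteam/2023" is expected
   * to beat "/storage/dteam", assuming the distance grows with the number of path elements
   * separating root and target.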
- * - * @param path1 String - * @param path2 String - * @return int - */ - public static int computeDistanceFromPath(String path1, String path2) { - - return (new Path(path1)).distance(new Path(path2)); - } - - /** - * Retrieve all path elements within path - * - * @param path String - * @return Collection - */ - public static List getPathElement(String path) { - - return (new Path(path)).getPathElements(); - } - - /** - * getFileName - * - * @param stfn String - * @return String - */ - public static String getFileName(String stfn) { - - if (stfn != null) { - if (stfn.endsWith(NamingConst.SEPARATOR)) { - return ""; - } else { - Path path = new Path(stfn); - int length = path.getLength(); - if (length > 0) { - PathElement elem = path.getElementAt(length - 1); - return elem.toString(); - } else { - return ""; - } - } - } else { - return ""; - } - } - - /** - * Return all the VFS residing on a specified path (mount-point) - * - * @param mountPointPath - * @return the set - */ - public static Collection getResidentVFS(String mountPointPath) { - - List vfsSet = NamespaceDirector.getNamespace().getAllDefinedVFS(); - for (VirtualFS vfs : vfsSet) { - String vfsRootPath; - boolean enclosed; - - vfsRootPath = vfs.getRootPath(); - enclosed = NamespaceUtil.isEnclosed(mountPointPath, vfsRootPath); - if (!enclosed) { - vfsSet.remove(vfs); - } - } - return vfsSet; - } - - public static String consumeFileName(String file) { - - if (file != null) { - if (file.endsWith(NamingConst.SEPARATOR)) { - return file; - } else { - Path path = new Path(file); - int length = path.getLength(); - if (length > 1) { - return path.getSubPath(length - 1).getPath() + NamingConst.SEPARATOR; - } else { - return Path.PATH_SEPARATOR; - } - } - } else { - return Path.PATH_SEPARATOR; - } - } - - /** - * get - * - * @param stfn String - * @return String - */ - public static String getStFNPath(String stfn) { - - return consumeFileName(stfn); - } - - public static String consumeElement(String stfnPath) { - - Path path = new Path(stfnPath); - int length = path.getLength(); - if (length > 1) { - return path.getSubPath(length - 1).getPath() + NamingConst.SEPARATOR; - } else { - return ""; - } - } - - public static String extractRelativePath(String root, String absolute) { - - if (absolute.startsWith(root)) { - Path rootPath = new Path(root); - int rootLength = rootPath.getLength(); - - Path absPath = new Path(absolute); - List elem = Lists.newArrayList(); - - for (int i = 0; i < absPath.getLength(); i++) { - // Why use length and not compare single element? - if (i >= rootLength) { - elem.add(absPath.getElementAt(i)); - } - } - Path result = new Path(elem, false); - - return result.getPath(); - } else { - return absolute; - } - } - - /** - * Is the first path within the second one? 
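   * In getResidentVFS(), getWinnerRule() and getWinnerVFS() this is invoked as
   * isEnclosed(root, candidate), so it effectively asks whether the candidate path falls under the
   * given root. The companion helper extractRelativePath above then strips the root, e.g.
   * extractRelativePath("/storage/dteam", "/storage/dteam/2023/f.dat") returns "2023/f.dat"
   * (paths are hypothetical).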
- * - * @param root - * @param wrapperCandidate - * @return - */ - public static boolean isEnclosed(String root, String wrapperCandidate) { - - boolean result = false; - Path rootPath = new Path(root); - Path wrapperPath = new Path(wrapperCandidate); - result = rootPath.isEnclosed(wrapperPath); - return result; - } - - /** - * - * @param stfnPath - * @param vfsApproachable - * @return the mapped rule or null if not found - */ - public static MappingRule getWinnerRule(String stfnPath, Collection mappingRules, - Collection vfsApproachable) { - - Preconditions.checkNotNull(stfnPath, "Unable to get winning rule: invalid null stfnPath"); - Preconditions.checkNotNull(mappingRules, - "Unable to get winning rule: invalid null mapping rules"); - Preconditions.checkNotNull(vfsApproachable, - "Unable to get winning rule: invalid null VFS list"); - - if (mappingRules.isEmpty()) { - log.warn("Unable to get winning rule: empty mapping rules"); - return null; - } - - if (vfsApproachable.isEmpty()) { - log.debug("Unable to get winning rule: empty VFS list"); - return null; - } - - log.debug("Searching winner rule for {}", stfnPath); - MappingRule winnerRule = null; - - Vector rules = new Vector(mappingRules); - - int minDistance = Integer.MAX_VALUE; - for (MappingRule rule : rules) { - if (isEnclosed(rule.getStFNRoot(), stfnPath) - && vfsApproachable.contains(rule.getMappedFS())) { - int distance = computeDistanceFromPath(rule.getStFNRoot(), stfnPath); - if (distance < minDistance) { - minDistance = distance; - winnerRule = rule; - } - } - } - return winnerRule; - } - - public static MappingRule getWinnerRule(TSURL surl, Collection mappingRules, - Collection vfsApproachable) { - - return getWinnerRule(surl.sfn().stfn().toString(), mappingRules, vfsApproachable); - } - - public static VirtualFS getWinnerVFS(String absolutePath, - Map vfsListByRootPath) throws NamespaceException { - - VirtualFS vfsWinner = null; - int distance = Integer.MAX_VALUE; - for (String vfsRoot : vfsListByRootPath.keySet()) { - int d = computeDistanceFromPath(vfsRoot, absolutePath); - log.debug("Pondering VFS Root '{}' against '{}'. Distance = {}", vfsRoot, absolutePath, d); - if (d < distance) { - boolean enclosed = isEnclosed(vfsRoot, absolutePath); - if (enclosed) { - distance = d; - vfsWinner = vfsListByRootPath.get(vfsRoot); - log.debug("Partial winner is {} (VFS: {})", vfsRoot, vfsWinner.getAliasName()); - } - } - } - if (vfsWinner == null) { - log.error("Unable to found a VFS compatible with path: '{}'", absolutePath); - throw new NamespaceException( - "Unable to found a VFS compatible with path :'" + absolutePath + "'"); - } - return vfsWinner; - } - - public static String resolveVOName(String filename, - Map vfsListByRootPath) throws NamespaceException { - - VirtualFS vfs = getWinnerVFS(filename, vfsListByRootPath); - /* NamespaceException raised if vfs is not found => vfs is not null */ - VONameMatchingRule rule = - vfs.getApproachableRules().get(0).getSubjectRules().getVONameMatchingRule(); - return rule.getVOName(); - } - - /** - * ===================== INNER CLASSES ====================== - */ - - /** - * - *

<p> - * Title: - * </p> - * - * <p> - * Description: - * </p>
- * - */ - static class PathElement { - - private final String pathChunk; - - public PathElement(String path) { - - this.pathChunk = path; - } - - public String getPathChunk() { - - return this.pathChunk; - } - - @Override - public int hashCode() { - - return this.pathChunk.hashCode(); - } - - @Override - public boolean equals(Object obj) { - - boolean result = true; - if (!(obj instanceof PathElement)) { - result = false; - } else { - PathElement other = (PathElement) obj; - result = (this.getPathChunk()).equals(other.getPathChunk()); - } - return result; - } - - @Override - public String toString() { - - return pathChunk; - } - } - - /** - * - *

<p> - * Title: - * </p> - * - * <p> - * Description: - * </p>
- * - */ - private static class Path { - - private List path; - private static String PATH_SEPARATOR = "/"; - public static final String[] EMPTY_STRING_ARRAY = {}; - public boolean directory; - public boolean absolutePath; - - public Path() { - - this.path = Lists.newArrayList(); - this.directory = false; - this.absolutePath = true; - } - - public Path(List path, boolean absolutePath) { - - this.path = path; - this.directory = false; - this.absolutePath = absolutePath; - } - - public Path(String path) { - - // Factorize path into array of PathElement... - if (path.startsWith(PATH_SEPARATOR)) { - this.absolutePath = true; - } else { - this.absolutePath = false; - } - if (path.endsWith(PATH_SEPARATOR)) { - this.directory = true; - } else { - this.directory = false; - } - - String[] pathElements = factorizePath(path); - if (pathElements != null) { - // ...and build Path - this.path = Lists.newArrayList(); - for (String pathElement : pathElements) { - addPathElement(new PathElement(pathElement)); - } - } - } - - public String[] factorizePath(String path) { - - return toStringArray(path, PATH_SEPARATOR); - } - - public List getPathElements() { - - List result = Lists.newArrayList(); - Iterator scan = path.iterator(); - while (scan.hasNext()) { - PathElement p = scan.next(); - result.add(p.toString()); - } - return result; - } - - private String[] toStringArray(String value, String delim) { - - if (value != null) { - return split(delim, value); - } else { - return EMPTY_STRING_ARRAY; - } - } - - private String[] split(String seperators, String list) { - - return split(seperators, list, false); - } - - private String[] split(String seperators, String list, boolean include) { - - StringTokenizer tokens = new StringTokenizer(list, seperators, include); - String[] result = new String[tokens.countTokens()]; - int i = 0; - while (tokens.hasMoreTokens()) { - result[i++] = tokens.nextToken(); - } - return result; - } - - public String getPath() { - - StringBuilder buf = new StringBuilder(); - if (this.absolutePath) { - buf.append(PATH_SEPARATOR); - } - for (Iterator iter = path.iterator(); iter.hasNext();) { - PathElement item = iter.next(); - buf.append(item.getPathChunk()); - if (iter.hasNext()) { - buf.append(PATH_SEPARATOR); - } - } - if (this.directory) { - buf.append(PATH_SEPARATOR); - } - return buf.toString(); - } - - public int getLength() { - - if (path != null) { - return path.size(); - } else { - return 0; - } - } - - /** - * - * @param position int - * @return PathElement - */ - public PathElement getElementAt(int position) { - - if (position < getLength()) { - return this.path.get(position); - } else { - return null; - } - } - - /** - * - * @param obj Object - * @return boolean - */ - @Override - public boolean equals(Object obj) { - - boolean result = true; - if (!(obj instanceof Path)) { - result = false; - } else { - Path other = (Path) obj; - if (other.getLength() != this.getLength()) { - result = false; - } else { - int size = this.getLength(); - for (int i = 0; i < size; i++) { - if (!(this.getElementAt(i)).equals(other.getElementAt(i))) { - result = false; - break; - } - } - } - } - return result; - } - - /** - * - * @param pathChunk PathElement - */ - public void addPathElement(PathElement pathChunk) { - - this.path.add(pathChunk); - } - - /** - * - * @param elements int - * @return Path - */ - public Path getSubPath(int elements) { - - Path result = new Path(); - for (int i = 0; i < elements; i++) { - result.addPathElement(this.getElementAt(i)); - } - return result; - } - - /** - 
* - * @param wrapperCandidate Path - * @return boolean - */ - public boolean isEnclosed(Path wrapperCandidate) { - - boolean result = false; - if (this.getLength() > wrapperCandidate.getLength()) { - result = false; - } else { - Path other = wrapperCandidate.getSubPath(this.getLength()); - result = other.equals(this); - } - return result; - } - - /** - * - * @param other Path - * @return int - */ - public int distance(Path other) { - - int result = -1; - Path a; - Path b; - if (this.getLength() > other.getLength()) { - a = this; - b = other; - } else { - a = other; - b = this; - } - if (b.isEnclosed(a)) { - result = (a.getLength() - b.getLength()); - } else { - result = a.getLength() + b.getLength(); - } - return result; - } - - /** - * - * @return String - */ - @Override - public String toString() { - - StringBuilder buf = new StringBuilder(); - buf.append("["); - for (int i = 0; i < this.getLength(); i++) { - buf.append(" "); - buf.append(this.getElementAt(i).getPathChunk()); - } - buf.append(" ]"); - return buf.toString(); - } - } - + private static final Logger log = LoggerFactory.getLogger(NamespaceUtil.class); + + /** PRIVATE Constructor */ + private NamespaceUtil() {} + + /** + * Compute the distance between two path. Return -1 when the two path are different completely. + * + * @param path1 String + * @param path2 String + * @return int + */ + public static int computeDistanceFromPath(String path1, String path2) { + + return (new Path(path1)).distance(new Path(path2)); + } + + /** + * Retrieve all path elements within path + * + * @param path String + * @return Collection + */ + public static List getPathElement(String path) { + + return (new Path(path)).getPathElements(); + } + + /** + * getFileName + * + * @param stfn String + * @return String + */ + public static String getFileName(String stfn) { + + if (stfn != null) { + if (stfn.endsWith(NamingConst.SEPARATOR)) { + return ""; + } else { + Path path = new Path(stfn); + int length = path.getLength(); + if (length > 0) { + PathElement elem = path.getElementAt(length - 1); + return elem.toString(); + } else { + return ""; + } + } + } else { + return ""; + } + } + + /** + * Return all the VFS residing on a specified path (mount-point) + * + * @param mountPointPath + * @return the set + */ + public static Collection getResidentVFS(String mountPointPath) { + + List vfsSet = NamespaceDirector.getNamespace().getAllDefinedVFS(); + for (VirtualFS vfs : vfsSet) { + String vfsRootPath; + boolean enclosed; + + vfsRootPath = vfs.getRootPath(); + enclosed = NamespaceUtil.isEnclosed(mountPointPath, vfsRootPath); + if (!enclosed) { + vfsSet.remove(vfs); + } + } + return vfsSet; + } + + public static String consumeFileName(String file) { + + if (file != null) { + if (file.endsWith(NamingConst.SEPARATOR)) { + return file; + } else { + Path path = new Path(file); + int length = path.getLength(); + if (length > 1) { + return path.getSubPath(length - 1).getPath() + NamingConst.SEPARATOR; + } else { + return Path.PATH_SEPARATOR; + } + } + } else { + return Path.PATH_SEPARATOR; + } + } + + /** + * get + * + * @param stfn String + * @return String + */ + public static String getStFNPath(String stfn) { + + return consumeFileName(stfn); + } + + public static String consumeElement(String stfnPath) { + + Path path = new Path(stfnPath); + int length = path.getLength(); + if (length > 1) { + return path.getSubPath(length - 1).getPath() + NamingConst.SEPARATOR; + } else { + return ""; + } + } + + public static String extractRelativePath(String root, String 
absolute) { + + if (absolute.startsWith(root)) { + Path rootPath = new Path(root); + int rootLength = rootPath.getLength(); + + Path absPath = new Path(absolute); + List elem = Lists.newArrayList(); + + for (int i = 0; i < absPath.getLength(); i++) { + // Why use length and not compare single element? + if (i >= rootLength) { + elem.add(absPath.getElementAt(i)); + } + } + Path result = new Path(elem, false); + + return result.getPath(); + } else { + return absolute; + } + } + + /** + * Is the first path within the second one? + * + * @param root + * @param wrapperCandidate + * @return + */ + public static boolean isEnclosed(String root, String wrapperCandidate) { + + boolean result = false; + Path rootPath = new Path(root); + Path wrapperPath = new Path(wrapperCandidate); + result = rootPath.isEnclosed(wrapperPath); + return result; + } + + /** + * @param stfnPath + * @param vfsApproachable + * @return the mapped rule or null if not found + */ + public static MappingRule getWinnerRule( + String stfnPath, + Collection mappingRules, + Collection vfsApproachable) { + + Preconditions.checkNotNull(stfnPath, "Unable to get winning rule: invalid null stfnPath"); + Preconditions.checkNotNull( + mappingRules, "Unable to get winning rule: invalid null mapping rules"); + Preconditions.checkNotNull( + vfsApproachable, "Unable to get winning rule: invalid null VFS list"); + + if (mappingRules.isEmpty()) { + log.warn("Unable to get winning rule: empty mapping rules"); + return null; + } + + if (vfsApproachable.isEmpty()) { + log.debug("Unable to get winning rule: empty VFS list"); + return null; + } + + log.debug("Searching winner rule for {}", stfnPath); + MappingRule winnerRule = null; + + Vector rules = new Vector(mappingRules); + + int minDistance = Integer.MAX_VALUE; + for (MappingRule rule : rules) { + if (isEnclosed(rule.getStFNRoot(), stfnPath) + && vfsApproachable.contains(rule.getMappedFS())) { + int distance = computeDistanceFromPath(rule.getStFNRoot(), stfnPath); + if (distance < minDistance) { + minDistance = distance; + winnerRule = rule; + } + } + } + return winnerRule; + } + + public static MappingRule getWinnerRule( + TSURL surl, Collection mappingRules, Collection vfsApproachable) { + + return getWinnerRule(surl.sfn().stfn().toString(), mappingRules, vfsApproachable); + } + + public static VirtualFS getWinnerVFS( + String absolutePath, Map vfsListByRootPath) throws NamespaceException { + + VirtualFS vfsWinner = null; + int distance = Integer.MAX_VALUE; + for (String vfsRoot : vfsListByRootPath.keySet()) { + int d = computeDistanceFromPath(vfsRoot, absolutePath); + log.debug("Pondering VFS Root '{}' against '{}'. 
Distance = {}", vfsRoot, absolutePath, d); + if (d < distance) { + boolean enclosed = isEnclosed(vfsRoot, absolutePath); + if (enclosed) { + distance = d; + vfsWinner = vfsListByRootPath.get(vfsRoot); + log.debug("Partial winner is {} (VFS: {})", vfsRoot, vfsWinner.getAliasName()); + } + } + } + if (vfsWinner == null) { + log.error("Unable to found a VFS compatible with path: '{}'", absolutePath); + throw new NamespaceException( + "Unable to found a VFS compatible with path :'" + absolutePath + "'"); + } + return vfsWinner; + } + + public static String resolveVOName(String filename, Map vfsListByRootPath) + throws NamespaceException { + + VirtualFS vfs = getWinnerVFS(filename, vfsListByRootPath); + /* NamespaceException raised if vfs is not found => vfs is not null */ + VONameMatchingRule rule = + vfs.getApproachableRules().get(0).getSubjectRules().getVONameMatchingRule(); + return rule.getVOName(); + } + + /** ===================== INNER CLASSES ====================== */ + + /** + * Title: + * + *
<p>
Description: + */ + static class PathElement { + + private final String pathChunk; + + public PathElement(String path) { + + this.pathChunk = path; + } + + public String getPathChunk() { + + return this.pathChunk; + } + + @Override + public int hashCode() { + + return this.pathChunk.hashCode(); + } + + @Override + public boolean equals(Object obj) { + + boolean result = true; + if (!(obj instanceof PathElement)) { + result = false; + } else { + PathElement other = (PathElement) obj; + result = (this.getPathChunk()).equals(other.getPathChunk()); + } + return result; + } + + @Override + public String toString() { + + return pathChunk; + } + } + + /** + * Title: + * + *
<p>
Description: + */ + private static class Path { + + private List path; + private static String PATH_SEPARATOR = "/"; + public static final String[] EMPTY_STRING_ARRAY = {}; + public boolean directory; + public boolean absolutePath; + + public Path() { + + this.path = Lists.newArrayList(); + this.directory = false; + this.absolutePath = true; + } + + public Path(List path, boolean absolutePath) { + + this.path = path; + this.directory = false; + this.absolutePath = absolutePath; + } + + public Path(String path) { + + // Factorize path into array of PathElement... + if (path.startsWith(PATH_SEPARATOR)) { + this.absolutePath = true; + } else { + this.absolutePath = false; + } + if (path.endsWith(PATH_SEPARATOR)) { + this.directory = true; + } else { + this.directory = false; + } + + String[] pathElements = factorizePath(path); + if (pathElements != null) { + // ...and build Path + this.path = Lists.newArrayList(); + for (String pathElement : pathElements) { + addPathElement(new PathElement(pathElement)); + } + } + } + + public String[] factorizePath(String path) { + + return toStringArray(path, PATH_SEPARATOR); + } + + public List getPathElements() { + + List result = Lists.newArrayList(); + Iterator scan = path.iterator(); + while (scan.hasNext()) { + PathElement p = scan.next(); + result.add(p.toString()); + } + return result; + } + + private String[] toStringArray(String value, String delim) { + + if (value != null) { + return split(delim, value); + } else { + return EMPTY_STRING_ARRAY; + } + } + + private String[] split(String seperators, String list) { + + return split(seperators, list, false); + } + + private String[] split(String seperators, String list, boolean include) { + + StringTokenizer tokens = new StringTokenizer(list, seperators, include); + String[] result = new String[tokens.countTokens()]; + int i = 0; + while (tokens.hasMoreTokens()) { + result[i++] = tokens.nextToken(); + } + return result; + } + + public String getPath() { + + StringBuilder buf = new StringBuilder(); + if (this.absolutePath) { + buf.append(PATH_SEPARATOR); + } + for (Iterator iter = path.iterator(); iter.hasNext(); ) { + PathElement item = iter.next(); + buf.append(item.getPathChunk()); + if (iter.hasNext()) { + buf.append(PATH_SEPARATOR); + } + } + if (this.directory) { + buf.append(PATH_SEPARATOR); + } + return buf.toString(); + } + + public int getLength() { + + if (path != null) { + return path.size(); + } else { + return 0; + } + } + + /** + * @param position int + * @return PathElement + */ + public PathElement getElementAt(int position) { + + if (position < getLength()) { + return this.path.get(position); + } else { + return null; + } + } + + /** + * @param obj Object + * @return boolean + */ + @Override + public boolean equals(Object obj) { + + boolean result = true; + if (!(obj instanceof Path)) { + result = false; + } else { + Path other = (Path) obj; + if (other.getLength() != this.getLength()) { + result = false; + } else { + int size = this.getLength(); + for (int i = 0; i < size; i++) { + if (!(this.getElementAt(i)).equals(other.getElementAt(i))) { + result = false; + break; + } + } + } + } + return result; + } + + /** @param pathChunk PathElement */ + public void addPathElement(PathElement pathChunk) { + + this.path.add(pathChunk); + } + + /** + * @param elements int + * @return Path + */ + public Path getSubPath(int elements) { + + Path result = new Path(); + for (int i = 0; i < elements; i++) { + result.addPathElement(this.getElementAt(i)); + } + return result; + } + + /** + * @param 
wrapperCandidate Path + * @return boolean + */ + public boolean isEnclosed(Path wrapperCandidate) { + + boolean result = false; + if (this.getLength() > wrapperCandidate.getLength()) { + result = false; + } else { + Path other = wrapperCandidate.getSubPath(this.getLength()); + result = other.equals(this); + } + return result; + } + + /** + * @param other Path + * @return int + */ + public int distance(Path other) { + + int result = -1; + Path a; + Path b; + if (this.getLength() > other.getLength()) { + a = this; + b = other; + } else { + a = other; + b = this; + } + if (b.isEnclosed(a)) { + result = (a.getLength() - b.getLength()); + } else { + result = a.getLength() + b.getLength(); + } + return result; + } + + /** @return String */ + @Override + public String toString() { + + StringBuilder buf = new StringBuilder(); + buf.append("["); + for (int i = 0; i < this.getLength(); i++) { + buf.append(" "); + buf.append(this.getElementAt(i).getPathChunk()); + } + buf.append(" ]"); + return buf.toString(); + } + } } diff --git a/src/main/java/it/grid/storm/namespace/naming/NamingConst.java b/src/main/java/it/grid/storm/namespace/naming/NamingConst.java index 88d819ae..354dc460 100644 --- a/src/main/java/it/grid/storm/namespace/naming/NamingConst.java +++ b/src/main/java/it/grid/storm/namespace/naming/NamingConst.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; @@ -8,42 +7,36 @@ public class NamingConst { - /** - * The separator character used in file paths. - */ - public static final char SEPARATOR_CHAR = '/'; + /** The separator character used in file paths. */ + public static final char SEPARATOR_CHAR = '/'; - /** - * The separator used in file paths. - */ - public static final String SEPARATOR = "/"; + /** The separator used in file paths. */ + public static final String SEPARATOR = "/"; - /** - * The absolute path of the root of a file system. - */ - public static final String ROOT_PATH = "/"; + /** The absolute path of the root of a file system. */ + public static final String ROOT_PATH = "/"; - private static NamingConst instance = new NamingConst(); + private static NamingConst instance = new NamingConst(); - private final Configuration config; + private final Configuration config; - private NamingConst() { + private NamingConst() { - config = Configuration.getInstance(); - } + config = Configuration.getInstance(); + } - public static String getServiceDefaultHost() { + public static String getServiceDefaultHost() { - return instance.config.getServiceHostname(); - } + return instance.config.getServiceHostname(); + } - public static int getServicePort() { + public static int getServicePort() { - return instance.config.getServicePort(); - } + return instance.config.getServicePort(); + } - public static String getServiceSFNQueryPrefix() { + public static String getServiceSFNQueryPrefix() { - return "SFN"; - } + return "SFN"; + } } diff --git a/src/main/java/it/grid/storm/namespace/naming/SRMURL.java b/src/main/java/it/grid/storm/namespace/naming/SRMURL.java index 2dc268df..298bc690 100644 --- a/src/main/java/it/grid/storm/namespace/naming/SRMURL.java +++ b/src/main/java/it/grid/storm/namespace/naming/SRMURL.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; @@ -8,304 +7,273 @@ public abstract class SRMURL { - protected TransportProtocol transfProtocol; - protected SRMURLType surlType = null; - /** - * If this is a normal form SRMURL path contains the file path, it instead is - * a query form SRMURL path contains the service endpoint - */ - protected String path; - protected String queryString = null; + protected TransportProtocol transfProtocol; + protected SRMURLType surlType = null; + /** + * If this is a normal form SRMURL path contains the file path, it instead is a query form SRMURL + * path contains the service endpoint + */ + protected String path; - protected int local = -1; // undef - protected boolean localSURL = false; + protected String queryString = null; - public SRMURL(Protocol protocol, String hostname, int port, - String servicePath, String queryString) { + protected int local = -1; // undef + protected boolean localSURL = false; - Authority authority = new Authority(hostname, port); - this.transfProtocol = new TransportProtocol(protocol, authority); - // The path and the query string must to be expressed in absolute form! - this.path = makeInAbsoluteForm(servicePath); - if (queryString != null) { - this.queryString = makeInAbsoluteForm(queryString); - } + public SRMURL( + Protocol protocol, String hostname, int port, String servicePath, String queryString) { - } + Authority authority = new Authority(hostname, port); + this.transfProtocol = new TransportProtocol(protocol, authority); + // The path and the query string must to be expressed in absolute form! + this.path = makeInAbsoluteForm(servicePath); + if (queryString != null) { + this.queryString = makeInAbsoluteForm(queryString); + } + } - public SRMURL(Protocol protocol, String hostname, int port, String stfn) { + public SRMURL(Protocol protocol, String hostname, int port, String stfn) { - Authority autority = new Authority(hostname, port); - this.transfProtocol = new TransportProtocol(protocol, autority); + Authority autority = new Authority(hostname, port); + this.transfProtocol = new TransportProtocol(protocol, autority); - // The path and the query string must to be expressed in absolute form! - this.path = makeInAbsoluteForm(stfn); - } + // The path and the query string must to be expressed in absolute form! 
+ this.path = makeInAbsoluteForm(stfn); + } - /** - * Provides from the received path string a string that starts with - * NamingConst.ROOT_PATH ("/") - * - * @param path - * @return - */ - private String makeInAbsoluteForm(String path) { + /** + * Provides from the received path string a string that starts with NamingConst.ROOT_PATH ("/") + * + * @param path + * @return + */ + private String makeInAbsoluteForm(String path) { - StringBuilder absolutePath = new StringBuilder(); + StringBuilder absolutePath = new StringBuilder(); - if ((path == null) || (path.length() == 0)) { - absolutePath.append(NamingConst.ROOT_PATH); - } else { - if (path.charAt(0) != NamingConst.SEPARATOR_CHAR) { - absolutePath.insert(0, NamingConst.ROOT_PATH); - } - absolutePath.append(path); - } - return absolutePath.toString(); - } - - /** - * @param hostname - */ - public void setServiceHostName(String hostname) { - - this.transfProtocol.setAuthority(new Authority(hostname)); - } - - public String getServiceHostname() { - - return this.transfProtocol.getAuthority().getServiceHostname(); - } - - public void setServiceHostPort(int port) { - - this.transfProtocol.getAuthority().setServicePort(port); - } - - public int getServiceHostPort() { - - return this.transfProtocol.getAuthority().getServicePort(); - } - - public String getPath() { - - return path; - } - - public String getQueryString() { - - return queryString; - } - - public String getTransportPrefix() { - - return transfProtocol.toString(); - } - - public String getSURLType() { - - if (surlType == null) { - surlType = computeType(); - } - return surlType.toString(); - } - - /** - * Returns true if the hostname of this srmurl is the one specified in - * configuration file field "storm.service.FE-public.hostname" - * - * @return - */ - public boolean isLocal() { - - if (local == -1) { - localSURL = getServiceHostname().equals( - NamingConst.getServiceDefaultHost()); - local = 1; - } - return localSURL; - } - - public boolean isQueriedFormSURL() { - - if (surlType == null) { - surlType = computeType(); - } - return (surlType.equals(SRMURLType.QUERIED)); - } - - public boolean isNormalFormSURL() { - - return (!(isQueriedFormSURL())); - } - - private SRMURLType computeType() { - - if (this.getQueryString() != null) { - return SRMURLType.QUERIED; - } else { - return SRMURLType.SIMPLE; - } - } - - /** - * If this is a queri form SRMURL returns the service endpoint, an empty - * string otherwise - * - * @return - */ - public String getServiceEndPoint() { - - if (isQueriedFormSURL()) { - return getPath(); - } else { - return ""; - } - } - - public String getStFN() { - - if (isQueriedFormSURL()) { - return this.getQueryString(); - } else { // In this case the path represents the StFN - return this.getPath(); - } - } - - /** - * - *

<p> - * Title: - * </p> - * - * <p> - * Description: - * </p> - * - * <p> - * Copyright: Copyright (c) 2005 - * </p> - * - * <p> - * Company: - * </p>
- * - * @author not attributable - * @version 1.0 - */ - protected static class SRMURLType { - - private String type; - public final static SRMURLType QUERIED = new SRMURLType("query_form"); - public final static SRMURLType SIMPLE = new SRMURLType("simple_form"); - - private SRMURLType(String type) { - - this.type = type; - } - - public String toString() { - - return type; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (obj == null) { - return false; - } - if (!(obj instanceof SRMURLType)) { - return false; - } - SRMURLType other = (SRMURLType) obj; - if (type == null) { - if (other.type != null) { - return false; - } - } else if (!type.equals(other.type)) { - return false; - } - return true; - } - - @Override - public int hashCode() { - - final int prime = 31; - int result = 17; - result = prime * result + type.hashCode(); - return result; - } - - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 17; - result = prime * result + local; - result = prime * result + (localSURL ? 1231 : 1237); - result = prime * result + ((path == null) ? 0 : path.hashCode()); - result = prime * result - + ((queryString == null) ? 0 : queryString.hashCode()); - result = prime * result + ((surlType == null) ? 0 : surlType.hashCode()); - result = prime * result - + ((transfProtocol == null) ? 0 : transfProtocol.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) - return true; - if (obj == null) - return false; - if (!(obj instanceof SRMURL)) - return false; - SRMURL other = (SRMURL) obj; - if (local != other.local) - return false; - if (localSURL != other.localSURL) - return false; - if (path == null) { - if (other.path != null) - return false; - } else if (!path.equals(other.path)) - return false; - if (queryString == null) { - if (other.queryString != null) - return false; - } else if (!queryString.equals(other.queryString)) - return false; - if (surlType == null) { - if (other.surlType != null) - return false; - } else if (!surlType.equals(other.surlType)) - return false; - if (transfProtocol == null) { - if (other.transfProtocol != null) - return false; - } else if (!transfProtocol.equals(other.transfProtocol)) - return false; - return true; - } + if ((path == null) || (path.length() == 0)) { + absolutePath.append(NamingConst.ROOT_PATH); + } else { + if (path.charAt(0) != NamingConst.SEPARATOR_CHAR) { + absolutePath.insert(0, NamingConst.ROOT_PATH); + } + absolutePath.append(path); + } + return absolutePath.toString(); + } + /** @param hostname */ + public void setServiceHostName(String hostname) { + + this.transfProtocol.setAuthority(new Authority(hostname)); + } + + public String getServiceHostname() { + + return this.transfProtocol.getAuthority().getServiceHostname(); + } + + public void setServiceHostPort(int port) { + + this.transfProtocol.getAuthority().setServicePort(port); + } + + public int getServiceHostPort() { + + return this.transfProtocol.getAuthority().getServicePort(); + } + + public String getPath() { + + return path; + } + + public String getQueryString() { + + return queryString; + } + + public String getTransportPrefix() { + + return transfProtocol.toString(); + } + + public String getSURLType() { + + if (surlType == null) { + surlType = computeType(); + 
} + return surlType.toString(); + } + + /** + * Returns true if the hostname of this srmurl is the one specified in configuration file field + * "storm.service.FE-public.hostname" + * + * @return + */ + public boolean isLocal() { + + if (local == -1) { + localSURL = getServiceHostname().equals(NamingConst.getServiceDefaultHost()); + local = 1; + } + return localSURL; + } + + public boolean isQueriedFormSURL() { + + if (surlType == null) { + surlType = computeType(); + } + return (surlType.equals(SRMURLType.QUERIED)); + } + + public boolean isNormalFormSURL() { + + return (!(isQueriedFormSURL())); + } + + private SRMURLType computeType() { + + if (this.getQueryString() != null) { + return SRMURLType.QUERIED; + } else { + return SRMURLType.SIMPLE; + } + } + + /** + * If this is a queri form SRMURL returns the service endpoint, an empty string otherwise + * + * @return + */ + public String getServiceEndPoint() { + + if (isQueriedFormSURL()) { + return getPath(); + } else { + return ""; + } + } + + public String getStFN() { + + if (isQueriedFormSURL()) { + return this.getQueryString(); + } else { // In this case the path represents the StFN + return this.getPath(); + } + } + + /** + * Title: + * + *

<p>Description: + * + * <p>Copyright: Copyright (c) 2005 + * + * <p>
Company: + * + * @author not attributable + * @version 1.0 + */ + protected static class SRMURLType { + + private String type; + public static final SRMURLType QUERIED = new SRMURLType("query_form"); + public static final SRMURLType SIMPLE = new SRMURLType("simple_form"); + + private SRMURLType(String type) { + + this.type = type; + } + + public String toString() { + + return type; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (obj == null) { + return false; + } + if (!(obj instanceof SRMURLType)) { + return false; + } + SRMURLType other = (SRMURLType) obj; + if (type == null) { + if (other.type != null) { + return false; + } + } else if (!type.equals(other.type)) { + return false; + } + return true; + } + + @Override + public int hashCode() { + + final int prime = 31; + int result = 17; + result = prime * result + type.hashCode(); + return result; + } + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 17; + result = prime * result + local; + result = prime * result + (localSURL ? 1231 : 1237); + result = prime * result + ((path == null) ? 0 : path.hashCode()); + result = prime * result + ((queryString == null) ? 0 : queryString.hashCode()); + result = prime * result + ((surlType == null) ? 0 : surlType.hashCode()); + result = prime * result + ((transfProtocol == null) ? 0 : transfProtocol.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) return true; + if (obj == null) return false; + if (!(obj instanceof SRMURL)) return false; + SRMURL other = (SRMURL) obj; + if (local != other.local) return false; + if (localSURL != other.localSURL) return false; + if (path == null) { + if (other.path != null) return false; + } else if (!path.equals(other.path)) return false; + if (queryString == null) { + if (other.queryString != null) return false; + } else if (!queryString.equals(other.queryString)) return false; + if (surlType == null) { + if (other.surlType != null) return false; + } else if (!surlType.equals(other.surlType)) return false; + if (transfProtocol == null) { + if (other.transfProtocol != null) return false; + } else if (!transfProtocol.equals(other.transfProtocol)) return false; + return true; + } } diff --git a/src/main/java/it/grid/storm/namespace/naming/SURL.java b/src/main/java/it/grid/storm/namespace/naming/SURL.java index 7a405eef..8a37d743 100644 --- a/src/main/java/it/grid/storm/namespace/naming/SURL.java +++ b/src/main/java/it/grid/storm/namespace/naming/SURL.java @@ -1,272 +1,266 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.Protocol; - import java.net.URI; import java.util.ArrayList; - import org.slf4j.Logger; /** - *

<p> * Title: - * </p> - * - * <p> - * Description: - * </p> - * - * <p> - * Copyright: Copyright (c) 2006 - * </p> - * - * <p> - * Company: INFN-CNAF and ICTP/eGrid project - * </p> - * + * + * <p>Description: + * + * <p>Copyright: Copyright (c) 2006 + * + * <p>
Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ public class SURL extends SRMURL { - private static Logger log = NamespaceDirector.getLogger(); - private static ArrayList schemes = new ArrayList(); - - static { - schemes.add("srm"); - } - - public final boolean directory; - - private SURL(final String hostName, final int port, - final String serviceEndpoint, final String queryString) { - - super(Protocol.SRM, hostName, port, serviceEndpoint, queryString); - directory = checkDirectory(queryString); - } - - private SURL(final String hostName, final int port, final String stfn) { - - super(Protocol.SRM, hostName, port, stfn); - directory = checkDirectory(stfn); - } - - // TODO MICHELE USER_SURL debug - public SURL(final String stfn) { - - super(Protocol.SRM, NamingConst.getServiceDefaultHost(), NamingConst - .getServicePort(), stfn); - directory = checkDirectory(stfn); - } - - /** - * Build SURL from the string format. Many control will be executed in the - * string format No other way to create a SURL, if u got a SURL for sure it's - * a valid URI normalized - * - * @param surlString - * String - * @return SURL - */ - public static SURL makeSURLfromString(String surlString) - throws NamespaceException { - - SURL result = null; - - // checks if is a valid uri and normalize - URI uri = null; - try { - uri = URI.create(surlString); - } catch (IllegalArgumentException uriEx) { - throw new NamespaceException("SURL_String :'" + surlString - + "' is INVALID. Reason: URI Except: " + uriEx.getMessage()); - } catch (NullPointerException npe) { - throw new NamespaceException("SURL_String :'" + surlString - + "' is INVALID. Reason: URI Except (null SURL): " + npe.getMessage()); - } - - // Check the scheme - // uri should be not null - String scheme = uri.getScheme(); - if (!(schemes.contains(scheme))) { - throw new NamespaceException("SURL_String :'" + surlString - + "' is INVALID. Reason: unknown scheme '" + scheme + "'"); - } - - // Check the query - String host = uri.getHost(); - if (host == null) { - throw new NamespaceException("SURL_String :'" + surlString - + "' is INVALID. Reason: malformed host!"); - } - int port = uri.getPort(); - String query = uri.getQuery(); - if (query == null || query.trim().equals("")) { - String stfn = uri.getPath(); - result = new SURL(host, port, stfn); - } else { - // The SURL_Str is in a Query FORM. - log.debug(" !! SURL ('" + surlString + "') in a query form (query:'" - + query + "') !!"); - String service = uri.getPath(); - log.debug(" Service endpoint : " + service); - if (checkQuery(query)) { - log.debug(" Query is in a valid form."); - // Extract the StFN from query: - String stfn = extractStFNfromQuery(query); - result = new SURL(host, port, service, stfn); - } else { - log.warn("SURL_String :'" + surlString - + "' is not VALID! 
(query is in invalid form)"); - throw new NamespaceException("SURL_String :'" + surlString - + "' is not VALID within the Query!"); - } - } - return result; - } - - public String getQueryFormAsString() { - if (this.isNormalFormSURL()) { - String uriString = transfProtocol.getProtocol().getSchema() + "://" - + this.transfProtocol.getAuthority().getServiceHostname(); - if (this.transfProtocol.getAuthority().getServicePort() >= 0) { - uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); - } - uriString += "/srm/managerv2?SFN=" + this.path; - return uriString; - } - return this.getSURLAsURIString(); - } - - public String getNormalFormAsString() { - if (this.isQueriedFormSURL()) { - String uriString = transfProtocol.getProtocol().getSchema() + "://" - + this.transfProtocol.getAuthority().getServiceHostname(); - if (this.transfProtocol.getAuthority().getServicePort() >= 0) { - uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); - } - uriString += this.getStFN(); - return uriString; - } - return this.getSURLAsURIString(); - } - - public boolean isDirectory() { - - return directory; - } - - private boolean checkDirectory(String path) { - - if (path != null && path.endsWith(NamingConst.SEPARATOR)) { - return true; - } else { - return false; - } - } - - /** - * - * Checks if the query string begins with the correct prefix ("SFN=") - * - * @param query - * @return - */ - private static boolean checkQuery(String query) { - - if (query == null) { - log.error("Received a null query to check!"); - return false; - } - return query.startsWith(NamingConst.getServiceSFNQueryPrefix() + "="); - } - - private static String extractStFNfromQuery(String query) { - - String stfn = ""; - if (query == null) { - return stfn; - } else { - int len = query.length(); - if (len < 4) { - return stfn; - } else { - stfn = query.substring(4); - } - } - return stfn; - } - - /** - * get the path and query string e.g. /path/service?SFN=pippo.txt if query - * form e.g /path/pippo.txt if simple form - * - * @return the path and its query string - */ - public String getPathQuery() { - - StringBuilder sb = new StringBuilder(250); - sb.append(getPath()); - if (this.isQueriedFormSURL()) { - sb.append("?"); - sb.append(NamingConst.getServiceSFNQueryPrefix()); - sb.append("="); - sb.append(getQueryString()); - } - return sb.toString(); - } - - public String getSURLAsURIString() { - - String uriString = transfProtocol.getProtocol().getSchema() + "://" - + this.transfProtocol.getAuthority().getServiceHostname(); - if (this.transfProtocol.getAuthority().getServicePort() >= 0) { - uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); - } - if (this.isNormalFormSURL()) { - uriString += this.path; - } else { - uriString += this.getPathQuery(); - } - return uriString; - } - - @Override - public String toString() { - - StringBuilder buffer = new StringBuilder(); - buffer.append(this.transfProtocol.toString()); - buffer.append(this.getPathQuery()); - return buffer.toString(); - } - - @Override - public int hashCode() { - - int result = super.hashCode(); - result += 37 * schemes.hashCode() + 63 * (directory ? 
1 : 0); - return result; - } - - /* - * - */ - @Override - public boolean equals(Object obj) { - - if (!super.equals(obj)) - return false; - if (!(obj instanceof SURL)) - return false; - SURL other = (SURL) obj; - if (directory != other.directory) - return false; - return true; - } + private static Logger log = NamespaceDirector.getLogger(); + private static ArrayList schemes = new ArrayList(); + + static { + schemes.add("srm"); + } + + public final boolean directory; + + private SURL( + final String hostName, + final int port, + final String serviceEndpoint, + final String queryString) { + + super(Protocol.SRM, hostName, port, serviceEndpoint, queryString); + directory = checkDirectory(queryString); + } + + private SURL(final String hostName, final int port, final String stfn) { + + super(Protocol.SRM, hostName, port, stfn); + directory = checkDirectory(stfn); + } + + // TODO MICHELE USER_SURL debug + public SURL(final String stfn) { + + super(Protocol.SRM, NamingConst.getServiceDefaultHost(), NamingConst.getServicePort(), stfn); + directory = checkDirectory(stfn); + } + + /** + * Build SURL from the string format. Many control will be executed in the string format No other + * way to create a SURL, if u got a SURL for sure it's a valid URI normalized + * + * @param surlString String + * @return SURL + */ + public static SURL makeSURLfromString(String surlString) throws NamespaceException { + + SURL result = null; + + // checks if is a valid uri and normalize + URI uri = null; + try { + uri = URI.create(surlString); + } catch (IllegalArgumentException uriEx) { + throw new NamespaceException( + "SURL_String :'" + + surlString + + "' is INVALID. Reason: URI Except: " + + uriEx.getMessage()); + } catch (NullPointerException npe) { + throw new NamespaceException( + "SURL_String :'" + + surlString + + "' is INVALID. Reason: URI Except (null SURL): " + + npe.getMessage()); + } + + // Check the scheme + // uri should be not null + String scheme = uri.getScheme(); + if (!(schemes.contains(scheme))) { + throw new NamespaceException( + "SURL_String :'" + surlString + "' is INVALID. Reason: unknown scheme '" + scheme + "'"); + } + + // Check the query + String host = uri.getHost(); + if (host == null) { + throw new NamespaceException( + "SURL_String :'" + surlString + "' is INVALID. Reason: malformed host!"); + } + int port = uri.getPort(); + String query = uri.getQuery(); + if (query == null || query.trim().equals("")) { + String stfn = uri.getPath(); + result = new SURL(host, port, stfn); + } else { + // The SURL_Str is in a Query FORM. + log.debug(" !! SURL ('" + surlString + "') in a query form (query:'" + query + "') !!"); + String service = uri.getPath(); + log.debug(" Service endpoint : " + service); + if (checkQuery(query)) { + log.debug(" Query is in a valid form."); + // Extract the StFN from query: + String stfn = extractStFNfromQuery(query); + result = new SURL(host, port, service, stfn); + } else { + log.warn("SURL_String :'" + surlString + "' is not VALID! 
(query is in invalid form)"); + throw new NamespaceException( + "SURL_String :'" + surlString + "' is not VALID within the Query!"); + } + } + return result; + } + + public String getQueryFormAsString() { + if (this.isNormalFormSURL()) { + String uriString = + transfProtocol.getProtocol().getSchema() + + "://" + + this.transfProtocol.getAuthority().getServiceHostname(); + if (this.transfProtocol.getAuthority().getServicePort() >= 0) { + uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); + } + uriString += "/srm/managerv2?SFN=" + this.path; + return uriString; + } + return this.getSURLAsURIString(); + } + + public String getNormalFormAsString() { + if (this.isQueriedFormSURL()) { + String uriString = + transfProtocol.getProtocol().getSchema() + + "://" + + this.transfProtocol.getAuthority().getServiceHostname(); + if (this.transfProtocol.getAuthority().getServicePort() >= 0) { + uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); + } + uriString += this.getStFN(); + return uriString; + } + return this.getSURLAsURIString(); + } + + public boolean isDirectory() { + + return directory; + } + + private boolean checkDirectory(String path) { + + if (path != null && path.endsWith(NamingConst.SEPARATOR)) { + return true; + } else { + return false; + } + } + + /** + * Checks if the query string begins with the correct prefix ("SFN=") + * + * @param query + * @return + */ + private static boolean checkQuery(String query) { + + if (query == null) { + log.error("Received a null query to check!"); + return false; + } + return query.startsWith(NamingConst.getServiceSFNQueryPrefix() + "="); + } + + private static String extractStFNfromQuery(String query) { + + String stfn = ""; + if (query == null) { + return stfn; + } else { + int len = query.length(); + if (len < 4) { + return stfn; + } else { + stfn = query.substring(4); + } + } + return stfn; + } + + /** + * get the path and query string e.g. /path/service?SFN=pippo.txt if query form e.g + * /path/pippo.txt if simple form + * + * @return the path and its query string + */ + public String getPathQuery() { + + StringBuilder sb = new StringBuilder(250); + sb.append(getPath()); + if (this.isQueriedFormSURL()) { + sb.append("?"); + sb.append(NamingConst.getServiceSFNQueryPrefix()); + sb.append("="); + sb.append(getQueryString()); + } + return sb.toString(); + } + + public String getSURLAsURIString() { + + String uriString = + transfProtocol.getProtocol().getSchema() + + "://" + + this.transfProtocol.getAuthority().getServiceHostname(); + if (this.transfProtocol.getAuthority().getServicePort() >= 0) { + uriString += ":" + this.transfProtocol.getAuthority().getServicePort(); + } + if (this.isNormalFormSURL()) { + uriString += this.path; + } else { + uriString += this.getPathQuery(); + } + return uriString; + } + + @Override + public String toString() { + + StringBuilder buffer = new StringBuilder(); + buffer.append(this.transfProtocol.toString()); + buffer.append(this.getPathQuery()); + return buffer.toString(); + } + + @Override + public int hashCode() { + + int result = super.hashCode(); + result += 37 * schemes.hashCode() + 63 * (directory ? 
1 : 0); + return result; + } + + /* + * + */ + @Override + public boolean equals(Object obj) { + + if (!super.equals(obj)) return false; + if (!(obj instanceof SURL)) return false; + SURL other = (SURL) obj; + if (directory != other.directory) return false; + return true; + } } diff --git a/src/main/java/it/grid/storm/namespace/naming/TURL.java b/src/main/java/it/grid/storm/namespace/naming/TURL.java index ed4e2e4c..11a7866c 100644 --- a/src/main/java/it/grid/storm/namespace/naming/TURL.java +++ b/src/main/java/it/grid/storm/namespace/naming/TURL.java @@ -1,28 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.naming; /** - *

<p> * Title: - * </p> - * - * <p> - * Description: - * </p> - * - * <p> - * Copyright: Copyright (c) 2006 - * </p> - * - * <p> - * Company: INFN-CNAF and ICTP/eGrid project - * </p> - * + * + * <p>Description: + * + * <p>Copyright: Copyright (c) 2006 + * + * <p>
Company: INFN-CNAF and ICTP/eGrid project + * * @author Riccardo Zappi * @version 1.0 */ -public class TURL { -} +public class TURL {} diff --git a/src/main/java/it/grid/storm/namespace/remote/Constants.java b/src/main/java/it/grid/storm/namespace/remote/Constants.java index b07b8b72..5d65ade9 100644 --- a/src/main/java/it/grid/storm/namespace/remote/Constants.java +++ b/src/main/java/it/grid/storm/namespace/remote/Constants.java @@ -1,34 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class Constants { - public static final String ENCODING_SCHEME = "UTF-8"; - public static final String RESOURCE = "configuration"; - public static final String VERSION_1_0 = "1.0"; - public static final String VERSION_1_1 = "1.1"; - public static final String VERSION_1_2 = "1.2"; - public static final String VERSION_1_3 = "1.3"; - public static final String VERSION = "1.4"; - public static final String LIST_ALL_KEY = "StorageAreaList"; - public static final char VFS_LIST_SEPARATOR = ':'; - public static final String VFS_NAME_KEY = "name"; - public static final char VFS_FIELD_MATCHER = '='; - public static final char VFS_FIELD_SEPARATOR = '&'; - public static final String VFS_ROOT_KEY = "root"; - public static final String VFS_STFN_ROOT_KEY = "stfnRoot"; - public static final char VFS_STFN_ROOT_SEPARATOR = ';'; - public static final String VFS_ENABLED_PROTOCOLS_KEY = "protocols"; - public static final char VFS_ENABLED_PROTOCOLS_SEPARATOR = ';'; - public static final String VFS_ANONYMOUS_PERMS_KEY = "anonymous"; - public static final String LIST_ALL_VFS = "VirtualFSList"; - - public static enum HttpPerms { NOREAD, READ, READWRITE }; + public static final String ENCODING_SCHEME = "UTF-8"; + public static final String RESOURCE = "configuration"; + public static final String VERSION_1_0 = "1.0"; + public static final String VERSION_1_1 = "1.1"; + public static final String VERSION_1_2 = "1.2"; + public static final String VERSION_1_3 = "1.3"; + public static final String VERSION = "1.4"; + public static final String LIST_ALL_KEY = "StorageAreaList"; + public static final char VFS_LIST_SEPARATOR = ':'; + public static final String VFS_NAME_KEY = "name"; + public static final char VFS_FIELD_MATCHER = '='; + public static final char VFS_FIELD_SEPARATOR = '&'; + public static final String VFS_ROOT_KEY = "root"; + public static final String VFS_STFN_ROOT_KEY = "stfnRoot"; + public static final char VFS_STFN_ROOT_SEPARATOR = ';'; + public static final String VFS_ENABLED_PROTOCOLS_KEY = "protocols"; + public static final char VFS_ENABLED_PROTOCOLS_SEPARATOR = ';'; + public static final String VFS_ANONYMOUS_PERMS_KEY = "anonymous"; + public static final String LIST_ALL_VFS = "VirtualFSList"; + public static enum HttpPerms { + NOREAD, + READ, + READWRITE + }; } diff --git a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResource.java b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResource.java index d2b6823b..8bda2d28 100644 --- a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResource.java +++ b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResource.java @@ -1,39 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote.resource; +import com.google.common.collect.Maps; +import it.grid.storm.namespace.NamespaceDirector; +import it.grid.storm.namespace.NamespaceException; +import it.grid.storm.namespace.model.SAInfo; +import it.grid.storm.namespace.model.VirtualFS; +import it.grid.storm.namespace.remote.Constants; import java.util.List; import java.util.Map; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Maps; - -import it.grid.storm.namespace.NamespaceDirector; -import it.grid.storm.namespace.NamespaceException; -import it.grid.storm.namespace.model.SAInfo; -import it.grid.storm.namespace.model.VirtualFS; -import it.grid.storm.namespace.remote.Constants; - -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION) public class VirtualFSResource { private static final Logger log = LoggerFactory.getLogger(VirtualFSResource.class); - /** - * @return - */ + /** @return */ @GET @Path("/" + Constants.LIST_ALL_VFS) @Produces(MediaType.APPLICATION_JSON) @@ -53,5 +44,4 @@ public Map listVFS() { return output; } - } diff --git a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_0.java b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_0.java index 42d0688f..dd8e7a98 100644 --- a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_0.java +++ b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_0.java @@ -1,39 +1,31 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote.resource; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; +import it.grid.storm.namespace.NamespaceDirector; +import it.grid.storm.namespace.NamespaceException; +import it.grid.storm.namespace.model.MappingRule; +import it.grid.storm.namespace.model.VirtualFS; +import it.grid.storm.namespace.remote.Constants; import java.util.List; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.namespace.NamespaceDirector; -import it.grid.storm.namespace.NamespaceException; -import it.grid.storm.namespace.model.MappingRule; -import it.grid.storm.namespace.model.VirtualFS; -import it.grid.storm.namespace.remote.Constants; - -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION_1_0) public class VirtualFSResourceCompat_1_0 { private static final Logger log = LoggerFactory.getLogger(VirtualFSResourceCompat_1_0.class); - /** - * @return - */ + /** @return */ @GET @Path("/" + Constants.LIST_ALL_KEY) @Produces("text/plain") @@ -49,11 +41,12 @@ public String listVFS() { try { vfsListString += encodeVFS(vfs); } catch (NamespaceException e) { - log.error("Unable to encode the virtual file system. 
NamespaceException : {}", - e.getMessage()); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to encode the virtual file system") - .build()); + log.error( + "Unable to encode the virtual file system. NamespaceException : {}", e.getMessage()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to encode the virtual file system") + .build()); } } return vfsListString; diff --git a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_1.java b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_1.java index 8873f373..ba5e7c2f 100644 --- a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_1.java +++ b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_1.java @@ -1,41 +1,33 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote.resource; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; +import it.grid.storm.namespace.NamespaceDirector; +import it.grid.storm.namespace.NamespaceException; +import it.grid.storm.namespace.model.MappingRule; +import it.grid.storm.namespace.model.Protocol; +import it.grid.storm.namespace.model.VirtualFS; +import it.grid.storm.namespace.remote.Constants; import java.util.Iterator; import java.util.List; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.namespace.NamespaceDirector; -import it.grid.storm.namespace.NamespaceException; -import it.grid.storm.namespace.model.MappingRule; -import it.grid.storm.namespace.model.Protocol; -import it.grid.storm.namespace.model.VirtualFS; -import it.grid.storm.namespace.remote.Constants; - -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION_1_1) public class VirtualFSResourceCompat_1_1 { private static final Logger log = LoggerFactory.getLogger(VirtualFSResourceCompat_1_1.class); - /** - * @return - */ + /** @return */ @GET @Path("/" + Constants.LIST_ALL_KEY) @Produces("text/plain") @@ -51,11 +43,12 @@ public String listVFS() { try { vfsListString += encodeVFS(vfs); } catch (NamespaceException e) { - log.error("Unable to encode the virtual file system. NamespaceException : {}", - e.getMessage()); - throw new WebApplicationException(Response.status(INTERNAL_SERVER_ERROR) - .entity("Unable to encode the virtual file system") - .build()); + log.error( + "Unable to encode the virtual file system. 
NamespaceException : {}", e.getMessage()); + throw new WebApplicationException( + Response.status(INTERNAL_SERVER_ERROR) + .entity("Unable to encode the virtual file system") + .build()); } } return vfsListString; diff --git a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_2.java b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_2.java index fcbf651d..920c5c8a 100644 --- a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_2.java +++ b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_2.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote.resource; @@ -8,39 +7,31 @@ import static java.lang.String.join; import static java.lang.String.valueOf; +import com.google.common.collect.Lists; +import it.grid.storm.namespace.NamespaceDirector; +import it.grid.storm.namespace.NamespaceException; +import it.grid.storm.namespace.model.MappingRule; +import it.grid.storm.namespace.model.Protocol; +import it.grid.storm.namespace.model.VirtualFS; +import it.grid.storm.namespace.remote.Constants; +import it.grid.storm.namespace.remote.Constants.HttpPerms; import java.util.Iterator; import java.util.List; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - -import it.grid.storm.namespace.NamespaceDirector; -import it.grid.storm.namespace.NamespaceException; -import it.grid.storm.namespace.model.MappingRule; -import it.grid.storm.namespace.model.Protocol; -import it.grid.storm.namespace.model.VirtualFS; -import it.grid.storm.namespace.remote.Constants; -import it.grid.storm.namespace.remote.Constants.HttpPerms; - -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION_1_2) public class VirtualFSResourceCompat_1_2 { private static final Logger log = LoggerFactory.getLogger(VirtualFSResourceCompat_1_2.class); - /** - * @return - */ + /** @return */ @GET @Path("/" + Constants.LIST_ALL_KEY) @Produces("text/plain") @@ -49,16 +40,18 @@ public String listVFS() { log.info("Serving VFS resource listing"); List vfsCollection = NamespaceDirector.getNamespace().getAllDefinedVFS(); List encodedVFSs = Lists.newArrayList(); - vfsCollection.forEach(vfs -> { - try { - encodedVFSs.add(encodeVFS(vfs)); - } catch (NamespaceException e) { - log.error( - "Unable to encode the virtual file system. NamespaceException : {}", e.getMessage()); - throw new WebApplicationException( - Response.serverError().entity("Unable to encode the virtual file system").build()); - } - }); + vfsCollection.forEach( + vfs -> { + try { + encodedVFSs.add(encodeVFS(vfs)); + } catch (NamespaceException e) { + log.error( + "Unable to encode the virtual file system. 
NamespaceException : {}", + e.getMessage()); + throw new WebApplicationException( + Response.serverError().entity("Unable to encode the virtual file system").build()); + } + }); return join(valueOf(VFS_LIST_SEPARATOR), encodedVFSs); } diff --git a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_3.java b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_3.java index bc0c9622..de174a1a 100644 --- a/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_3.java +++ b/src/main/java/it/grid/storm/namespace/remote/resource/VirtualFSResourceCompat_1_3.java @@ -1,39 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.remote.resource; +import com.google.common.collect.Maps; +import it.grid.storm.namespace.NamespaceDirector; +import it.grid.storm.namespace.NamespaceException; +import it.grid.storm.namespace.model.SAInfoV13; +import it.grid.storm.namespace.model.VirtualFS; +import it.grid.storm.namespace.remote.Constants; import java.util.List; import java.util.Map; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Maps; - -import it.grid.storm.namespace.NamespaceDirector; -import it.grid.storm.namespace.NamespaceException; -import it.grid.storm.namespace.model.SAInfoV13; -import it.grid.storm.namespace.model.VirtualFS; -import it.grid.storm.namespace.remote.Constants; - -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ @Path("/" + Constants.RESOURCE + "/" + Constants.VERSION_1_3) public class VirtualFSResourceCompat_1_3 { private static final Logger log = LoggerFactory.getLogger(VirtualFSResourceCompat_1_3.class); - /** - * @return - */ + /** @return */ @GET @Path("/" + Constants.LIST_ALL_VFS) @Produces(MediaType.APPLICATION_JSON) @@ -53,5 +44,4 @@ public Map listVFS() { return output; } - } diff --git a/src/main/java/it/grid/storm/namespace/util/userinfo/LocalGroups.java b/src/main/java/it/grid/storm/namespace/util/userinfo/LocalGroups.java index 1dc439be..6465306e 100644 --- a/src/main/java/it/grid/storm/namespace/util/userinfo/LocalGroups.java +++ b/src/main/java/it/grid/storm/namespace/util/userinfo/LocalGroups.java @@ -1,98 +1,91 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.namespace.util.userinfo; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public final class LocalGroups { - private static final Logger log = LoggerFactory.getLogger(LocalGroups.class); - - private static final String UNKNOWN_GROUP = "unknown"; - - private Map nameIdMap = new ConcurrentHashMap(); - private Map idNameMap = new ConcurrentHashMap(); - private static final LocalGroups instance = new LocalGroups(); - private long parsingInstant = 0; - private static final long minimumLifetime = 1000 * 60 * 5; // 10 minutes; - - private LocalGroups() { - - init(); - } - - private synchronized void init() { - - nameIdMap.clear(); - idNameMap.clear(); - // Parsing all the database and cache it - nameIdMap.putAll(UserInfoExecutor.digestGroupDatabase()); - for (Entry nameIdEntry : nameIdMap.entrySet()) { - idNameMap.put(nameIdEntry.getValue(), nameIdEntry.getKey()); - } - parsingInstant = System.currentTimeMillis(); - } - - public static synchronized LocalGroups getInstance() { - - if (instance.computeParsedAge() > LocalGroups.minimumLifetime) { - instance.init(); - } - return instance; - } - - private long computeParsedAge() { - - return System.currentTimeMillis() - parsingInstant; - } - - public boolean isGroupDefined(String groupName) { - - boolean result = nameIdMap.keySet().contains(groupName); - if (!result) { - try { - Integer grupId = Integer.valueOf(groupName); - if (grupId != null) { - result = idNameMap.containsKey(grupId); - } - } catch (NumberFormatException e) { - // not a number, just an attempt failed - } - } - return result; - } - - public int getGroupId(String groupName) { - - int result = -1; - if (isGroupDefined(groupName)) { - result = nameIdMap.get(groupName).intValue(); - } - return result; - } - - public String getGroupName(int groupId) { - - String result = UNKNOWN_GROUP; - if (idNameMap.containsKey(Integer.valueOf(groupId))) { - result = idNameMap.get(Integer.valueOf(groupId)); - } else { - log.warn("Unable to find a group with GID='" + groupId + "'"); - } - return result; - } - + private static final Logger log = LoggerFactory.getLogger(LocalGroups.class); + + private static final String UNKNOWN_GROUP = "unknown"; + + private Map nameIdMap = new ConcurrentHashMap(); + private Map idNameMap = new ConcurrentHashMap(); + private static final LocalGroups instance = new LocalGroups(); + private long parsingInstant = 0; + private static final long minimumLifetime = 1000 * 60 * 5; // 10 minutes; + + private LocalGroups() { + + init(); + } + + private synchronized void init() { + + nameIdMap.clear(); + idNameMap.clear(); + // Parsing all the database and cache it + nameIdMap.putAll(UserInfoExecutor.digestGroupDatabase()); + for (Entry nameIdEntry : nameIdMap.entrySet()) { + idNameMap.put(nameIdEntry.getValue(), nameIdEntry.getKey()); + } + parsingInstant = System.currentTimeMillis(); + } + + public static synchronized LocalGroups getInstance() { + + if (instance.computeParsedAge() > LocalGroups.minimumLifetime) { + instance.init(); + } + return instance; + } + + private long computeParsedAge() { + + return System.currentTimeMillis() - parsingInstant; + } + + public boolean isGroupDefined(String groupName) { + + boolean result = nameIdMap.keySet().contains(groupName); + if (!result) { + try { + Integer grupId = Integer.valueOf(groupName); + if (grupId != null) { + result = 
idNameMap.containsKey(grupId); + } + } catch (NumberFormatException e) { + // not a number, just an attempt failed + } + } + return result; + } + + public int getGroupId(String groupName) { + + int result = -1; + if (isGroupDefined(groupName)) { + result = nameIdMap.get(groupName).intValue(); + } + return result; + } + + public String getGroupName(int groupId) { + + String result = UNKNOWN_GROUP; + if (idNameMap.containsKey(Integer.valueOf(groupId))) { + result = idNameMap.get(Integer.valueOf(groupId)); + } else { + log.warn("Unable to find a group with GID='" + groupId + "'"); + } + return result; + } } diff --git a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoCommand.java b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoCommand.java index 4bbb85d4..f343448b 100644 --- a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoCommand.java +++ b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.util.userinfo; @@ -10,273 +9,268 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; - import org.apache.commons.lang.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class UserInfoCommand { - private static final String COMMAND_ID = "id"; - private static final String COMMAND_GETENT = "getent"; - private static final Logger log = LoggerFactory - .getLogger(UserInfoCommand.class); - - public UserInfoCommand() { + private static final String COMMAND_ID = "id"; + private static final String COMMAND_GETENT = "getent"; + private static final Logger log = LoggerFactory.getLogger(UserInfoCommand.class); - super(); - } + public UserInfoCommand() { - /** - * - * @return String - */ - public static String getCommandId() { + super(); + } - return COMMAND_ID; - } + /** @return String */ + public static String getCommandId() { - /** - * - * @return String - */ - public static String getCommandGetENT() { + return COMMAND_ID; + } - return COMMAND_GETENT; - } + /** @return String */ + public static String getCommandGetENT() { - /** - * - * @param parameters - * @return - * @throws UserInfoException - */ - public int retrieveGroupID(UserInfoParameters parameters) - throws UserInfoException { + return COMMAND_GETENT; + } - int groupId = -1; - String[] command = buildCommandString(parameters); + /** + * @param parameters + * @return + * @throws UserInfoException + */ + public int retrieveGroupID(UserInfoParameters parameters) throws UserInfoException { - StringBuilder commandOutput = new StringBuilder(); - for (String element : command) { - commandOutput.append(element).append(" "); - log.debug("UserInfo Command INPUT String : " + commandOutput.toString()); - } - String output = getOutput(command); - if ((output != null) && (output.length() > 0)) { - try { - Long groupLong = Long.valueOf(Long.parseLong(output)); - if (groupLong.intValue() == groupLong.longValue()) { - // The number in the output string fits in an integer (is at most 16 - // bits) - groupId = groupLong.intValue(); - } else { - // The number in the output string does not fits in an integer (is - // between 17 and 32 bits) - log.warn("Group named '" + parameters + "' has a 32 bit GID " - + groupLong.longValue() - + " . Long GID are not managed by LCMAPS. 
Ignoring the group"); - } - } catch (NumberFormatException nfe) { - log.error("Group named '" + parameters - + "' return a result different from a long. NumberFormatException : " - + nfe); - throw new UserInfoException("Group named '" + parameters - + "' return a result different from a long. NumberFormatException : " - + nfe); - } - } else { - throw new UserInfoException("Group named '" + parameters - + "' return a result different from a integer"); - } - return groupId; - } + int groupId = -1; + String[] command = buildCommandString(parameters); - /** - * Creates an has map of coupplse parsing the - * "getent group" command output - * - * @return - */ - public HashMap retrieveGroupDb() { + StringBuilder commandOutput = new StringBuilder(); + for (String element : command) { + commandOutput.append(element).append(" "); + log.debug("UserInfo Command INPUT String : " + commandOutput.toString()); + } + String output = getOutput(command); + if ((output != null) && (output.length() > 0)) { + try { + Long groupLong = Long.valueOf(Long.parseLong(output)); + if (groupLong.intValue() == groupLong.longValue()) { + // The number in the output string fits in an integer (is at most 16 + // bits) + groupId = groupLong.intValue(); + } else { + // The number in the output string does not fits in an integer (is + // between 17 and 32 bits) + log.warn( + "Group named '" + + parameters + + "' has a 32 bit GID " + + groupLong.longValue() + + " . Long GID are not managed by LCMAPS. Ignoring the group"); + } + } catch (NumberFormatException nfe) { + log.error( + "Group named '" + + parameters + + "' return a result different from a long. NumberFormatException : " + + nfe); + throw new UserInfoException( + "Group named '" + + parameters + + "' return a result different from a long. 
NumberFormatException : " + + nfe); + } + } else { + throw new UserInfoException( + "Group named '" + parameters + "' return a result different from a integer"); + } + return groupId; + } - HashMap groupsDb = new HashMap(); - UserInfoParameters param = new UserInfoParameters(Arrays.asList("group")); - String[] command = buildCommandString(param); - String output = getOutput(command); - if ((output != null) && (output.length() > 0)) { - String lines[] = output.split("\\r?\\n"); - for (int i = 0; i < lines.length; i++) { - int gid = getGroupId(lines[i]); - String groupName = getGroupName(lines[i]); - if (gid > -1) { - groupsDb.put(groupName, gid); - } else { - log.warn("Error while parsing the line '" + lines[i] - + "' in group DB"); - } - } - } else { - throw new UserInfoException("Unable to digest group database."); - } - return groupsDb; - } + /** + * Creates an has map of coupplse parsing the "getent group" command output + * + * @return + */ + public HashMap retrieveGroupDb() { - /** - * Command "getent group " if parameters contain a string - * representing the groupname Command "getent group" if parameters is empty or - * null - * - * @param - * @return String[] - */ - private static String[] buildCommandString(UserInfoParameters parameters) { + HashMap groupsDb = new HashMap(); + UserInfoParameters param = new UserInfoParameters(Arrays.asList("group")); + String[] command = buildCommandString(param); + String output = getOutput(command); + if ((output != null) && (output.length() > 0)) { + String lines[] = output.split("\\r?\\n"); + for (int i = 0; i < lines.length; i++) { + int gid = getGroupId(lines[i]); + String groupName = getGroupName(lines[i]); + if (gid > -1) { + groupsDb.put(groupName, gid); + } else { + log.warn("Error while parsing the line '" + lines[i] + "' in group DB"); + } + } + } else { + throw new UserInfoException("Unable to digest group database."); + } + return groupsDb; + } - String[] command = null; - List param = null; - if (parameters != null) { - param = parameters.getParameters(); - command = new String[1 + param.size()]; - command[0] = UserInfoCommand.getCommandGetENT(); - int cont = 1; - // Adding parameters to the command - for (Object element : param) { - String p = (String) element; - command[cont++] = p; - } - } else { - command = new String[] { UserInfoCommand.getCommandGetENT() }; - } - return command; - } + /** + * Command "getent group " if parameters contain a string representing the groupname + * Command "getent group" if parameters is empty or null + * + * @param + * @return String[] + */ + private static String[] buildCommandString(UserInfoParameters parameters) { - /** - * - * @param command - * String[] - * @return String - */ - private String getOutput(String[] command) throws UserInfoException { + String[] command = null; + List param = null; + if (parameters != null) { + param = parameters.getParameters(); + command = new String[1 + param.size()]; + command[0] = UserInfoCommand.getCommandGetENT(); + int cont = 1; + // Adding parameters to the command + for (Object element : param) { + String p = (String) element; + command[cont++] = p; + } + } else { + command = new String[] {UserInfoCommand.getCommandGetENT()}; + } + return command; + } - String result = ""; - try { - Process child = Runtime.getRuntime().exec(command); - log.debug("Command executed: " + ArrayUtils.toString(command)); - BufferedReader stdInput = null; - BufferedReader stdError = null; - // Get the input stream and read from it - if (child != null) { - stdInput = new 
BufferedReader(new InputStreamReader( - child.getInputStream())); - stdError = new BufferedReader(new InputStreamReader( - child.getErrorStream())); - } + /** + * @param command String[] + * @return String + */ + private String getOutput(String[] command) throws UserInfoException { - if (stdInput != null) { + String result = ""; + try { + Process child = Runtime.getRuntime().exec(command); + log.debug("Command executed: " + ArrayUtils.toString(command)); + BufferedReader stdInput = null; + BufferedReader stdError = null; + // Get the input stream and read from it + if (child != null) { + stdInput = new BufferedReader(new InputStreamReader(child.getInputStream())); + stdError = new BufferedReader(new InputStreamReader(child.getErrorStream())); + } - // process the Command Output (Input for StoRM ;) ) - String line; - int row = 0; - log.trace("UserInfo Command Output :"); - while ((line = stdInput.readLine()) != null) { - log.trace(row + ": " + line); - boolean lineOk = processOutput(row, line); - if (lineOk) { - result = result + line + "\n"; - } - row++; - } + if (stdInput != null) { - // process the Errors - String errLine; - if (stdError != null) { - while ((errLine = stdError.readLine()) != null) { - log.warn("User Info Command Output contains an ERROR message " - + errLine); - throw new UserInfoException(errLine); - } - } - } - } catch (IOException ex) { - log.error("getUserInfo (id) I/O Exception: " + ex); - throw new UserInfoException(ex); - } - return result; - } + // process the Command Output (Input for StoRM ;) ) + String line; + int row = 0; + log.trace("UserInfo Command Output :"); + while ((line = stdInput.readLine()) != null) { + log.trace(row + ": " + line); + boolean lineOk = processOutput(row, line); + if (lineOk) { + result = result + line + "\n"; + } + row++; + } - private boolean processOutput(int row, String line) { + // process the Errors + String errLine; + if (stdError != null) { + while ((errLine = stdError.readLine()) != null) { + log.warn("User Info Command Output contains an ERROR message " + errLine); + throw new UserInfoException(errLine); + } + } + } + } catch (IOException ex) { + log.error("getUserInfo (id) I/O Exception: " + ex); + throw new UserInfoException(ex); + } + return result; + } - boolean result = false; - if (row >= 0) { - result = true; - } - return result; - } + private boolean processOutput(int row, String line) { - private String getGroupName(String line) { + boolean result = false; + if (row >= 0) { + result = true; + } + return result; + } - String groupName = null; - String[] fields = getElements(line); - if ((fields != null) && (fields.length > 1) && (fields[0] != null)) { - log.trace("field[0], GroupName ='" + fields[0] + "'"); - groupName = fields[0]; - } - return groupName; + private String getGroupName(String line) { - } + String groupName = null; + String[] fields = getElements(line); + if ((fields != null) && (fields.length > 1) && (fields[0] != null)) { + log.trace("field[0], GroupName ='" + fields[0] + "'"); + groupName = fields[0]; + } + return groupName; + } - /** - * Extracts from the received string (with at least 3 fields separated by ":" - * ) the GID value as a long - * - * @param line - * @return - */ - private int getGroupId(String line) throws UserInfoException { + /** + * Extracts from the received string (with at least 3 fields separated by ":" ) the GID value as a + * long + * + * @param line + * @return + */ + private int getGroupId(String line) throws UserInfoException { - int gidInt = -1; - String[] fields = 
getElements(line); - if ((fields != null) && (fields.length > 2) && (fields[2] != null)) { - log.trace("field[2], GID ='" + fields[2] + "'"); - try { - Long groupLong = Long.valueOf(Long.parseLong(fields[2])); - if (groupLong.intValue() == groupLong.longValue()) { - // The number in the output string fits in an integer (is at most 16 - // bits) - gidInt = groupLong.intValue(); - } else { - // The number in the output string does not fits in an integer (is - // between 17 and 32 bits) - log.warn("Group named '" + fields[2] + "' has a 32 bit GID " - + groupLong.longValue() - + " . Long GID are not managed by LCMAPS. Ignoring the group"); - } - } catch (NumberFormatException nfe) { - log.error("Group named '" + fields[2] - + "' return a result different from a long. NumberFormatException : " - + nfe); - throw new UserInfoException("Group named '" + fields[2] - + "' return a result different from a long. NumberFormatException : " - + nfe); - } - } - return gidInt; - } + int gidInt = -1; + String[] fields = getElements(line); + if ((fields != null) && (fields.length > 2) && (fields[2] != null)) { + log.trace("field[2], GID ='" + fields[2] + "'"); + try { + Long groupLong = Long.valueOf(Long.parseLong(fields[2])); + if (groupLong.intValue() == groupLong.longValue()) { + // The number in the output string fits in an integer (is at most 16 + // bits) + gidInt = groupLong.intValue(); + } else { + // The number in the output string does not fits in an integer (is + // between 17 and 32 bits) + log.warn( + "Group named '" + + fields[2] + + "' has a 32 bit GID " + + groupLong.longValue() + + " . Long GID are not managed by LCMAPS. Ignoring the group"); + } + } catch (NumberFormatException nfe) { + log.error( + "Group named '" + + fields[2] + + "' return a result different from a long. NumberFormatException : " + + nfe); + throw new UserInfoException( + "Group named '" + + fields[2] + + "' return a result different from a long. NumberFormatException : " + + nfe); + } + } + return gidInt; + } - /** - * Split the line in atomic part - * - * @param line - * @return - */ - private String[] getElements(String line) { + /** + * Split the line in atomic part + * + * @param line + * @return + */ + private String[] getElements(String line) { - String patternStr = ":"; - String[] fields = null; - if (line != null) { - log.trace("LINE = " + line); - fields = line.split(patternStr); - } - return fields; - } + String patternStr = ":"; + String[] fields = null; + if (line != null) { + log.trace("LINE = " + line); + fields = line.split(patternStr); + } + return fields; + } } diff --git a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoException.java b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoException.java index 91a3de1c..e683af3c 100644 --- a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoException.java +++ b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoException.java @@ -1,28 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.util.userinfo; public class UserInfoException extends RuntimeException { - public UserInfoException() { + public UserInfoException() { - super(); - } + super(); + } - public UserInfoException(String message) { + public UserInfoException(String message) { - super(message); - } + super(message); + } - public UserInfoException(String message, Throwable cause) { + public UserInfoException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public UserInfoException(Throwable cause) { + public UserInfoException(Throwable cause) { - super(cause); - } + super(cause); + } } diff --git a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoExecutor.java b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoExecutor.java index 0d5c20bf..b5668545 100644 --- a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoExecutor.java +++ b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoExecutor.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.util.userinfo; @@ -10,59 +9,57 @@ public class UserInfoExecutor { - public UserInfoExecutor() { + public UserInfoExecutor() { - super(); - } + super(); + } - public static int retrieveGroupID_ETC(String groupName) - throws UserInfoException { + public static int retrieveGroupID_ETC(String groupName) throws UserInfoException { - int groupId = 0; + int groupId = 0; - // Retrieve Device - String param1 = "-r"; - String param2 = "-g"; - String param3 = groupName + "001"; // Be Carefull! + // Retrieve Device + String param1 = "-r"; + String param2 = "-g"; + String param3 = groupName + "001"; // Be Carefull! 
- UserInfoCommand userInfoCommand = new UserInfoCommand(); - ArrayList params = new ArrayList(); + UserInfoCommand userInfoCommand = new UserInfoCommand(); + ArrayList params = new ArrayList(); - params.add(0, param1); - params.add(1, param2); - params.add(2, param3); - UserInfoParameters userInfoParameters = new UserInfoParameters(params); + params.add(0, param1); + params.add(1, param2); + params.add(2, param3); + UserInfoParameters userInfoParameters = new UserInfoParameters(params); - groupId = userInfoCommand.retrieveGroupID(userInfoParameters); + groupId = userInfoCommand.retrieveGroupID(userInfoParameters); - return groupId; - } + return groupId; + } - public static int retrieveGroupID(String groupName) throws UserInfoException { + public static int retrieveGroupID(String groupName) throws UserInfoException { - int groupId = 0; + int groupId = 0; - // Retrieve Device - String param1 = groupName; + // Retrieve Device + String param1 = groupName; - UserInfoCommand userInfoCommand = new UserInfoCommand(); - ArrayList params = new ArrayList(); + UserInfoCommand userInfoCommand = new UserInfoCommand(); + ArrayList params = new ArrayList(); - params.add(0, param1); + params.add(0, param1); - UserInfoParameters userInfoParameters = new UserInfoParameters(params); + UserInfoParameters userInfoParameters = new UserInfoParameters(params); - groupId = userInfoCommand.retrieveGroupID(userInfoParameters); + groupId = userInfoCommand.retrieveGroupID(userInfoParameters); - return groupId; - } + return groupId; + } - public static Map digestGroupDatabase() { - - Map groupsDb = new HashMap(); - UserInfoCommand userInfoCommand = new UserInfoCommand(); - groupsDb = userInfoCommand.retrieveGroupDb(); - return groupsDb; - } + public static Map digestGroupDatabase() { + Map groupsDb = new HashMap(); + UserInfoCommand userInfoCommand = new UserInfoCommand(); + groupsDb = userInfoCommand.retrieveGroupDb(); + return groupsDb; + } } diff --git a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoParameters.java b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoParameters.java index e46c9452..8fdcf3ee 100644 --- a/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoParameters.java +++ b/src/main/java/it/grid/storm/namespace/util/userinfo/UserInfoParameters.java @@ -1,47 +1,37 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.util.userinfo; -import java.util.List; import java.util.Iterator; +import java.util.List; public class UserInfoParameters { - private List parameters = null; - - public UserInfoParameters(List parameters) { - - this.parameters = parameters; - } - - /** - * - * @return List - */ - public List getParameters() { - - return this.parameters; - } - - /** - * - * @return String - */ - public String toString() { - - if (parameters == null) - return "NULL parameters"; - if (parameters.isEmpty()) - return "EMPTY parameters"; - StringBuilder result = new StringBuilder(); - Iterator scan = parameters.iterator(); - while (scan.hasNext()) { - result.append(scan.next()); - result.append(" "); - } - return result.toString(); - } + private List parameters = null; + + public UserInfoParameters(List parameters) { + + this.parameters = parameters; + } + + /** @return List */ + public List getParameters() { + + return this.parameters; + } + + /** @return String */ + public String toString() { + if (parameters == null) return "NULL parameters"; + if (parameters.isEmpty()) return "EMPTY parameters"; + StringBuilder result = new StringBuilder(); + Iterator scan = parameters.iterator(); + while (scan.hasNext()) { + result.append(scan.next()); + result.append(" "); + } + return result.toString(); + } } diff --git a/src/main/java/it/grid/storm/persistence/DAOFactory.java b/src/main/java/it/grid/storm/persistence/DAOFactory.java index d7130780..ece06d87 100644 --- a/src/main/java/it/grid/storm/persistence/DAOFactory.java +++ b/src/main/java/it/grid/storm/persistence/DAOFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * (c)2004 INFN / ICTP-eGrid This file can be distributed and/or modified under @@ -20,32 +19,29 @@ /** * Returns an implementation of all Catalog interfaces. - * + * * @author Riccardo Zappi - riccardo.zappi AT cnaf.infn.it * @version $Id: DAOFactory.java,v 1.3 2005/10/22 15:09:40 rzappi Exp $ */ public interface DAOFactory { - /** - * Returns an implementation of StorageSpaceCatalog, specific to a particular - * datastore. - * - * @throws DataAccessException - * @return StorageSpaceDAO - */ - public StorageSpaceDAO getStorageSpaceDAO() throws DataAccessException; + /** + * Returns an implementation of StorageSpaceCatalog, specific to a particular datastore. 
+ * + * @throws DataAccessException + * @return StorageSpaceDAO + */ + public StorageSpaceDAO getStorageSpaceDAO() throws DataAccessException; - public TapeRecallDAO getTapeRecallDAO(); + public TapeRecallDAO getTapeRecallDAO(); - public TapeRecallDAO getTapeRecallDAO(boolean test) - throws DataAccessException; + public TapeRecallDAO getTapeRecallDAO(boolean test) throws DataAccessException; - public PtGChunkDAO getPtGChunkDAO() throws DataAccessException; + public PtGChunkDAO getPtGChunkDAO() throws DataAccessException; - public PtPChunkDAO getPtPChunkDAO() throws DataAccessException; + public PtPChunkDAO getPtPChunkDAO() throws DataAccessException; - public StorageAreaDAO getStorageAreaDAO() throws DataAccessException; - - public RequestSummaryDAO getRequestSummaryDAO() throws DataAccessException; + public StorageAreaDAO getStorageAreaDAO() throws DataAccessException; + public RequestSummaryDAO getRequestSummaryDAO() throws DataAccessException; } diff --git a/src/main/java/it/grid/storm/persistence/DataSourceConnectionFactory.java b/src/main/java/it/grid/storm/persistence/DataSourceConnectionFactory.java index 31337025..f871c690 100644 --- a/src/main/java/it/grid/storm/persistence/DataSourceConnectionFactory.java +++ b/src/main/java/it/grid/storm/persistence/DataSourceConnectionFactory.java @@ -1,16 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence; -import java.sql.Connection; import it.grid.storm.persistence.exceptions.PersistenceException; +import java.sql.Connection; public interface DataSourceConnectionFactory { - public Connection borrowConnection() throws PersistenceException; - - public void giveBackConnection(Connection con) throws PersistenceException; + public Connection borrowConnection() throws PersistenceException; + public void giveBackConnection(Connection con) throws PersistenceException; } diff --git a/src/main/java/it/grid/storm/persistence/MySqlDAOFactory.java b/src/main/java/it/grid/storm/persistence/MySqlDAOFactory.java index 77e6c6e3..3ef51e3a 100644 --- a/src/main/java/it/grid/storm/persistence/MySqlDAOFactory.java +++ b/src/main/java/it/grid/storm/persistence/MySqlDAOFactory.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.persistence.dao.PtGChunkDAO; import it.grid.storm.persistence.dao.PtPChunkDAO; import it.grid.storm.persistence.dao.RequestSummaryDAO; @@ -16,121 +12,113 @@ import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.impl.mysql.StorageSpaceDAOMySql; import it.grid.storm.persistence.impl.mysql.TapeRecallDAOMySql; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class MySqlDAOFactory implements DAOFactory { - public static final String factoryName = "JDBC - MySQL DAO Factory"; - - private static final Logger log = LoggerFactory - .getLogger(MySqlDAOFactory.class); - - private static MySqlDAOFactory factory = new MySqlDAOFactory(); - - /** - * - */ - private MySqlDAOFactory() { - log.info("DAO factory: {}", MySqlDAOFactory.factoryName); - } - - public static MySqlDAOFactory getInstance() { - - return MySqlDAOFactory.factory; - } - - /** - * Returns an implementation of StorageSpaceCatalog, specific to a particular - * datastore. - * - * @throws DataAccessException - * @return StorageSpaceDAO - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public StorageSpaceDAO getStorageSpaceDAO() throws DataAccessException { - - return new StorageSpaceDAOMySql(); - } - - /** - * Returns an implementation of TapeRecallCatalog, specific to a particular - * datastore. - * - * @throws DataAccessException - * @return TapeReallDAO - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public TapeRecallDAO getTapeRecallDAO() { - - return new TapeRecallDAOMySql(); - } - - /** - * @return String - */ - @Override - public String toString() { - - return MySqlDAOFactory.factoryName; - } - - - /** - * getPtGChunkDAO - * - * @return PtGChunkDAO - * @throws DataAccessException - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public PtGChunkDAO getPtGChunkDAO() throws DataAccessException { - - return null; - } - - /** - * getPtPChunkDAO - * - * @return PtPChunkDAO - * @throws DataAccessException - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public PtPChunkDAO getPtPChunkDAO() throws DataAccessException { - - return null; - } - - /** - * getRequestSummaryDAO - * - * @return RequestSummaryDAO - * @throws DataAccessException - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public RequestSummaryDAO getRequestSummaryDAO() throws DataAccessException { - - return null; - } - - /** - * getStorageAreaDAO - * - * @return StorageAreaDAO - * @throws DataAccessException - * @todo Implement this it.grid.storm.persistence.DAOFactory method - */ - public StorageAreaDAO getStorageAreaDAO() throws DataAccessException { - - return null; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.persistence.DAOFactory#getTapeRecallDAO(boolean) - */ - public TapeRecallDAO getTapeRecallDAO(boolean test) - throws DataAccessException { - - return new TapeRecallDAOMySql(); - } - + public static final String factoryName = "JDBC - MySQL DAO Factory"; + + private static final Logger log = LoggerFactory.getLogger(MySqlDAOFactory.class); + + private static MySqlDAOFactory factory = new MySqlDAOFactory(); + + /** */ + private MySqlDAOFactory() { + log.info("DAO factory: {}", MySqlDAOFactory.factoryName); + } + + public static MySqlDAOFactory getInstance() { + + return MySqlDAOFactory.factory; + } + + /** + * Returns an 
implementation of StorageSpaceCatalog, specific to a particular datastore. + * + * @throws DataAccessException + * @return StorageSpaceDAO + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public StorageSpaceDAO getStorageSpaceDAO() throws DataAccessException { + + return new StorageSpaceDAOMySql(); + } + + /** + * Returns an implementation of TapeRecallCatalog, specific to a particular datastore. + * + * @throws DataAccessException + * @return TapeReallDAO + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public TapeRecallDAO getTapeRecallDAO() { + + return new TapeRecallDAOMySql(); + } + + /** @return String */ + @Override + public String toString() { + + return MySqlDAOFactory.factoryName; + } + + /** + * getPtGChunkDAO + * + * @return PtGChunkDAO + * @throws DataAccessException + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public PtGChunkDAO getPtGChunkDAO() throws DataAccessException { + + return null; + } + + /** + * getPtPChunkDAO + * + * @return PtPChunkDAO + * @throws DataAccessException + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public PtPChunkDAO getPtPChunkDAO() throws DataAccessException { + + return null; + } + + /** + * getRequestSummaryDAO + * + * @return RequestSummaryDAO + * @throws DataAccessException + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public RequestSummaryDAO getRequestSummaryDAO() throws DataAccessException { + + return null; + } + + /** + * getStorageAreaDAO + * + * @return StorageAreaDAO + * @throws DataAccessException + * @todo Implement this it.grid.storm.persistence.DAOFactory method + */ + public StorageAreaDAO getStorageAreaDAO() throws DataAccessException { + + return null; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.persistence.DAOFactory#getTapeRecallDAO(boolean) + */ + public TapeRecallDAO getTapeRecallDAO(boolean test) throws DataAccessException { + + return new TapeRecallDAOMySql(); + } } diff --git a/src/main/java/it/grid/storm/persistence/PersistenceDirector.java b/src/main/java/it/grid/storm/persistence/PersistenceDirector.java index 6d1936e9..8a8e7002 100644 --- a/src/main/java/it/grid/storm/persistence/PersistenceDirector.java +++ b/src/main/java/it/grid/storm/persistence/PersistenceDirector.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence; @@ -9,58 +8,56 @@ import it.grid.storm.persistence.util.db.DBConnectionPool; import it.grid.storm.persistence.util.db.DataBaseStrategy; import it.grid.storm.persistence.util.db.Databases; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PersistenceDirector { - private static final Logger log = LoggerFactory.getLogger("persistence"); - private static Configuration config; - private static DataBaseStrategy dbMan; - private static DAOFactory daoFactory; - private static DataSourceConnectionFactory connFactory; - - static { - log.trace("Initializing Persistence Director..."); - config = Configuration.getInstance(); - dbMan = Databases.getDataBaseStrategy("mysql"); - daoFactory = MySqlDAOFactory.getInstance(); + private static final Logger log = LoggerFactory.getLogger("persistence"); + private static Configuration config; + private static DataBaseStrategy dbMan; + private static DAOFactory daoFactory; + private static DataSourceConnectionFactory connFactory; - int maxActive = config.getBEPersistencePoolDBMaxActive(); - int maxWait = config.getBEPersistencePoolDBMaxWait(); + static { + log.trace("Initializing Persistence Director..."); + config = Configuration.getInstance(); + dbMan = Databases.getDataBaseStrategy("mysql"); + daoFactory = MySqlDAOFactory.getInstance(); - log.debug("Datasource connection string = {}", dbMan.getConnectionString()); - log.debug("Pool Max Active = {}", maxActive); - log.debug("Pool Max Wait = {}", maxWait); + int maxActive = config.getBEPersistencePoolDBMaxActive(); + int maxWait = config.getBEPersistencePoolDBMaxWait(); - try { - DBConnectionPool.initPool(dbMan, maxActive, maxWait); - connFactory = DBConnectionPool.getPoolInstance(); - } catch (PersistenceException e) { - log.error(e.getMessage(), e); - System.exit(1); - } - } + log.debug("Datasource connection string = {}", dbMan.getConnectionString()); + log.debug("Pool Max Active = {}", maxActive); + log.debug("Pool Max Wait = {}", maxWait); - public static DAOFactory getDAOFactory() { + try { + DBConnectionPool.initPool(dbMan, maxActive, maxWait); + connFactory = DBConnectionPool.getPoolInstance(); + } catch (PersistenceException e) { + log.error(e.getMessage(), e); + System.exit(1); + } + } - return daoFactory; - } + public static DAOFactory getDAOFactory() { - public static DataBaseStrategy getDataBase() { + return daoFactory; + } - return dbMan; - } + public static DataBaseStrategy getDataBase() { - public static DataSourceConnectionFactory getConnectionFactory() { + return dbMan; + } - return connFactory; - } + public static DataSourceConnectionFactory getConnectionFactory() { - public static Logger getLogger() { + return connFactory; + } - return log; - } + public static Logger getLogger() { + return log; + } } diff --git a/src/main/java/it/grid/storm/persistence/dao/AbstractDAO.java b/src/main/java/it/grid/storm/persistence/dao/AbstractDAO.java index 733bc176..7ee6ca58 100644 --- a/src/main/java/it/grid/storm/persistence/dao/AbstractDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/AbstractDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; @@ -8,165 +7,155 @@ import it.grid.storm.persistence.PersistenceDirector; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.exceptions.PersistenceException; - import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public abstract class AbstractDAO { - private static final Logger log = LoggerFactory.getLogger(AbstractDAO.class); - - private DataSourceConnectionFactory connFactory; - - public AbstractDAO() { - connFactory = PersistenceDirector.getConnectionFactory(); - } - - protected void commit(Connection conn) { - - try { - conn.commit(); - conn.setAutoCommit(true); - } catch (SQLException e) { - log.error(e.getMessage(), e); - } - } - - protected Connection getConnection() throws DataAccessException { - - Connection conn = null; - try { - conn = connFactory.borrowConnection(); - } catch (PersistenceException ex) { - throw new DataAccessException(ex); - } - return conn; - } - - protected Statement getStatement(Connection conn) throws DataAccessException { - - Statement stat = null; - if (conn == null) { - throw new DataAccessException( - "No Connection available to create a Statement"); - } else { - try { - stat = conn.createStatement(); - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException(e); - } - } - return stat; - } - - /** - * Release a connection Accessor method. - * - * @param resultSet - * ResultSet - * @param statement - * Statement - * @param connection - * Connection - * @throws DataAccessException - */ - protected void releaseConnection(ResultSet resultSet, Statement statement, - Connection connection) throws DataAccessException { - - // Release the ResultSet - closeResultSet(resultSet); - - // Close the statement - closeStatement(statement); - - // Release the connection - closeConnection(connection); - } - - /** - * Release a connection and a list of statements and result sets Accessor - * method. 
- * - * @param resultSets - * @param statements - * @param connection - * @throws DataAccessException - */ - protected void releaseConnection(ResultSet[] resultSets, - Statement[] statements, Connection connection) throws DataAccessException { - - // Release the ResultSets - if (resultSets != null) { - for (ResultSet resultSet : resultSets) { - closeResultSet(resultSet); - } - } - // Close the statement - if (statements != null) { - for (Statement statement : statements) { - closeStatement(statement); - } - } - // Release the connection - closeConnection(connection); - } - - private void closeResultSet(ResultSet resultSet) throws DataAccessException { - - if (resultSet != null) { - try { - resultSet.close(); - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException(e); - } - } - } - - private void closeStatement(Statement statement) throws DataAccessException { - - if (statement != null) { - try { - statement.close(); - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException(e); - } - } - } - - private void closeConnection(Connection connection) - throws DataAccessException { - - if (connection != null) { - try { - connFactory.giveBackConnection(connection); - } catch (PersistenceException e) { - log.error(e.getMessage(), e); - throw new DataAccessException(e); - } - } - } - - /** - * @param conn - */ - protected void rollback(Connection conn) { - - try { - - conn.rollback(); - conn.setAutoCommit(true); - - } catch (SQLException e) { - log.error(e.getMessage(), e); - } - } - + private static final Logger log = LoggerFactory.getLogger(AbstractDAO.class); + + private DataSourceConnectionFactory connFactory; + + public AbstractDAO() { + connFactory = PersistenceDirector.getConnectionFactory(); + } + + protected void commit(Connection conn) { + + try { + conn.commit(); + conn.setAutoCommit(true); + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } + + protected Connection getConnection() throws DataAccessException { + + Connection conn = null; + try { + conn = connFactory.borrowConnection(); + } catch (PersistenceException ex) { + throw new DataAccessException(ex); + } + return conn; + } + + protected Statement getStatement(Connection conn) throws DataAccessException { + + Statement stat = null; + if (conn == null) { + throw new DataAccessException("No Connection available to create a Statement"); + } else { + try { + stat = conn.createStatement(); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException(e); + } + } + return stat; + } + + /** + * Release a connection Accessor method. + * + * @param resultSet ResultSet + * @param statement Statement + * @param connection Connection + * @throws DataAccessException + */ + protected void releaseConnection(ResultSet resultSet, Statement statement, Connection connection) + throws DataAccessException { + + // Release the ResultSet + closeResultSet(resultSet); + + // Close the statement + closeStatement(statement); + + // Release the connection + closeConnection(connection); + } + + /** + * Release a connection and a list of statements and result sets Accessor method. 
+ * + * @param resultSets + * @param statements + * @param connection + * @throws DataAccessException + */ + protected void releaseConnection( + ResultSet[] resultSets, Statement[] statements, Connection connection) + throws DataAccessException { + + // Release the ResultSets + if (resultSets != null) { + for (ResultSet resultSet : resultSets) { + closeResultSet(resultSet); + } + } + // Close the statement + if (statements != null) { + for (Statement statement : statements) { + closeStatement(statement); + } + } + // Release the connection + closeConnection(connection); + } + + private void closeResultSet(ResultSet resultSet) throws DataAccessException { + + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException(e); + } + } + } + + private void closeStatement(Statement statement) throws DataAccessException { + + if (statement != null) { + try { + statement.close(); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException(e); + } + } + } + + private void closeConnection(Connection connection) throws DataAccessException { + + if (connection != null) { + try { + connFactory.giveBackConnection(connection); + } catch (PersistenceException e) { + log.error(e.getMessage(), e); + throw new DataAccessException(e); + } + } + } + + /** @param conn */ + protected void rollback(Connection conn) { + + try { + + conn.rollback(); + conn.setAutoCommit(true); + + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } } diff --git a/src/main/java/it/grid/storm/persistence/dao/PtGChunkDAO.java b/src/main/java/it/grid/storm/persistence/dao/PtGChunkDAO.java index 252a2679..c0170bc8 100644 --- a/src/main/java/it/grid/storm/persistence/dao/PtGChunkDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/PtGChunkDAO.java @@ -1,23 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; -import java.util.Collection; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.PtGChunkTO; import it.grid.storm.srm.types.TRequestToken; +import java.util.Collection; public interface PtGChunkDAO { - public PtGChunkTO getPtGChunkDataById(Long ssId) throws DataAccessException; + public PtGChunkTO getPtGChunkDataById(Long ssId) throws DataAccessException; - public void addPtGChunkData(PtGChunkTO ptgChunkTO) throws DataAccessException; + public void addPtGChunkData(PtGChunkTO ptgChunkTO) throws DataAccessException; - public Collection getPtGChunksDataByToken(TRequestToken token) - throws DataAccessException; + public Collection getPtGChunksDataByToken(TRequestToken token) throws DataAccessException; - public void removePtGChunksData(PtGChunkTO ptgChunkTO) - throws DataAccessException; + public void removePtGChunksData(PtGChunkTO ptgChunkTO) throws DataAccessException; } diff --git a/src/main/java/it/grid/storm/persistence/dao/PtPChunkDAO.java b/src/main/java/it/grid/storm/persistence/dao/PtPChunkDAO.java index 3db4fa5d..2e131d19 100644 --- a/src/main/java/it/grid/storm/persistence/dao/PtPChunkDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/PtPChunkDAO.java @@ -1,24 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; -import java.util.Collection; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.PtPChunkTO; import it.grid.storm.srm.types.TRequestToken; +import java.util.Collection; public interface PtPChunkDAO { - public PtPChunkTO getPtGChunkDataById(Long ssId) throws DataAccessException; + public PtPChunkTO getPtGChunkDataById(Long ssId) throws DataAccessException; - public void addPtGChunkData(PtPChunkTO ptpChunkData) - throws DataAccessException; + public void addPtGChunkData(PtPChunkTO ptpChunkData) throws DataAccessException; - public Collection getPtPChunksDataByToken(TRequestToken token) - throws DataAccessException; + public Collection getPtPChunksDataByToken(TRequestToken token) throws DataAccessException; - public void removePtGChunksData(PtPChunkTO ptpChunkData) - throws DataAccessException; + public void removePtGChunksData(PtPChunkTO ptpChunkData) throws DataAccessException; } diff --git a/src/main/java/it/grid/storm/persistence/dao/RequestSummaryDAO.java b/src/main/java/it/grid/storm/persistence/dao/RequestSummaryDAO.java index c0c3f8b7..fe54369e 100644 --- a/src/main/java/it/grid/storm/persistence/dao/RequestSummaryDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/RequestSummaryDAO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; @@ -9,12 +8,9 @@ public interface RequestSummaryDAO { - public RequestSummaryTO getRequestSummaryById(Long ssId) - throws DataAccessException; + public RequestSummaryTO getRequestSummaryById(Long ssId) throws DataAccessException; - public void addRequestSummary(RequestSummaryTO rsd) - throws DataAccessException; + public void addRequestSummary(RequestSummaryTO rsd) throws DataAccessException; - public void removeRequestSummary(RequestSummaryTO rsd) - throws DataAccessException; + public void removeRequestSummary(RequestSummaryTO rsd) throws DataAccessException; } diff --git a/src/main/java/it/grid/storm/persistence/dao/StorageAreaDAO.java b/src/main/java/it/grid/storm/persistence/dao/StorageAreaDAO.java index b244c951..a1c03911 100644 --- a/src/main/java/it/grid/storm/persistence/dao/StorageAreaDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/StorageAreaDAO.java @@ -1,8 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; -public interface StorageAreaDAO { -} +public interface StorageAreaDAO {} diff --git a/src/main/java/it/grid/storm/persistence/dao/StorageSpaceDAO.java b/src/main/java/it/grid/storm/persistence/dao/StorageSpaceDAO.java index c8a727b1..b8b9c2d5 100644 --- a/src/main/java/it/grid/storm/persistence/dao/StorageSpaceDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/StorageSpaceDAO.java @@ -1,74 +1,60 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; -import java.util.Collection; -import java.util.Date; - +import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.StorageSpaceTO; - -import it.grid.storm.griduser.GridUserInterface; +import java.util.Collection; +import java.util.Date; /** - * * Storage Space Data Access Object (DAO) - * - * DAO pattern - * - * + * + *

+ * DAO pattern */ public interface StorageSpaceDAO { - public StorageSpaceTO getStorageSpaceById(Long ssId) - throws DataAccessException; + public StorageSpaceTO getStorageSpaceById(Long ssId) throws DataAccessException; - public Collection getStorageSpaceByOwner( - GridUserInterface owner, String spaceAlias) throws DataAccessException; + public Collection getStorageSpaceByOwner( + GridUserInterface owner, String spaceAlias) throws DataAccessException; - public Collection getStorageSpaceBySpaceType(String stype) - throws DataAccessException; - - public Collection getStorageSpaceByAliasOnly(String spaceAlias) - throws DataAccessException; + public Collection getStorageSpaceBySpaceType(String stype) + throws DataAccessException; - public StorageSpaceTO getStorageSpaceByToken(String token) - throws DataAccessException; + public Collection getStorageSpaceByAliasOnly(String spaceAlias) + throws DataAccessException; - public Collection findAll() throws DataAccessException; + public StorageSpaceTO getStorageSpaceByToken(String token) throws DataAccessException; - public void addStorageSpace(StorageSpaceTO ss) throws DataAccessException; + public Collection findAll() throws DataAccessException; - public void removeStorageSpace(GridUserInterface user, String spaceToken) - throws DataAccessException; + public void addStorageSpace(StorageSpaceTO ss) throws DataAccessException; - public void removeStorageSpace(String spaceToken) throws DataAccessException; + public void removeStorageSpace(GridUserInterface user, String spaceToken) + throws DataAccessException; - public void updateStorageSpace(StorageSpaceTO ss) throws DataAccessException; + public void removeStorageSpace(String spaceToken) throws DataAccessException; - public void updateStorageSpaceFreeSpace(StorageSpaceTO ss) - throws DataAccessException; + public void updateStorageSpace(StorageSpaceTO ss) throws DataAccessException; - public void updateAllStorageSpace(StorageSpaceTO ss) - throws DataAccessException; + public void updateStorageSpaceFreeSpace(StorageSpaceTO ss) throws DataAccessException; - public Collection getExpired(long currentTimeInSecond) - throws DataAccessException; + public void updateAllStorageSpace(StorageSpaceTO ss) throws DataAccessException; - public Collection getStorageSpaceByUnavailableUsedSpace( - long unavailableSizeValue) throws DataAccessException; + public Collection getExpired(long currentTimeInSecond) throws DataAccessException; - public Collection getStorageSpaceByPreviousLastUpdate( - Date lastUpdateTimestamp) throws DataAccessException; - - public int increaseUsedSpace(String spaceToken, long usedSpaceToAdd) - throws DataAccessException; + public Collection getStorageSpaceByUnavailableUsedSpace(long unavailableSizeValue) + throws DataAccessException; - public int decreaseUsedSpace(String spaceToken, long usedSpaceToRemove) + public Collection getStorageSpaceByPreviousLastUpdate(Date lastUpdateTimestamp) throws DataAccessException; + public int increaseUsedSpace(String spaceToken, long usedSpaceToAdd) throws DataAccessException; + + public int decreaseUsedSpace(String spaceToken, long usedSpaceToRemove) + throws DataAccessException; } diff --git a/src/main/java/it/grid/storm/persistence/dao/TapeRecallDAO.java b/src/main/java/it/grid/storm/persistence/dao/TapeRecallDAO.java index 75f1d23d..7bc73b11 100644 --- a/src/main/java/it/grid/storm/persistence/dao/TapeRecallDAO.java +++ b/src/main/java/it/grid/storm/persistence/dao/TapeRecallDAO.java @@ -1,189 +1,165 @@ /** - * Copyright (c) Istituto Nazionale di 
Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.dao; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.TapeRecallTO; - import java.util.Date; import java.util.List; import java.util.UUID; -/** - * Tape Recall Data Access Object (DAO) - */ - +/** Tape Recall Data Access Object (DAO) */ public abstract class TapeRecallDAO extends AbstractDAO { - /** - * - * @return - * @throws DataAccessException - */ - public abstract int getNumberInProgress() throws DataAccessException; - - /** - * - * @param voName - * @return - * @throws DataAccessException - */ - public abstract int getNumberInProgress(String voName) - throws DataAccessException; - - /** - * - * @return - * @throws DataAccessException - */ - public abstract int getNumberQueued() throws DataAccessException; - - /** - * - * @param voName - * @return - * @throws DataAccessException - */ - public abstract int getNumberQueued(String voName) throws DataAccessException; - - /** - * - * @return - * @throws DataAccessException - */ - public abstract int getReadyForTakeOver() throws DataAccessException; - - /** - * - * @param voName - * @return - * @throws DataAccessException - */ - public abstract int getReadyForTakeOver(String voName) - throws DataAccessException; - - /** - * @param taskId - * @param requestToken - * @return - * @throws DataAccessException - */ - public abstract TapeRecallTO getTask(UUID taskId, String requestToken) - throws DataAccessException; - - /** - * @param groupTaskId - * @return - * @throws DataAccessException - */ - public abstract List getGroupTasks(UUID groupTaskId) - throws DataAccessException; - - /** - * Verifies that a recall task with the given taskId and request token exists - * on the database - * - * @param taskId - * @param requestToken - * @return true if the recall task exists - * @throws DataAccessException - */ - public abstract boolean existsTask(UUID taskId, String requestToken) - throws DataAccessException; - - /** - * @param groupTaskId - * @return - * @throws DataAccessException - */ - public abstract boolean existsGroupTask(UUID groupTaskId) - throws DataAccessException; - - /** - * Method called by a garbage collector that removes all tape recalls that are - * not in QUEUED (1) or IN_PROGRESS (2) status - * - * @param expirationTime seconds must pass to consider the request as expired - * @param delete at most numMaxToPurge tasks - * @return the amount of tasks deleted - * @throws DataAccessException - */ - public abstract int purgeCompletedTasks(long expirationTime, int numMaxToPurge) - throws DataAccessException; - - /** - * @param taskId - * @param newValue - * @throws DataAccessException - */ - public abstract void setGroupTaskRetryValue(UUID groupTaskId, int value) - throws DataAccessException; - - /** - * - * @return - * @throws DataAccessException - */ - public abstract TapeRecallTO takeoverTask() throws DataAccessException; - - /** - * - * @param voName - * @return - * @throws DataAccessException - */ - public abstract TapeRecallTO takeoverTask(String voName) - throws DataAccessException; - - /** - * Performs the take-over of max numberOfTaks tasks possibly returning more - * than one file recall task for some files - * - * @param numberOfTaks - * @return - * @throws DataAccessException - */ - public abstract List takeoverTasksWithDoubles(int numberOfTaks) - throws DataAccessException; 
- - /** - * - * @param numberOfTaks - * @param voName - * @return - * @throws DataAccessException - */ - public abstract List takeoverTasksWithDoubles(int numberOfTaks, - String voName) throws DataAccessException; - - /** - * @param task - * @param statuses - * @param proposedGroupTaskId - * @return - * @throws DataAccessException - */ - public abstract UUID insertCloneTask(TapeRecallTO task, int[] statuses, - UUID proposedGroupTaskId) throws DataAccessException; - - /** - * @param groupTaskId - * @param statusId - * @return - * @throws DataAccessException - */ - public abstract boolean setGroupTaskStatus(UUID groupTaskId, int statusId, - Date timestamp) throws DataAccessException; - - /** - * - * @param numberOfTaks - * @return - * @throws DataAccessException - */ - public abstract List getAllInProgressTasks(int numberOfTaks) - throws DataAccessException; - -} \ No newline at end of file + /** + * @return + * @throws DataAccessException + */ + public abstract int getNumberInProgress() throws DataAccessException; + + /** + * @param voName + * @return + * @throws DataAccessException + */ + public abstract int getNumberInProgress(String voName) throws DataAccessException; + + /** + * @return + * @throws DataAccessException + */ + public abstract int getNumberQueued() throws DataAccessException; + + /** + * @param voName + * @return + * @throws DataAccessException + */ + public abstract int getNumberQueued(String voName) throws DataAccessException; + + /** + * @return + * @throws DataAccessException + */ + public abstract int getReadyForTakeOver() throws DataAccessException; + + /** + * @param voName + * @return + * @throws DataAccessException + */ + public abstract int getReadyForTakeOver(String voName) throws DataAccessException; + + /** + * @param taskId + * @param requestToken + * @return + * @throws DataAccessException + */ + public abstract TapeRecallTO getTask(UUID taskId, String requestToken) throws DataAccessException; + + /** + * @param groupTaskId + * @return + * @throws DataAccessException + */ + public abstract List getGroupTasks(UUID groupTaskId) throws DataAccessException; + + /** + * Verifies that a recall task with the given taskId and request token exists on the database + * + * @param taskId + * @param requestToken + * @return true if the recall task exists + * @throws DataAccessException + */ + public abstract boolean existsTask(UUID taskId, String requestToken) throws DataAccessException; + + /** + * @param groupTaskId + * @return + * @throws DataAccessException + */ + public abstract boolean existsGroupTask(UUID groupTaskId) throws DataAccessException; + + /** + * Method called by a garbage collector that removes all tape recalls that are not in QUEUED (1) + * or IN_PROGRESS (2) status + * + * @param expirationTime seconds must pass to consider the request as expired + * @param delete at most numMaxToPurge tasks + * @return the amount of tasks deleted + * @throws DataAccessException + */ + public abstract int purgeCompletedTasks(long expirationTime, int numMaxToPurge) + throws DataAccessException; + + /** + * @param taskId + * @param newValue + * @throws DataAccessException + */ + public abstract void setGroupTaskRetryValue(UUID groupTaskId, int value) + throws DataAccessException; + + /** + * @return + * @throws DataAccessException + */ + public abstract TapeRecallTO takeoverTask() throws DataAccessException; + + /** + * @param voName + * @return + * @throws DataAccessException + */ + public abstract TapeRecallTO takeoverTask(String voName) throws DataAccessException; + 
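Illustrative sketch only, not part of the patch: the takeover methods declared just above are the hook a recall agent uses to claim queued tasks (takeoverTask(), optionally per VO, flips one queued entry to IN_PROGRESS and returns it), and setGroupTaskStatus(), declared further down in this DAO, later records the final state for the whole group. A minimal driver might look like the following, assuming a concrete TapeRecallDAO instance (for example the MySQL implementation later in this patch), a getGroupTaskId() accessor on TapeRecallTO, and a caller-chosen final status id; all three are assumptions, not something this hunk defines.

// Hedged usage sketch; names marked as assumptions are not defined in this patch.
import it.grid.storm.persistence.dao.TapeRecallDAO;
import it.grid.storm.persistence.exceptions.DataAccessException;
import it.grid.storm.persistence.model.TapeRecallTO;
import java.util.Date;

class RecallTakeoverSketch {

  // Claims one queued recall task and, once the recall has been attempted,
  // records the outcome for its group. finalStatusId is whatever
  // TapeRecallStatus value the caller decides on; it is not chosen here.
  static void takeoverAndFinalize(TapeRecallDAO dao, int finalStatusId)
      throws DataAccessException {

    TapeRecallTO task = dao.takeoverTask(); // moves one queued task to IN_PROGRESS
    if (task == null) {
      return; // nothing ready for takeover
    }

    // ... drive the actual tape recall for task here ...

    // getGroupTaskId() is assumed to mirror the setGroupTaskId() used elsewhere.
    dao.setGroupTaskStatus(task.getGroupTaskId(), finalStatusId, new Date());
  }
}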
+ /** + * Performs the take-over of max numberOfTaks tasks possibly returning more than one file recall + * task for some files + * + * @param numberOfTaks + * @return + * @throws DataAccessException + */ + public abstract List takeoverTasksWithDoubles(int numberOfTaks) + throws DataAccessException; + + /** + * @param numberOfTaks + * @param voName + * @return + * @throws DataAccessException + */ + public abstract List takeoverTasksWithDoubles(int numberOfTaks, String voName) + throws DataAccessException; + + /** + * @param task + * @param statuses + * @param proposedGroupTaskId + * @return + * @throws DataAccessException + */ + public abstract UUID insertCloneTask(TapeRecallTO task, int[] statuses, UUID proposedGroupTaskId) + throws DataAccessException; + + /** + * @param groupTaskId + * @param statusId + * @return + * @throws DataAccessException + */ + public abstract boolean setGroupTaskStatus(UUID groupTaskId, int statusId, Date timestamp) + throws DataAccessException; + + /** + * @param numberOfTaks + * @return + * @throws DataAccessException + */ + public abstract List getAllInProgressTasks(int numberOfTaks) + throws DataAccessException; +} diff --git a/src/main/java/it/grid/storm/persistence/exceptions/DataAccessException.java b/src/main/java/it/grid/storm/persistence/exceptions/DataAccessException.java index 4cec3340..14ff9d4a 100644 --- a/src/main/java/it/grid/storm/persistence/exceptions/DataAccessException.java +++ b/src/main/java/it/grid/storm/persistence/exceptions/DataAccessException.java @@ -1,23 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.exceptions; -/** - * This exception is used to mark generic failures in persistence layer - */ - +/** This exception is used to mark generic failures in persistence layer */ public class DataAccessException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; - public DataAccessException() { - - } + public DataAccessException() {} public DataAccessException(String message) { diff --git a/src/main/java/it/grid/storm/persistence/exceptions/InfrastructureException.java b/src/main/java/it/grid/storm/persistence/exceptions/InfrastructureException.java index 1969ad24..d39e8572 100644 --- a/src/main/java/it/grid/storm/persistence/exceptions/InfrastructureException.java +++ b/src/main/java/it/grid/storm/persistence/exceptions/InfrastructureException.java @@ -1,33 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.exceptions; /** - * This exception is used to mark (fatal) failures in infrastructure and system - * code. - * + * This exception is used to mark (fatal) failures in infrastructure and system code. 
+ * * @author Christian Bauer */ public class InfrastructureException extends RuntimeException { - public InfrastructureException() { + public InfrastructureException() {} - } + public InfrastructureException(String message) { - public InfrastructureException(String message) { + super(message); + } - super(message); - } + public InfrastructureException(String message, Throwable cause) { - public InfrastructureException(String message, Throwable cause) { + super(message, cause); + } - super(message, cause); - } + public InfrastructureException(Throwable cause) { - public InfrastructureException(Throwable cause) { - - super(cause); - } + super(cause); + } } diff --git a/src/main/java/it/grid/storm/persistence/exceptions/PersistenceException.java b/src/main/java/it/grid/storm/persistence/exceptions/PersistenceException.java index 6c9de5a3..a8539c7e 100644 --- a/src/main/java/it/grid/storm/persistence/exceptions/PersistenceException.java +++ b/src/main/java/it/grid/storm/persistence/exceptions/PersistenceException.java @@ -1,34 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.exceptions; -/** - * This exception is used to mark generic failures in persistence layer - * - */ - +/** This exception is used to mark generic failures in persistence layer */ public class PersistenceException extends Exception { - public PersistenceException() { - - super(); - } + public PersistenceException() { - public PersistenceException(String message) { + super(); + } - super(message); - } + public PersistenceException(String message) { - public PersistenceException(String message, Throwable cause) { + super(message); + } - super(message, cause); - } + public PersistenceException(String message, Throwable cause) { - public PersistenceException(Throwable cause) { + super(message, cause); + } - super(cause); - } + public PersistenceException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/it/grid/storm/persistence/impl/mysql/StorageSpaceDAOMySql.java b/src/main/java/it/grid/storm/persistence/impl/mysql/StorageSpaceDAOMySql.java index d8d764ac..f2b3f314 100644 --- a/src/main/java/it/grid/storm/persistence/impl/mysql/StorageSpaceDAOMySql.java +++ b/src/main/java/it/grid/storm/persistence/impl/mysql/StorageSpaceDAOMySql.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.impl.mysql; @@ -11,7 +10,6 @@ import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.StorageSpaceTO; import it.grid.storm.persistence.util.helper.StorageSpaceSQLHelper; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -19,608 +17,581 @@ import java.util.Collection; import java.util.Date; import java.util.LinkedList; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * find = con.prepareStatement( - * "SELECT storm_get_filereq.rowid, storm_req.r_token, storm_get_filereq.from_surl, storm_get_filereq.lifetime, storm_get_filereq.s_token, storm_get_filereq.flags, storm_req.protocol, storm_get_filereq.actual_size, storm_get_filereq.status, storm_get_filereq.errstring, storm_get_filereq.pfn FROM storm_get_filereq, storm_req WHERE storm_get_filereq.r_token=storm_req.r_token AND storm_get_filereq.r_token=?" - * ); - **/ - -public class StorageSpaceDAOMySql extends AbstractDAO implements - StorageSpaceDAO { - - private static final Logger log = LoggerFactory - .getLogger(StorageSpaceDAOMySql.class); - - private StorageSpaceSQLHelper helper; - - /** - * CONSTRUCTOR - */ - public StorageSpaceDAOMySql() { - - helper = new StorageSpaceSQLHelper(PersistenceDirector.getDataBase() - .getDbmsVendor()); - } - - /** - * addStorageSpace - * - * @param ss - * StorageSpace - * @throws DataAccessException - */ - - public void addStorageSpace(StorageSpaceTO ss) throws DataAccessException { - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.insertQuery(conn, ss); - log.info("INSERT query = {}", prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("INSERT result = {}", res); - if (res <= 0) { - log - .error("No row inserted for statement : {}", prepStatement.toString()); - throw new DataAccessException("No rows inserted for Storage Space"); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing INSERT query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * getStorageSpaceById - * - * @param ssId - * Long - * @return StorageSpace - * @throws DataAccessException - */ - public StorageSpaceTO getStorageSpaceById(Long ssId) - throws DataAccessException { - - throw new DataAccessException("getStorageSpaceById: Unimplemented method!"); - } - - public Collection findAll() throws DataAccessException { - - throw new DataAccessException("findAll: Unimplemented method!"); - } - - /** - * Returns a Collection of StorageSpaceTO owned by 'user' and with the - * specified alias ('spaceAlias'). 'spaceAlias' can be NULL or empty and in - * these cases a Collection of all the StorageSpaceTO owned by 'user' is - * returned. - * - * @param owner - * VomsGridUser. - * @param spaceAlias - * String. - * @return Collection of StorageSpaceTO. 
- * @throws DataAccessException - */ - public Collection getStorageSpaceByOwner( - GridUserInterface owner, String spaceAlias) throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.selectBySpaceAliasQuery(conn, owner, spaceAlias); - log.debug("DB query = {}", prepStatement.toString()); - - res = prepStatement.executeQuery(); - - log.debug("query result = {}", res); - if (res.first() == false) { - log.debug("No rows found for query : {}", prepStatement.toString()); - } else { - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - /** - * Returns a Collection of StorageSpaceTO owned by 'VO'. - * - * @param voname - * Vo. - * @return Collection of StorageSpaceTO. - * @throws DataAccessException - */ - - public Collection getStorageSpaceBySpaceType(String stype) - throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - - PreparedStatement prepStatement = null; - - Connection conn = getConnection(); - ResultSet res = null; - - try { - prepStatement = helper.selectBySpaceType(conn, stype); - log.debug("DB query = {}", prepStatement.toString()); - - res = prepStatement.executeQuery(); - log.debug("query result = {}", res); - if (res.first() == false) { - log.info("No rows found for query : {}", prepStatement.toString()); - } else { - // Fetch each row from the result set - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - /** - * Returns a Collection of StorageSpaceTO with the specified alias - * ('spaceAlias'). 'spaceAlias' can not be be NULL or empty. - * - * @param spaceAlias - * String. - * @return Collection of StorageSpaceTO. 
- * @throws DataAccessException - */ - public Collection getStorageSpaceByAliasOnly(String spaceAlias) - throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - Connection conn = getConnection(); - ResultSet res = null; - - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.selectBySpaceAliasOnlyQuery(conn, spaceAlias); - log.debug("DB query = {}" , prepStatement.toString()); - - res = prepStatement.executeQuery(); - log.debug("query result = {}" , res); - - if (res.first() == false) { - log.info("No rows found for query : {}" , prepStatement.toString()); - } else { - // Fetch each row from the result set - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - /** - * getStorageSpaceByToken - * - * @param token - * TSpaceToken - * @return StorageSpace , null if not row found on that token - * @throws DataAccessException - */ - public StorageSpaceTO getStorageSpaceByToken(String token) - throws DataAccessException { - - StorageSpaceTO ssTO = null; - - Connection conn = getConnection(); - ResultSet res = null; - - PreparedStatement prepStatement = null; - try { - prepStatement = helper.selectByTokenQuery(conn, token); - log.debug("SELECT query = {}" , prepStatement.toString()); - - res = prepStatement.executeQuery(); - - log.debug("SELECT result = {}" , res); - if (res.first() == false) { - log.info("No rows found for query : {}" , prepStatement.toString()); - } else { - // take the first - ssTO = helper.makeStorageSpaceTO(res); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing INSERT query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return ssTO; - } - - @Override - public Collection getStorageSpaceByUnavailableUsedSpace( - long unavailableSizeValue) throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.selectByUnavailableUsedSpaceSizeQuery(conn, - unavailableSizeValue); - log.debug("SELECT query = {}" , prepStatement.toString()); - - res = prepStatement.executeQuery(); - log.debug("SELECT result = {}" , res); - if (res.first() == false) { - log.info("No rows found for query : {}" , prepStatement.toString()); - } else { - // Fetch each row from the result set - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing INSERT query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - @Override - public Collection getStorageSpaceByPreviousLastUpdate( - Date lastUpdateTimestamp) throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.selectByPreviousOrNullLastUpdateQuery(conn, - lastUpdateTimestamp.getTime()); - log.debug("SELECT query = {}" , prepStatement.toString()); - - res = prepStatement.executeQuery(); - 
log.debug("SELECT result = {}" , res); - if (res.first() == false) { - log.info("No rows found for query : {}" , prepStatement.toString()); - } else { - // Fetch each row from the result set - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing INSERT query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - /** - * removeStorageSpace - * - * @param ss - * StorageSpace - * @throws DataAccessException - */ - public void removeStorageSpace(GridUserInterface user, String spaceToken) - throws DataAccessException { - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.removeByTokenQuery(conn, user, spaceToken); - log.debug("query = {}" , prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("Number of rows removed: {}" , res); - if (res <= 0) { - log.error("Error removing Storage Space with token = {} for " - + "user {} not found", spaceToken, user.getDn()); - - throw new DataAccessException("Storage Space with token = '" - + spaceToken + "' for user '" + user.getDn() + "' not found!"); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DELETE query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * removeStorageSpace only by spaceToken - * - * @param ss - * StorageSpace - * @throws DataAccessException - */ - public void removeStorageSpace(String spaceToken) throws DataAccessException { - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.removeByTokenQuery(conn, spaceToken); - - log.debug("query = {}" , prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("Number of rows removed: {}" , res); - - if (res <= 0) { - log.error("Error removing Storage Space with token = {}. Space not found", - spaceToken); - - throw new DataAccessException("Storage Space with token = '" - + spaceToken + "' not found!"); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DELETE query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * - * @param ssTO - * StorageSpaceTO - * @throws DataAccessException - */ - public void updateStorageSpace(StorageSpaceTO ssTO) - throws DataAccessException { - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.updateByAliasAndTokenQuery(conn, ssTO); - log.debug("UPDATE query = {}" , prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("UPDATE row count = {}" , res); - - if (res != 1) { - if (res < 1) { - log.error("No storage space rows updated by query : {}" - , prepStatement.toString()); - } else { - log.warn("More than a single storage space rows updated by " - + "query : {}. 
updated {} rows.", - prepStatement.toString(), res); - } - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing UPDATE query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * - * @param ssTO - * StorageSpaceTO - * @throws DataAccessException - */ - public void updateStorageSpaceFreeSpace(StorageSpaceTO ssTO) - throws DataAccessException { - - long freeSpace = ssTO.getFreeSize(); - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.updateFreeSpaceByTokenQuery(conn, - ssTO.getSpaceToken(), freeSpace, new Date()); - - log.debug("UPDATE query = {}" , prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("UPDATE row count = {}", res); - if (res <= 0) { - log.error("No storage space rows updated by query : {}" - , prepStatement.toString()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing UPDATE query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * - * @param ssTO - * StorageSpaceTO - * @throws DataAccessException - */ - public void updateAllStorageSpace(StorageSpaceTO ssTO) - throws DataAccessException { - - Connection conn = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.updateByTokenQuery(conn, ssTO); - log.debug("UPDATE query = {}", prepStatement.toString()); - - int res = prepStatement.executeUpdate(); - log.debug("UPDATE row count = {}" , res); - if (res != 1) { - if (res < 1) { - log.error("No storage space rows updated by query {}" - , prepStatement.toString()); - } else { - log.warn("More than a single storage space rows updated " - + "by query : {}. updated {} rows" - ,prepStatement.toString(), res); - } - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing UPDATE query", e); - } finally { - releaseConnection(null, prepStatement, conn); - } - } - - /** - * Method used to retrieve the set of StorageTO for expired space. 
- * - * @param long timeInSecond - * @return Collection of transfer object - */ - public Collection getExpired(long currentTimeInSecond) - throws DataAccessException { - - StorageSpaceTO ssTO = null; - Collection result = new LinkedList(); - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = helper.selectExpiredQuery(conn, currentTimeInSecond); - log.debug("DB query = {}" , prepStatement.toString()); - - res = prepStatement.executeQuery(); - - log.debug("query result = {}" , res); - if (res.first() == false) { - log.debug("No rows found for query : {}" , prepStatement.toString()); - throw new DataAccessException("No storage space expired found at time " - + currentTimeInSecond); - } else { - // Fetch each row from the result set - do { - ssTO = helper.makeStorageSpaceTO(res); - result.add(ssTO); - } while (res.next()); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); - } - return result; - } - - @Override - public int increaseUsedSpace(String spaceToken, long usedSpaceToAdd) - throws DataAccessException { - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - int n = 0; - - try { - prepStatement = helper.increaseUsedSpaceByTokenQuery(conn, spaceToken, usedSpaceToAdd); - log.debug("DB query = {}" , prepStatement.toString()); - - n = prepStatement.executeUpdate(); - - log.debug("query result = {}" , n); - if (n == 0) { - log.debug("No rows updated for query : {}" , prepStatement.toString()); - throw new DataAccessException("No storage space updated!"); - } - - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); + * find = con.prepareStatement( "SELECT storm_get_filereq.rowid, storm_req.r_token, + * storm_get_filereq.from_surl, storm_get_filereq.lifetime, storm_get_filereq.s_token, + * storm_get_filereq.flags, storm_req.protocol, storm_get_filereq.actual_size, + * storm_get_filereq.status, storm_get_filereq.errstring, storm_get_filereq.pfn FROM + * storm_get_filereq, storm_req WHERE storm_get_filereq.r_token=storm_req.r_token AND + * storm_get_filereq.r_token=?" 
); + */ +public class StorageSpaceDAOMySql extends AbstractDAO implements StorageSpaceDAO { + + private static final Logger log = LoggerFactory.getLogger(StorageSpaceDAOMySql.class); + + private StorageSpaceSQLHelper helper; + + /** CONSTRUCTOR */ + public StorageSpaceDAOMySql() { + + helper = new StorageSpaceSQLHelper(PersistenceDirector.getDataBase().getDbmsVendor()); + } + + /** + * addStorageSpace + * + * @param ss StorageSpace + * @throws DataAccessException + */ + public void addStorageSpace(StorageSpaceTO ss) throws DataAccessException { + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.insertQuery(conn, ss); + log.info("INSERT query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("INSERT result = {}", res); + if (res <= 0) { + log.error("No row inserted for statement : {}", prepStatement.toString()); + throw new DataAccessException("No rows inserted for Storage Space"); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing INSERT query", e); + } finally { + releaseConnection(null, prepStatement, conn); + } + } + + /** + * getStorageSpaceById + * + * @param ssId Long + * @return StorageSpace + * @throws DataAccessException + */ + public StorageSpaceTO getStorageSpaceById(Long ssId) throws DataAccessException { + + throw new DataAccessException("getStorageSpaceById: Unimplemented method!"); + } + + public Collection findAll() throws DataAccessException { + + throw new DataAccessException("findAll: Unimplemented method!"); + } + + /** + * Returns a Collection of StorageSpaceTO owned by 'user' and with the specified alias + * ('spaceAlias'). 'spaceAlias' can be NULL or empty and in these cases a Collection of all the + * StorageSpaceTO owned by 'user' is returned. + * + * @param owner VomsGridUser. + * @param spaceAlias String. + * @return Collection of StorageSpaceTO. + * @throws DataAccessException + */ + public Collection getStorageSpaceByOwner( + GridUserInterface owner, String spaceAlias) throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.selectBySpaceAliasQuery(conn, owner, spaceAlias); + log.debug("DB query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + + log.debug("query result = {}", res); + if (res.first() == false) { + log.debug("No rows found for query : {}", prepStatement.toString()); + } else { + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + /** + * Returns a Collection of StorageSpaceTO owned by 'VO'. + * + * @param voname Vo. + * @return Collection of StorageSpaceTO. 
+ * @throws DataAccessException + */ + public Collection getStorageSpaceBySpaceType(String stype) + throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + + PreparedStatement prepStatement = null; + + Connection conn = getConnection(); + ResultSet res = null; + + try { + prepStatement = helper.selectBySpaceType(conn, stype); + log.debug("DB query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + log.debug("query result = {}", res); + if (res.first() == false) { + log.info("No rows found for query : {}", prepStatement.toString()); + } else { + // Fetch each row from the result set + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + /** + * Returns a Collection of StorageSpaceTO with the specified alias ('spaceAlias'). 'spaceAlias' + * can not be be NULL or empty. + * + * @param spaceAlias String. + * @return Collection of StorageSpaceTO. + * @throws DataAccessException + */ + public Collection getStorageSpaceByAliasOnly(String spaceAlias) + throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + Connection conn = getConnection(); + ResultSet res = null; + + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.selectBySpaceAliasOnlyQuery(conn, spaceAlias); + log.debug("DB query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + log.debug("query result = {}", res); + + if (res.first() == false) { + log.info("No rows found for query : {}", prepStatement.toString()); + } else { + // Fetch each row from the result set + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + /** + * getStorageSpaceByToken + * + * @param token TSpaceToken + * @return StorageSpace , null if not row found on that token + * @throws DataAccessException + */ + public StorageSpaceTO getStorageSpaceByToken(String token) throws DataAccessException { + + StorageSpaceTO ssTO = null; + + Connection conn = getConnection(); + ResultSet res = null; + + PreparedStatement prepStatement = null; + try { + prepStatement = helper.selectByTokenQuery(conn, token); + log.debug("SELECT query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + + log.debug("SELECT result = {}", res); + if (res.first() == false) { + log.info("No rows found for query : {}", prepStatement.toString()); + } else { + // take the first + ssTO = helper.makeStorageSpaceTO(res); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing INSERT query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return ssTO; + } + + @Override + public Collection getStorageSpaceByUnavailableUsedSpace(long unavailableSizeValue) + throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = 
helper.selectByUnavailableUsedSpaceSizeQuery(conn, unavailableSizeValue); + log.debug("SELECT query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + log.debug("SELECT result = {}", res); + if (res.first() == false) { + log.info("No rows found for query : {}", prepStatement.toString()); + } else { + // Fetch each row from the result set + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing INSERT query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + @Override + public Collection getStorageSpaceByPreviousLastUpdate(Date lastUpdateTimestamp) + throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = + helper.selectByPreviousOrNullLastUpdateQuery(conn, lastUpdateTimestamp.getTime()); + log.debug("SELECT query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + log.debug("SELECT result = {}", res); + if (res.first() == false) { + log.info("No rows found for query : {}", prepStatement.toString()); + } else { + // Fetch each row from the result set + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing INSERT query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + /** + * removeStorageSpace + * + * @param ss StorageSpace + * @throws DataAccessException + */ + public void removeStorageSpace(GridUserInterface user, String spaceToken) + throws DataAccessException { + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.removeByTokenQuery(conn, user, spaceToken); + log.debug("query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("Number of rows removed: {}", res); + if (res <= 0) { + log.error( + "Error removing Storage Space with token = {} for " + "user {} not found", + spaceToken, + user.getDn()); + + throw new DataAccessException( + "Storage Space with token = '" + + spaceToken + + "' for user '" + + user.getDn() + + "' not found!"); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DELETE query", e); + } finally { + releaseConnection(null, prepStatement, conn); + } + } + + /** + * removeStorageSpace only by spaceToken + * + * @param ss StorageSpace + * @throws DataAccessException + */ + public void removeStorageSpace(String spaceToken) throws DataAccessException { + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.removeByTokenQuery(conn, spaceToken); + + log.debug("query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("Number of rows removed: {}", res); + + if (res <= 0) { + log.error("Error removing Storage Space with token = {}. 
Space not found", spaceToken); + + throw new DataAccessException("Storage Space with token = '" + spaceToken + "' not found!"); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DELETE query", e); + } finally { + releaseConnection(null, prepStatement, conn); + } + } + + /** + * @param ssTO StorageSpaceTO + * @throws DataAccessException + */ + public void updateStorageSpace(StorageSpaceTO ssTO) throws DataAccessException { + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.updateByAliasAndTokenQuery(conn, ssTO); + log.debug("UPDATE query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("UPDATE row count = {}", res); + + if (res != 1) { + if (res < 1) { + log.error("No storage space rows updated by query : {}", prepStatement.toString()); + } else { + log.warn( + "More than a single storage space rows updated by " + "query : {}. updated {} rows.", + prepStatement.toString(), + res); } - return n; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing UPDATE query", e); + } finally { + releaseConnection(null, prepStatement, conn); } - - @Override - public int decreaseUsedSpace(String spaceToken, long usedSpaceToRemove) - throws DataAccessException { - - Connection conn = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - int n = 0; - - try { - prepStatement = helper.decreaseUsedSpaceByTokenQuery(conn, spaceToken, usedSpaceToRemove); - log.debug("DB query = {}" , prepStatement.toString()); - - n = prepStatement.executeUpdate(); - - log.debug("query result = {}" , n); - if (n == 0) { - log.debug("No rows updated for query : {}" , prepStatement.toString()); - throw new DataAccessException("No storage space updated!"); - } - - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error while executing DB query", e); - } finally { - releaseConnection(res, prepStatement, conn); + } + + /** + * @param ssTO StorageSpaceTO + * @throws DataAccessException + */ + public void updateStorageSpaceFreeSpace(StorageSpaceTO ssTO) throws DataAccessException { + + long freeSpace = ssTO.getFreeSize(); + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = + helper.updateFreeSpaceByTokenQuery(conn, ssTO.getSpaceToken(), freeSpace, new Date()); + + log.debug("UPDATE query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("UPDATE row count = {}", res); + if (res <= 0) { + log.error("No storage space rows updated by query : {}", prepStatement.toString()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing UPDATE query", e); + } finally { + releaseConnection(null, prepStatement, conn); + } + } + + /** + * @param ssTO StorageSpaceTO + * @throws DataAccessException + */ + public void updateAllStorageSpace(StorageSpaceTO ssTO) throws DataAccessException { + + Connection conn = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.updateByTokenQuery(conn, ssTO); + log.debug("UPDATE query = {}", prepStatement.toString()); + + int res = prepStatement.executeUpdate(); + log.debug("UPDATE row count = {}", res); + if (res != 1) { + if (res < 1) { + log.error("No storage space rows updated by query {}", 
prepStatement.toString()); + } else { + log.warn( + "More than a single storage space rows updated " + "by query : {}. updated {} rows", + prepStatement.toString(), + res); } - return n; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing UPDATE query", e); + } finally { + releaseConnection(null, prepStatement, conn); + } + } + + /** + * Method used to retrieve the set of StorageTO for expired space. + * + * @param long timeInSecond + * @return Collection of transfer object + */ + public Collection getExpired(long currentTimeInSecond) + throws DataAccessException { + + StorageSpaceTO ssTO = null; + Collection result = new LinkedList(); + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = helper.selectExpiredQuery(conn, currentTimeInSecond); + log.debug("DB query = {}", prepStatement.toString()); + + res = prepStatement.executeQuery(); + + log.debug("query result = {}", res); + if (res.first() == false) { + log.debug("No rows found for query : {}", prepStatement.toString()); + throw new DataAccessException( + "No storage space expired found at time " + currentTimeInSecond); + } else { + // Fetch each row from the result set + do { + ssTO = helper.makeStorageSpaceTO(res); + result.add(ssTO); + } while (res.next()); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return result; + } + + @Override + public int increaseUsedSpace(String spaceToken, long usedSpaceToAdd) throws DataAccessException { + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + int n = 0; + + try { + prepStatement = helper.increaseUsedSpaceByTokenQuery(conn, spaceToken, usedSpaceToAdd); + log.debug("DB query = {}", prepStatement.toString()); + + n = prepStatement.executeUpdate(); + + log.debug("query result = {}", n); + if (n == 0) { + log.debug("No rows updated for query : {}", prepStatement.toString()); + throw new DataAccessException("No storage space updated!"); + } + + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); + } + return n; + } + + @Override + public int decreaseUsedSpace(String spaceToken, long usedSpaceToRemove) + throws DataAccessException { + + Connection conn = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + int n = 0; + + try { + prepStatement = helper.decreaseUsedSpaceByTokenQuery(conn, spaceToken, usedSpaceToRemove); + log.debug("DB query = {}", prepStatement.toString()); + + n = prepStatement.executeUpdate(); + + log.debug("query result = {}", n); + if (n == 0) { + log.debug("No rows updated for query : {}", prepStatement.toString()); + throw new DataAccessException("No storage space updated!"); + } + + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException("Error while executing DB query", e); + } finally { + releaseConnection(res, prepStatement, conn); } + return n; + } } diff --git a/src/main/java/it/grid/storm/persistence/impl/mysql/TapeRecallDAOMySql.java b/src/main/java/it/grid/storm/persistence/impl/mysql/TapeRecallDAOMySql.java index ba64a71a..67b6f588 100644 --- 
a/src/main/java/it/grid/storm/persistence/impl/mysql/TapeRecallDAOMySql.java +++ b/src/main/java/it/grid/storm/persistence/impl/mysql/TapeRecallDAOMySql.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.impl.mysql; import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.valueOf; import com.google.common.collect.Lists; - import it.grid.storm.persistence.PersistenceDirector; import it.grid.storm.persistence.dao.TapeRecallDAO; import it.grid.storm.persistence.exceptions.DataAccessException; @@ -16,7 +14,6 @@ import it.grid.storm.srm.types.InvalidTRequestTokenAttributesException; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -28,724 +25,702 @@ import java.util.GregorianCalendar; import java.util.List; import java.util.UUID; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TapeRecallDAOMySql extends TapeRecallDAO { - private static final Logger log = LoggerFactory - .getLogger(TapeRecallDAOMySql.class); - - private final TapeRecallMySQLHelper sqlHelper; - - public TapeRecallDAOMySql() { - - sqlHelper = new TapeRecallMySQLHelper(PersistenceDirector.getDataBase() - .getDbmsVendor()); - } - - @Override - public int getNumberInProgress() throws DataAccessException { - - return getNumberInProgress(null); - } - - @Override - public int getNumberInProgress(String voName) throws DataAccessException { - - Connection dbConnection = getConnection(); - int status = 0; - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - if (voName == null) { - prepStatement = sqlHelper.getQueryNumberInProgress(dbConnection); - } else { - prepStatement = sqlHelper - .getQueryNumberInProgress(dbConnection, voName); - } - - log.debug("QUERY: {}", prepStatement); - - res = prepStatement.executeQuery(); - - if (res.first()) { - status = res.getInt(1); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return status; - } - - @Override - public int getNumberQueued() throws DataAccessException { - - return getNumberQueued(null); - } - - @Override - public int getNumberQueued(String voName) throws DataAccessException { - - Connection dbConnection = getConnection(); - int status = 0; - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - if (voName == null) { - prepStatement = sqlHelper.getQueryNumberQueued(dbConnection); - } else { - prepStatement = sqlHelper.getQueryNumberQueued(dbConnection, voName); - } - - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - - if (res.first()) { - status = res.getInt(1); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return status; - } - - @Override - public int getReadyForTakeOver() throws DataAccessException { - - return getReadyForTakeOver(null); - } - - @Override - public int getReadyForTakeOver(String voName) throws DataAccessException { - - Connection dbConnection = getConnection(); - int status = 0; - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - if (voName == null) { - prepStatement = 
sqlHelper.getQueryReadyForTakeOver(dbConnection); - } else { - prepStatement = sqlHelper - .getQueryReadyForTakeOver(dbConnection, voName); - } - - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - - if (res.first()) { - status = res.getInt(1); - } - } catch (SQLException e) { - throw new DataAccessException("Error executing query: '" - + prepStatement + "' " + e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return status; - } - - @Override - public List getGroupTasks(UUID groupTaskId) - throws DataAccessException { - - TapeRecallTO task = null; - List taskList = Lists.newArrayList(); - - Connection dbConnection = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper - .getQueryGetGroupTasks(dbConnection, groupTaskId); - - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - - if (!res.first()) { - log.error("No tasks with GroupTaskId='{}'", groupTaskId); - throw new DataAccessException( - "No recall table row retrieved executing query: '" - + prepStatement + "'"); - } - do { - task = new TapeRecallTO(); - setTaskInfo(task, res); - taskList.add(task); - } while (res.next()); - } catch (SQLException e) { - throw new DataAccessException("Error executing query: '" - + prepStatement + "' " + e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return taskList; - } - - @Override - public boolean existsGroupTask(UUID groupTaskId) throws DataAccessException { - - boolean response = false; - - Connection dbConnection = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper - .getQueryGetGroupTasks(dbConnection, groupTaskId); - - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - response = res.first(); - if (!response) { - log.info("No tasks found with GroupTaskId='{}'",groupTaskId); - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error executing query: '" - + prepStatement + "' " + e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return response; - } - - @Override - public TapeRecallTO getTask(UUID taskId, String requestToken) - throws DataAccessException { - - TapeRecallTO task; - Connection dbConnection = getConnection(); - ResultSet res = null; - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper.getQueryGetTask(dbConnection, taskId, - requestToken); - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - - if (!res.first()) { - log.error("No task found for requestToken={} taskId={}. Query={}", requestToken, taskId, prepStatement); - - throw new DataAccessException("No task found for requestToken=" - + requestToken + " " + "taskId=" + taskId + ". 
Query = " - + prepStatement); - } - task = new TapeRecallTO(); - setTaskInfo(task, res); - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error executing query: '" - + prepStatement + "' " + e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return task; - } - - @Override - public boolean existsTask(UUID taskId, String requestToken) - throws DataAccessException { - - boolean response; - - Connection dbConnection = getConnection(); - ResultSet res = null; - - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper.getQueryGetTask(dbConnection, taskId, - requestToken); - - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - response = res.first(); - } catch (SQLException e) { - log.error(e.getMessage(), e); - throw new DataAccessException("Error executing query: '" - + prepStatement + "' " + e.getMessage(), e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return response; - } - - @Override - public UUID insertCloneTask(TapeRecallTO task, int[] statuses, - UUID proposedGroupTaskId) throws DataAccessException { - - if (task.getTaskId() == null || task.getRequestToken() == null - || task.getRequestToken().getValue().trim().isEmpty()) { - log - .error("received Task insert request with empty primary key field TaskId or RequestToken. TaskId = {}, request token = {}", task.getTaskId(), task.getRequestToken()); - throw new DataAccessException( - "Unable to create insert the task wth the provided UUID and " - + "request token using UUID-namebased algorithm. TaskId = " - + task.getTaskId() + " , request token = " + task.getRequestToken()); - } - Integer status = task.getStatusId(); - - Connection dbConnection = getConnection(); - PreparedStatement prepStat = null; - - try { - dbConnection.setAutoCommit(false); - } catch (SQLException e) { - log.error("Error setting autocommit to false! {}", e.getMessage()); - throw new DataAccessException("Error setting autocommit to false! 
" - + e.getMessage(), e); - } - - ResultSet res = null; - try { - - if (statuses == null || statuses.length == 0) { - prepStat = sqlHelper.getQueryGetGroupTaskIds(dbConnection, - task.getTaskId()); - } else { - prepStat = sqlHelper.getQueryGetGroupTaskIds(dbConnection, - task.getTaskId(), statuses); - } - log.debug("QUERY: {}", prepStat); - - res = prepStat.executeQuery(); - - if (res.first()) { - /* Take the first, but there can be more than one result */ - String uuidString = res - .getString(TapeRecallMySQLHelper.COL_GROUP_TASK_ID); - status = Integer.valueOf(res.getInt(TapeRecallMySQLHelper.COL_STATUS)); - task.setStatusId(status.intValue()); - task.setGroupTaskId(UUID.fromString(uuidString)); - Calendar calendar = new GregorianCalendar(); - try { - task.forceStatusUpdateInstants( - res.getDate(TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, calendar), - res.getDate(TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE, calendar)); - } catch (IllegalArgumentException e) { - log.error("Unable to set status update timestamps on the coned task"); - } - } else { - log.debug("No task found for taskId={} Creating a new group entry", task.getTaskId()); - task.setGroupTaskId(proposedGroupTaskId); - task.setStatusId(status.intValue()); - } - - prepStat = sqlHelper.getQueryInsertTask(dbConnection, task); - if (prepStat == null) { - // this case is possible if and only if the task is null or empty - log.error("Cannot create the query because the task is null or empty."); - throw new DataAccessException( - "Cannot create the query because the task is null or empty."); - } - try { - log.debug("Query(insert-task)={}", prepStat); - prepStat.executeUpdate(); - commit(dbConnection); - } catch (SQLException e) { - rollback(dbConnection); - throw new DataAccessException("Error executing query : " - + prepStat + " ; " + e.getMessage(), e); - } - } catch (SQLException e) { - rollback(dbConnection); - throw new DataAccessException("Error executing query : " + " ; " - + e.getMessage(), e); - } finally { - releaseConnection(new ResultSet[] { res }, new Statement[] { prepStat }, - dbConnection); - } - return task.getGroupTaskId(); - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.persistence.dao.TapeRecallDAO#purgeCompletedTasks(int) - */ - @Override - public int purgeCompletedTasks(long expirationTime, int numTasks) throws DataAccessException { - - PreparedStatement ps = null; - Connection con = getConnection(); - - int count = 0; - boolean hasLimit = numTasks > 0; - try { - if (hasLimit) { - ps = sqlHelper.getQueryDeleteCompletedTasks(con, expirationTime, numTasks); - } else { - ps = sqlHelper.getQueryDeleteCompletedTasks(con, expirationTime); - } - - count = ps.executeUpdate(); - - } catch (SQLException e) { - throw new DataAccessException("Error executing query: " + ps, e); - } finally { - releaseConnection(null, ps, con); - } - - return count; - } - - @Override - public void setGroupTaskRetryValue(UUID groupTaskId, int value) - throws DataAccessException { - - Connection dbConnection = getConnection(); - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper.getQuerySetGroupTaskRetryValue(dbConnection, - groupTaskId, value); - - prepStatement.executeUpdate(); - } catch (SQLException e) { - throw new DataAccessException("Error executing query: " - + prepStatement, e); - } finally { - releaseConnection(null, prepStatement, dbConnection); - } - - } - - @Override - public TapeRecallTO takeoverTask() throws DataAccessException { - - return takeoverTask(null); - } - - @Override - public 
TapeRecallTO takeoverTask(String voName) throws DataAccessException { - - List taskList = takeoverTasksWithDoubles(1, voName); - - if (taskList.isEmpty()) { - return null; - } - return taskList.get(0); - } - - @Override - public List takeoverTasksWithDoubles(int numberOfTaks) - throws DataAccessException { - - return takeoverTasksWithDoubles(numberOfTaks, null); - } - - @Override - public List takeoverTasksWithDoubles(int numberOfTaks, - String voName) throws DataAccessException { - - Connection dbConnection = getConnection(); - - List taskList = Lists.newLinkedList(); - TapeRecallTO task = null; - ResultSet res = null; - - PreparedStatement prepStatement = null; - - try { - dbConnection.setAutoCommit(false); - } catch (SQLException e) { - log.error("Error setting autocommit to false! {}", e.getMessage()); - throw new DataAccessException("Error setting autocommit to false! " - + e.getMessage(), e); - } - try { - if (voName == null) { - prepStatement = sqlHelper.getQueryGetTakeoverTasksWithDoubles( - dbConnection, numberOfTaks); - } else { - prepStatement = sqlHelper.getQueryGetTakeoverTasksWithDoubles( - dbConnection, numberOfTaks, voName); - } - // start transaction - log.debug("QUERY: {}", prepStatement); - res = prepStatement.executeQuery(); - if (!res.first()) { - log.info("No tape recall rows ready for takeover"); - return taskList; - } - do { - task = new TapeRecallTO(); - setTaskInfo(task, res); - task.setStatus(TapeRecallStatus.IN_PROGRESS); - taskList.add(task); - } while (res.next()); - if (!taskList.isEmpty()) { - try { - prepStatement = sqlHelper.getQueryUpdateTasksStatus(dbConnection, - taskList, TapeRecallStatus.IN_PROGRESS.getStatusId(), - TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, new Date()); - } catch (IllegalArgumentException e) { - log - .error("Unable to obtain the query to update task status and set status transition timestamp. 
IllegalArgumentException: " - + e.getMessage()); - throw new DataAccessException( - "Unable to obtain the query to update task status and set status transition timestamp"); - } - prepStatement.executeUpdate(); - } - commit(dbConnection); - } catch (SQLException e) { - rollback(dbConnection); - throw new DataAccessException("Error executing query: " - + prepStatement, e); - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return taskList; - } - - @Override - public List getAllInProgressTasks(int numberOfTaks) - throws DataAccessException { - - Connection dbConnection = getConnection(); - ResultSet res = null; - List taskList = Lists.newArrayList(); - - PreparedStatement prepStatement = null; - - try { - prepStatement = sqlHelper.getQueryGetAllTasksInProgress(dbConnection, - numberOfTaks); - - log.debug("getAllInProgressTasks query: {}", prepStatement); - - res = prepStatement.executeQuery(); - - boolean emptyResultSet = true; - - while (res.next()) { - - emptyResultSet = false; - TapeRecallTO task = new TapeRecallTO(); - setTaskInfo(task, res); - taskList.add(task); - } - - if (emptyResultSet) { - - log.debug("No in progress recall tasks found."); - } - - } catch (Exception e) { - - log.error("Error executing query: {}", prepStatement, e); - throw new DataAccessException("Error executing query: " - + prepStatement, e); - - } finally { - - releaseConnection(res, prepStatement, dbConnection); - } - - return taskList; - } - - private void setTaskInfo(TapeRecallTO task, ResultSet res) - throws DataAccessException { - - if (res == null) { - throw new DataAccessException("Unable to build Task from NULL ResultSet"); - } - - String requestTokenStr = null; - Timestamp insertionInstant; - try { - requestTokenStr = res.getString(TapeRecallMySQLHelper.COL_REQUEST_TOKEN); - insertionInstant = res.getTimestamp(TapeRecallMySQLHelper.COL_DATE); - - } catch (SQLException e) { - throw new DataAccessException( - "Unable to retrieve RequestToken String from ResultSet. " + e); - } - try { - task - .setRequestToken(new TRequestToken(requestTokenStr, insertionInstant)); - } catch (InvalidTRequestTokenAttributesException e) { - throw new DataAccessException( - "Unable to build TRequestToken from token='" + requestTokenStr + "'. " - + e); - } - - UUID groupTaskId = null; - String groupTaskIdStr = null; - try { - groupTaskIdStr = res.getString(TapeRecallMySQLHelper.COL_GROUP_TASK_ID); - if (groupTaskIdStr != null) { - try { - groupTaskId = UUID.fromString(groupTaskIdStr); - task.setGroupTaskId(groupTaskId); - } catch (IllegalArgumentException iae) { - throw new DataAccessException( - "Unable to build UUID from GroupTaskId='" + groupTaskId + "'. " - + iae); - } - } - } catch (SQLException e) { - throw new DataAccessException( - "Unable to retrieve GroupTaskId String from ResultSet. 
" + e); - } - - // do not set the task ID, it is produced by the setFilename call - - try { - - task.setRequestType(valueOf(res.getString(TapeRecallMySQLHelper.COL_REQUEST_TYPE))); - task.setFileName(res.getString(TapeRecallMySQLHelper.COL_FILE_NAME)); - task.setPinLifetime(res.getInt(TapeRecallMySQLHelper.COL_PIN_LIFETIME)); - task.setStatusId(res.getInt(TapeRecallMySQLHelper.COL_STATUS)); - task.setVoName(res.getString(TapeRecallMySQLHelper.COL_VO_NAME)); - task.setUserID(res.getString(TapeRecallMySQLHelper.COL_USER_ID)); - task.setRetryAttempt(res.getInt(TapeRecallMySQLHelper.COL_RETRY_ATTEMPT)); - Calendar calendar = new GregorianCalendar(); - task.setDeferredRecallInstant(res.getTimestamp( - TapeRecallMySQLHelper.COL_DEFERRED_STARTTIME, calendar)); - task.setInsertionInstant(res.getTimestamp(TapeRecallMySQLHelper.COL_DATE, - calendar)); - try { - task.forceStatusUpdateInstants(res.getTimestamp( - TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, calendar), res - .getTimestamp(TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE, calendar)); - } catch (IllegalArgumentException e) { - log.error("Unable to set status update timestamps on the coned task"); - } - } catch (SQLException e) { - throw new DataAccessException("Unable to getting info from ResultSet. " - + e); - } - } - - @Override - public boolean setGroupTaskStatus(UUID groupTaskId, int newStatusId, - Date timestamp) throws DataAccessException { - - PreparedStatement prepStatement = null; - Connection dbConnection = getConnection(); - - try { - dbConnection.setAutoCommit(false); - } catch (SQLException e) { - log.error("Error setting autocommit to false! {}", e.getMessage()); - throw new DataAccessException("Error setting autocommit to false! " - + e.getMessage(), e); - } - - ResultSet res = null; - boolean ret = false; - int oldStatusId = -1; - - try { - - try { - prepStatement = sqlHelper.getQueryGetGroupTasks(dbConnection, - groupTaskId); - - log.debug("QUERY: {}", prepStatement); - // retrieves the tasks of this task group - res = prepStatement.executeQuery(); - - if (!res.first()) { - log.error("No tasks with GroupTaskId='{}'", groupTaskId); - throw new DataAccessException( - "No recall table row retrieved executing query: '" - + prepStatement + "'"); - } - // verify if their stored status is equal for all - oldStatusId = res.getInt(TapeRecallMySQLHelper.COL_STATUS); - do { - int currentStatusId = res.getInt(TapeRecallMySQLHelper.COL_STATUS); - if (currentStatusId != oldStatusId) { - log.warn("The tasks with groupTaskId {} have different statuses: {} from task {} differs " - + "from expected {}", groupTaskId, currentStatusId, - res.getString(TapeRecallMySQLHelper.COL_TASK_ID), oldStatusId); - break; - } - oldStatusId = currentStatusId; - } while (res.next()); - } catch (SQLException e) { - log - .error("Unable to retrieve groupTaskId related tasks. SQLException: {}", e); - throw new DataAccessException( - "Unable to retrieve groupTaskId related tasks. 
"); - } - if (oldStatusId != newStatusId) { - // update the task status and if is a valid transition set the relative - // transition timestamp - if (!TapeRecallStatus.getRecallTaskStatus(oldStatusId).precedes( - newStatusId)) { - log - .warn("Requested the update of the status of a recall task group to status {} that is precedent " - + "to the recorded status performing the request the same...", newStatusId, oldStatusId); - } - String timestampColumn = null; - if (TapeRecallStatus.isFinalStatus(newStatusId)) { - timestampColumn = TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE; - } else { - if (TapeRecallStatus.IN_PROGRESS.equals(TapeRecallStatus - .getRecallTaskStatus(newStatusId))) { - timestampColumn = TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE; - } else { - log - .warn("unable to determine the status update timestamp column to use given the new statusId '{}'", newStatusId); - } - } - if (timestampColumn != null) { - try { - prepStatement = sqlHelper.getQueryUpdateGroupTaskStatus( - dbConnection, groupTaskId, newStatusId, timestampColumn, - timestamp); - } catch (IllegalArgumentException e) { - log - .error("Unable to obtain the query to update task status and set status transition timestamp. IllegalArgumentException: {}", e.getMessage()); - throw new DataAccessException( - "Unable to obtain the query to update task status and set status transition timestamp"); - } catch (SQLException e) { - throw new DataAccessException("Error executing query: " - + prepStatement.toString(), e); - } - } else { - try { - prepStatement = sqlHelper.getQuerySetGroupTaskStatus(dbConnection, - groupTaskId, newStatusId); - } catch (SQLException e) { - throw new DataAccessException("Error executing query: " - + prepStatement.toString(), e); - } - } - try { - if (prepStatement.executeUpdate() > 0) { - ret = true; - } - commit(dbConnection); - } catch (SQLException e) { - throw new DataAccessException("Error executing query: " - + prepStatement.toString(), e); - } - } else { - log - .warn("Skipping the status upadate operation, the status already stored is equal to the new one provided"); - } - } finally { - releaseConnection(res, prepStatement, dbConnection); - } - return ret; - } + private static final Logger log = LoggerFactory.getLogger(TapeRecallDAOMySql.class); + + private final TapeRecallMySQLHelper sqlHelper; + + public TapeRecallDAOMySql() { + + sqlHelper = new TapeRecallMySQLHelper(PersistenceDirector.getDataBase().getDbmsVendor()); + } + + @Override + public int getNumberInProgress() throws DataAccessException { + + return getNumberInProgress(null); + } + + @Override + public int getNumberInProgress(String voName) throws DataAccessException { + + Connection dbConnection = getConnection(); + int status = 0; + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + if (voName == null) { + prepStatement = sqlHelper.getQueryNumberInProgress(dbConnection); + } else { + prepStatement = sqlHelper.getQueryNumberInProgress(dbConnection, voName); + } + + log.debug("QUERY: {}", prepStatement); + + res = prepStatement.executeQuery(); + + if (res.first()) { + status = res.getInt(1); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return status; + } + + @Override + public int getNumberQueued() throws DataAccessException { + + return getNumberQueued(null); + } + + @Override + public int getNumberQueued(String voName) throws DataAccessException { + + Connection dbConnection = getConnection(); + int status = 0; 
+ ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + if (voName == null) { + prepStatement = sqlHelper.getQueryNumberQueued(dbConnection); + } else { + prepStatement = sqlHelper.getQueryNumberQueued(dbConnection, voName); + } + + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + + if (res.first()) { + status = res.getInt(1); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return status; + } + + @Override + public int getReadyForTakeOver() throws DataAccessException { + + return getReadyForTakeOver(null); + } + + @Override + public int getReadyForTakeOver(String voName) throws DataAccessException { + + Connection dbConnection = getConnection(); + int status = 0; + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + if (voName == null) { + prepStatement = sqlHelper.getQueryReadyForTakeOver(dbConnection); + } else { + prepStatement = sqlHelper.getQueryReadyForTakeOver(dbConnection, voName); + } + + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + + if (res.first()) { + status = res.getInt(1); + } + } catch (SQLException e) { + throw new DataAccessException( + "Error executing query: '" + prepStatement + "' " + e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return status; + } + + @Override + public List getGroupTasks(UUID groupTaskId) throws DataAccessException { + + TapeRecallTO task = null; + List taskList = Lists.newArrayList(); + + Connection dbConnection = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQueryGetGroupTasks(dbConnection, groupTaskId); + + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + + if (!res.first()) { + log.error("No tasks with GroupTaskId='{}'", groupTaskId); + throw new DataAccessException( + "No recall table row retrieved executing query: '" + prepStatement + "'"); + } + do { + task = new TapeRecallTO(); + setTaskInfo(task, res); + taskList.add(task); + } while (res.next()); + } catch (SQLException e) { + throw new DataAccessException( + "Error executing query: '" + prepStatement + "' " + e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return taskList; + } + + @Override + public boolean existsGroupTask(UUID groupTaskId) throws DataAccessException { + + boolean response = false; + + Connection dbConnection = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQueryGetGroupTasks(dbConnection, groupTaskId); + + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + response = res.first(); + if (!response) { + log.info("No tasks found with GroupTaskId='{}'", groupTaskId); + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException( + "Error executing query: '" + prepStatement + "' " + e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return response; + } + + @Override + public TapeRecallTO getTask(UUID taskId, String requestToken) throws DataAccessException { + + TapeRecallTO task; + Connection dbConnection = getConnection(); + ResultSet res = null; + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQueryGetTask(dbConnection, taskId, requestToken); + log.debug("QUERY: 
{}", prepStatement); + res = prepStatement.executeQuery(); + + if (!res.first()) { + log.error( + "No task found for requestToken={} taskId={}. Query={}", + requestToken, + taskId, + prepStatement); + + throw new DataAccessException( + "No task found for requestToken=" + + requestToken + + " " + + "taskId=" + + taskId + + ". Query = " + + prepStatement); + } + task = new TapeRecallTO(); + setTaskInfo(task, res); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException( + "Error executing query: '" + prepStatement + "' " + e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return task; + } + + @Override + public boolean existsTask(UUID taskId, String requestToken) throws DataAccessException { + + boolean response; + + Connection dbConnection = getConnection(); + ResultSet res = null; + + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQueryGetTask(dbConnection, taskId, requestToken); + + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + response = res.first(); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new DataAccessException( + "Error executing query: '" + prepStatement + "' " + e.getMessage(), e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return response; + } + + @Override + public UUID insertCloneTask(TapeRecallTO task, int[] statuses, UUID proposedGroupTaskId) + throws DataAccessException { + + if (task.getTaskId() == null + || task.getRequestToken() == null + || task.getRequestToken().getValue().trim().isEmpty()) { + log.error( + "received Task insert request with empty primary key field TaskId or RequestToken. TaskId = {}, request token = {}", + task.getTaskId(), + task.getRequestToken()); + throw new DataAccessException( + "Unable to create insert the task wth the provided UUID and " + + "request token using UUID-namebased algorithm. TaskId = " + + task.getTaskId() + + " , request token = " + + task.getRequestToken()); + } + Integer status = task.getStatusId(); + + Connection dbConnection = getConnection(); + PreparedStatement prepStat = null; + + try { + dbConnection.setAutoCommit(false); + } catch (SQLException e) { + log.error("Error setting autocommit to false! {}", e.getMessage()); + throw new DataAccessException("Error setting autocommit to false! 
" + e.getMessage(), e); + } + + ResultSet res = null; + try { + + if (statuses == null || statuses.length == 0) { + prepStat = sqlHelper.getQueryGetGroupTaskIds(dbConnection, task.getTaskId()); + } else { + prepStat = sqlHelper.getQueryGetGroupTaskIds(dbConnection, task.getTaskId(), statuses); + } + log.debug("QUERY: {}", prepStat); + + res = prepStat.executeQuery(); + + if (res.first()) { + /* Take the first, but there can be more than one result */ + String uuidString = res.getString(TapeRecallMySQLHelper.COL_GROUP_TASK_ID); + status = Integer.valueOf(res.getInt(TapeRecallMySQLHelper.COL_STATUS)); + task.setStatusId(status.intValue()); + task.setGroupTaskId(UUID.fromString(uuidString)); + Calendar calendar = new GregorianCalendar(); + try { + task.forceStatusUpdateInstants( + res.getDate(TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, calendar), + res.getDate(TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE, calendar)); + } catch (IllegalArgumentException e) { + log.error("Unable to set status update timestamps on the coned task"); + } + } else { + log.debug("No task found for taskId={} Creating a new group entry", task.getTaskId()); + task.setGroupTaskId(proposedGroupTaskId); + task.setStatusId(status.intValue()); + } + + prepStat = sqlHelper.getQueryInsertTask(dbConnection, task); + if (prepStat == null) { + // this case is possible if and only if the task is null or empty + log.error("Cannot create the query because the task is null or empty."); + throw new DataAccessException("Cannot create the query because the task is null or empty."); + } + try { + log.debug("Query(insert-task)={}", prepStat); + prepStat.executeUpdate(); + commit(dbConnection); + } catch (SQLException e) { + rollback(dbConnection); + throw new DataAccessException( + "Error executing query : " + prepStat + " ; " + e.getMessage(), e); + } + } catch (SQLException e) { + rollback(dbConnection); + throw new DataAccessException("Error executing query : " + " ; " + e.getMessage(), e); + } finally { + releaseConnection(new ResultSet[] {res}, new Statement[] {prepStat}, dbConnection); + } + return task.getGroupTaskId(); + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.persistence.dao.TapeRecallDAO#purgeCompletedTasks(int) + */ + @Override + public int purgeCompletedTasks(long expirationTime, int numTasks) throws DataAccessException { + + PreparedStatement ps = null; + Connection con = getConnection(); + + int count = 0; + boolean hasLimit = numTasks > 0; + try { + if (hasLimit) { + ps = sqlHelper.getQueryDeleteCompletedTasks(con, expirationTime, numTasks); + } else { + ps = sqlHelper.getQueryDeleteCompletedTasks(con, expirationTime); + } + + count = ps.executeUpdate(); + + } catch (SQLException e) { + throw new DataAccessException("Error executing query: " + ps, e); + } finally { + releaseConnection(null, ps, con); + } + + return count; + } + + @Override + public void setGroupTaskRetryValue(UUID groupTaskId, int value) throws DataAccessException { + + Connection dbConnection = getConnection(); + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQuerySetGroupTaskRetryValue(dbConnection, groupTaskId, value); + + prepStatement.executeUpdate(); + } catch (SQLException e) { + throw new DataAccessException("Error executing query: " + prepStatement, e); + } finally { + releaseConnection(null, prepStatement, dbConnection); + } + } + + @Override + public TapeRecallTO takeoverTask() throws DataAccessException { + + return takeoverTask(null); + } + + @Override + public TapeRecallTO takeoverTask(String 
voName) throws DataAccessException { + + List taskList = takeoverTasksWithDoubles(1, voName); + + if (taskList.isEmpty()) { + return null; + } + return taskList.get(0); + } + + @Override + public List takeoverTasksWithDoubles(int numberOfTaks) throws DataAccessException { + + return takeoverTasksWithDoubles(numberOfTaks, null); + } + + @Override + public List takeoverTasksWithDoubles(int numberOfTaks, String voName) + throws DataAccessException { + + Connection dbConnection = getConnection(); + + List taskList = Lists.newLinkedList(); + TapeRecallTO task = null; + ResultSet res = null; + + PreparedStatement prepStatement = null; + + try { + dbConnection.setAutoCommit(false); + } catch (SQLException e) { + log.error("Error setting autocommit to false! {}", e.getMessage()); + throw new DataAccessException("Error setting autocommit to false! " + e.getMessage(), e); + } + try { + if (voName == null) { + prepStatement = sqlHelper.getQueryGetTakeoverTasksWithDoubles(dbConnection, numberOfTaks); + } else { + prepStatement = + sqlHelper.getQueryGetTakeoverTasksWithDoubles(dbConnection, numberOfTaks, voName); + } + // start transaction + log.debug("QUERY: {}", prepStatement); + res = prepStatement.executeQuery(); + if (!res.first()) { + log.info("No tape recall rows ready for takeover"); + return taskList; + } + do { + task = new TapeRecallTO(); + setTaskInfo(task, res); + task.setStatus(TapeRecallStatus.IN_PROGRESS); + taskList.add(task); + } while (res.next()); + if (!taskList.isEmpty()) { + try { + prepStatement = + sqlHelper.getQueryUpdateTasksStatus( + dbConnection, + taskList, + TapeRecallStatus.IN_PROGRESS.getStatusId(), + TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, + new Date()); + } catch (IllegalArgumentException e) { + log.error( + "Unable to obtain the query to update task status and set status transition timestamp. 
IllegalArgumentException: " + + e.getMessage()); + throw new DataAccessException( + "Unable to obtain the query to update task status and set status transition timestamp"); + } + prepStatement.executeUpdate(); + } + commit(dbConnection); + } catch (SQLException e) { + rollback(dbConnection); + throw new DataAccessException("Error executing query: " + prepStatement, e); + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return taskList; + } + + @Override + public List getAllInProgressTasks(int numberOfTaks) throws DataAccessException { + + Connection dbConnection = getConnection(); + ResultSet res = null; + List taskList = Lists.newArrayList(); + + PreparedStatement prepStatement = null; + + try { + prepStatement = sqlHelper.getQueryGetAllTasksInProgress(dbConnection, numberOfTaks); + + log.debug("getAllInProgressTasks query: {}", prepStatement); + + res = prepStatement.executeQuery(); + + boolean emptyResultSet = true; + + while (res.next()) { + + emptyResultSet = false; + TapeRecallTO task = new TapeRecallTO(); + setTaskInfo(task, res); + taskList.add(task); + } + + if (emptyResultSet) { + + log.debug("No in progress recall tasks found."); + } + + } catch (Exception e) { + + log.error("Error executing query: {}", prepStatement, e); + throw new DataAccessException("Error executing query: " + prepStatement, e); + + } finally { + + releaseConnection(res, prepStatement, dbConnection); + } + + return taskList; + } + + private void setTaskInfo(TapeRecallTO task, ResultSet res) throws DataAccessException { + + if (res == null) { + throw new DataAccessException("Unable to build Task from NULL ResultSet"); + } + + String requestTokenStr = null; + Timestamp insertionInstant; + try { + requestTokenStr = res.getString(TapeRecallMySQLHelper.COL_REQUEST_TOKEN); + insertionInstant = res.getTimestamp(TapeRecallMySQLHelper.COL_DATE); + + } catch (SQLException e) { + throw new DataAccessException("Unable to retrieve RequestToken String from ResultSet. " + e); + } + try { + task.setRequestToken(new TRequestToken(requestTokenStr, insertionInstant)); + } catch (InvalidTRequestTokenAttributesException e) { + throw new DataAccessException( + "Unable to build TRequestToken from token='" + requestTokenStr + "'. " + e); + } + + UUID groupTaskId = null; + String groupTaskIdStr = null; + try { + groupTaskIdStr = res.getString(TapeRecallMySQLHelper.COL_GROUP_TASK_ID); + if (groupTaskIdStr != null) { + try { + groupTaskId = UUID.fromString(groupTaskIdStr); + task.setGroupTaskId(groupTaskId); + } catch (IllegalArgumentException iae) { + throw new DataAccessException( + "Unable to build UUID from GroupTaskId='" + groupTaskId + "'. " + iae); + } + } + } catch (SQLException e) { + throw new DataAccessException("Unable to retrieve GroupTaskId String from ResultSet. 
" + e); + } + + // do not set the task ID, it is produced by the setFilename call + + try { + + task.setRequestType(valueOf(res.getString(TapeRecallMySQLHelper.COL_REQUEST_TYPE))); + task.setFileName(res.getString(TapeRecallMySQLHelper.COL_FILE_NAME)); + task.setPinLifetime(res.getInt(TapeRecallMySQLHelper.COL_PIN_LIFETIME)); + task.setStatusId(res.getInt(TapeRecallMySQLHelper.COL_STATUS)); + task.setVoName(res.getString(TapeRecallMySQLHelper.COL_VO_NAME)); + task.setUserID(res.getString(TapeRecallMySQLHelper.COL_USER_ID)); + task.setRetryAttempt(res.getInt(TapeRecallMySQLHelper.COL_RETRY_ATTEMPT)); + Calendar calendar = new GregorianCalendar(); + task.setDeferredRecallInstant( + res.getTimestamp(TapeRecallMySQLHelper.COL_DEFERRED_STARTTIME, calendar)); + task.setInsertionInstant(res.getTimestamp(TapeRecallMySQLHelper.COL_DATE, calendar)); + try { + task.forceStatusUpdateInstants( + res.getTimestamp(TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE, calendar), + res.getTimestamp(TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE, calendar)); + } catch (IllegalArgumentException e) { + log.error("Unable to set status update timestamps on the coned task"); + } + } catch (SQLException e) { + throw new DataAccessException("Unable to getting info from ResultSet. " + e); + } + } + + @Override + public boolean setGroupTaskStatus(UUID groupTaskId, int newStatusId, Date timestamp) + throws DataAccessException { + + PreparedStatement prepStatement = null; + Connection dbConnection = getConnection(); + + try { + dbConnection.setAutoCommit(false); + } catch (SQLException e) { + log.error("Error setting autocommit to false! {}", e.getMessage()); + throw new DataAccessException("Error setting autocommit to false! " + e.getMessage(), e); + } + + ResultSet res = null; + boolean ret = false; + int oldStatusId = -1; + + try { + + try { + prepStatement = sqlHelper.getQueryGetGroupTasks(dbConnection, groupTaskId); + + log.debug("QUERY: {}", prepStatement); + // retrieves the tasks of this task group + res = prepStatement.executeQuery(); + + if (!res.first()) { + log.error("No tasks with GroupTaskId='{}'", groupTaskId); + throw new DataAccessException( + "No recall table row retrieved executing query: '" + prepStatement + "'"); + } + // verify if their stored status is equal for all + oldStatusId = res.getInt(TapeRecallMySQLHelper.COL_STATUS); + do { + int currentStatusId = res.getInt(TapeRecallMySQLHelper.COL_STATUS); + if (currentStatusId != oldStatusId) { + log.warn( + "The tasks with groupTaskId {} have different statuses: {} from task {} differs " + + "from expected {}", + groupTaskId, + currentStatusId, + res.getString(TapeRecallMySQLHelper.COL_TASK_ID), + oldStatusId); + break; + } + oldStatusId = currentStatusId; + } while (res.next()); + } catch (SQLException e) { + log.error("Unable to retrieve groupTaskId related tasks. SQLException: {}", e); + throw new DataAccessException("Unable to retrieve groupTaskId related tasks. 
"); + } + if (oldStatusId != newStatusId) { + // update the task status and if is a valid transition set the relative + // transition timestamp + if (!TapeRecallStatus.getRecallTaskStatus(oldStatusId).precedes(newStatusId)) { + log.warn( + "Requested the update of the status of a recall task group to status {} that is precedent " + + "to the recorded status performing the request the same...", + newStatusId, + oldStatusId); + } + String timestampColumn = null; + if (TapeRecallStatus.isFinalStatus(newStatusId)) { + timestampColumn = TapeRecallMySQLHelper.COL_FINAL_STATUS_DATE; + } else { + if (TapeRecallStatus.IN_PROGRESS.equals( + TapeRecallStatus.getRecallTaskStatus(newStatusId))) { + timestampColumn = TapeRecallMySQLHelper.COL_IN_PROGRESS_DATE; + } else { + log.warn( + "unable to determine the status update timestamp column to use given the new statusId '{}'", + newStatusId); + } + } + if (timestampColumn != null) { + try { + prepStatement = + sqlHelper.getQueryUpdateGroupTaskStatus( + dbConnection, groupTaskId, newStatusId, timestampColumn, timestamp); + } catch (IllegalArgumentException e) { + log.error( + "Unable to obtain the query to update task status and set status transition timestamp. IllegalArgumentException: {}", + e.getMessage()); + throw new DataAccessException( + "Unable to obtain the query to update task status and set status transition timestamp"); + } catch (SQLException e) { + throw new DataAccessException("Error executing query: " + prepStatement.toString(), e); + } + } else { + try { + prepStatement = + sqlHelper.getQuerySetGroupTaskStatus(dbConnection, groupTaskId, newStatusId); + } catch (SQLException e) { + throw new DataAccessException("Error executing query: " + prepStatement.toString(), e); + } + } + try { + if (prepStatement.executeUpdate() > 0) { + ret = true; + } + commit(dbConnection); + } catch (SQLException e) { + throw new DataAccessException("Error executing query: " + prepStatement.toString(), e); + } + } else { + log.warn( + "Skipping the status upadate operation, the status already stored is equal to the new one provided"); + } + } finally { + releaseConnection(res, prepStatement, dbConnection); + } + return ret; + } } diff --git a/src/main/java/it/grid/storm/persistence/model/GUID.java b/src/main/java/it/grid/storm/persistence/model/GUID.java index 08f59059..05016966 100644 --- a/src/main/java/it/grid/storm/persistence/model/GUID.java +++ b/src/main/java/it/grid/storm/persistence/model/GUID.java @@ -1,160 +1,156 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; import java.io.Serializable; - import java.net.InetAddress; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -//FIXME: Why isn't storm using the standard UUID class? +// FIXME: Why isn't storm using the standard UUID class? /** * GUID Value Object. - *
<p>
- * Used to retain/generate a GUID/UUID. + * + *
<p>
Used to retain/generate a GUID/UUID. + * *
* <p>
*/ - public class GUID implements Serializable { - private static final long serialVersionUID = 7241176020077117264L; + private static final long serialVersionUID = 7241176020077117264L; - private static final Logger log = LoggerFactory.getLogger(GUID.class); + private static final Logger log = LoggerFactory.getLogger(GUID.class); - private byte guidValue[] = new byte[16]; + private byte guidValue[] = new byte[16]; - public GUID() { - buildNewGUID(); - } + public GUID() { + buildNewGUID(); + } - public GUID(String guidString) { + public GUID(String guidString) { - int pos = 0; - int count = 0; + int pos = 0; + int count = 0; - while (pos < guidString.length()) { - guidValue[count] = getByteValue(guidString.substring(pos, pos + 2)); - pos += 2; - count++; + while (pos < guidString.length()) { + guidValue[count] = getByteValue(guidString.substring(pos, pos + 2)); + pos += 2; + count++; - if (pos == guidString.length()) { - continue; - } - - if (guidString.charAt(pos) == '-') { - pos++; - } - } - } - - - private byte getByteValue(String hex) { - - return (byte) Integer.parseInt(hex, 16); - } - - private String getHexString(byte val) { - - String hexString; - if (val < 0) { - hexString = Integer.toHexString(val + 256); - } else { - hexString = Integer.toHexString(val); - } - - if (hexString.length() < 2) { - return "0" + hexString.toUpperCase(); - } - return hexString.toUpperCase(); - } - - private void setByteValues(byte[] lg, int startPos, int count) { - - for (int i = 0; i < count; i++) { - guidValue[i + startPos] = lg[i]; - } - } - - private void setByteValues(long lg, int startPos, int count) { - - for (int i = 0; i < count; i++) { - guidValue[i + startPos] = (byte) (lg & 0xFF); - lg = lg / 0xFF; - } - } - - private void buildNewGUID() { - - try { - // The time in milli seconds for six bytes - // gives us until the year 10000ish. - long lg = System.currentTimeMillis(); - setByteValues(lg, 0, 6); - - // The hash code for this object for two bytes (As a why not option?) - lg = this.hashCode(); - setByteValues(lg, 6, 2); - - // The ip address for this computer (as we cannot get to the MAC address) - InetAddress inet = InetAddress.getLocalHost(); - byte[] bytes = inet.getAddress(); - setByteValues(bytes, 8, 4); - - // A random number for two bytes - lg = (long) ((Math.random() * 0xFFFF)); - setByteValues(lg, 12, 2); - - // Another random number for two bytes - lg = (long) ((Math.random() * 0xFFFF)); - setByteValues(lg, 14, 2); - - } catch (Exception e) { - log.error("GUID generation error : {}", e.getMessage(), e); - } - } - - public byte[] getBytes() { - - return guidValue; - } - - /** - * Overrides toString(). 
Returns the array of bytes in the standard form: - * xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - * - * @return the string format - */ - @Override - public String toString() { - - StringBuilder buf = new StringBuilder(); - - buf.append(getHexString(guidValue[0])); - buf.append(getHexString(guidValue[1])); - buf.append(getHexString(guidValue[2])); - buf.append(getHexString(guidValue[3])); - buf.append('-'); - buf.append(getHexString(guidValue[4])); - buf.append(getHexString(guidValue[5])); - buf.append('-'); - buf.append(getHexString(guidValue[6])); - buf.append(getHexString(guidValue[7])); - buf.append('-'); - buf.append(getHexString(guidValue[8])); - buf.append(getHexString(guidValue[9])); - buf.append('-'); - buf.append(getHexString(guidValue[10])); - buf.append(getHexString(guidValue[11])); - buf.append(getHexString(guidValue[12])); - buf.append(getHexString(guidValue[13])); - buf.append(getHexString(guidValue[14])); - buf.append(getHexString(guidValue[15])); - - return buf.toString(); - } + if (pos == guidString.length()) { + continue; + } + + if (guidString.charAt(pos) == '-') { + pos++; + } + } + } + + private byte getByteValue(String hex) { + + return (byte) Integer.parseInt(hex, 16); + } + + private String getHexString(byte val) { + + String hexString; + if (val < 0) { + hexString = Integer.toHexString(val + 256); + } else { + hexString = Integer.toHexString(val); + } + + if (hexString.length() < 2) { + return "0" + hexString.toUpperCase(); + } + return hexString.toUpperCase(); + } + + private void setByteValues(byte[] lg, int startPos, int count) { + + for (int i = 0; i < count; i++) { + guidValue[i + startPos] = lg[i]; + } + } + + private void setByteValues(long lg, int startPos, int count) { + + for (int i = 0; i < count; i++) { + guidValue[i + startPos] = (byte) (lg & 0xFF); + lg = lg / 0xFF; + } + } + + private void buildNewGUID() { + + try { + // The time in milli seconds for six bytes + // gives us until the year 10000ish. + long lg = System.currentTimeMillis(); + setByteValues(lg, 0, 6); + + // The hash code for this object for two bytes (As a why not option?) + lg = this.hashCode(); + setByteValues(lg, 6, 2); + + // The ip address for this computer (as we cannot get to the MAC address) + InetAddress inet = InetAddress.getLocalHost(); + byte[] bytes = inet.getAddress(); + setByteValues(bytes, 8, 4); + + // A random number for two bytes + lg = (long) ((Math.random() * 0xFFFF)); + setByteValues(lg, 12, 2); + + // Another random number for two bytes + lg = (long) ((Math.random() * 0xFFFF)); + setByteValues(lg, 14, 2); + + } catch (Exception e) { + log.error("GUID generation error : {}", e.getMessage(), e); + } + } + + public byte[] getBytes() { + + return guidValue; + } + + /** + * Overrides toString(). 
Returns the array of bytes in the standard form: + * xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + * + * @return the string format + */ + @Override + public String toString() { + + StringBuilder buf = new StringBuilder(); + + buf.append(getHexString(guidValue[0])); + buf.append(getHexString(guidValue[1])); + buf.append(getHexString(guidValue[2])); + buf.append(getHexString(guidValue[3])); + buf.append('-'); + buf.append(getHexString(guidValue[4])); + buf.append(getHexString(guidValue[5])); + buf.append('-'); + buf.append(getHexString(guidValue[6])); + buf.append(getHexString(guidValue[7])); + buf.append('-'); + buf.append(getHexString(guidValue[8])); + buf.append(getHexString(guidValue[9])); + buf.append('-'); + buf.append(getHexString(guidValue[10])); + buf.append(getHexString(guidValue[11])); + buf.append(getHexString(guidValue[12])); + buf.append(getHexString(guidValue[13])); + buf.append(getHexString(guidValue[14])); + buf.append(getHexString(guidValue[15])); + + return buf.toString(); + } } diff --git a/src/main/java/it/grid/storm/persistence/model/InvalidPtGChunkDataAttributesException.java b/src/main/java/it/grid/storm/persistence/model/InvalidPtGChunkDataAttributesException.java index 836ec9c8..0cf531df 100644 --- a/src/main/java/it/grid/storm/persistence/model/InvalidPtGChunkDataAttributesException.java +++ b/src/main/java/it/grid/storm/persistence/model/InvalidPtGChunkDataAttributesException.java @@ -1,112 +1,114 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; +import it.grid.storm.common.types.TURLPrefix; +import it.grid.storm.srm.types.TDirOption; +import it.grid.storm.srm.types.TFileStorageType; +import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; -import it.grid.storm.srm.types.TStorageSystemInfo; -import it.grid.storm.srm.types.TLifeTimeInSeconds; -import it.grid.storm.srm.types.TFileStorageType; -import it.grid.storm.srm.types.TSpaceToken; -import it.grid.storm.srm.types.TDirOption; -import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.srm.types.TSizeInBytes; -import it.grid.storm.srm.types.TReturnStatus; +import it.grid.storm.srm.types.TSpaceToken; +import it.grid.storm.srm.types.TStorageSystemInfo; import it.grid.storm.srm.types.TTURL; /** - * This class represents an exceptin thrown when the attributes supplied to the - * constructor of PtGChunkData are invalid, that is if any of the following is - * _null_: requestToken, fromSURL, storageSystemInfo, lifeTime, fileStorageType, - * spaceToken, numOfLevels, TURLPrefix transferProtocols, fileSize, status, - * estimatedWaitTimeOnQueue, estimatedProcessingTime, transferURL, - * remainingPinTime. - * + * This class represents an exceptin thrown when the attributes supplied to the constructor of + * PtGChunkData are invalid, that is if any of the following is _null_: requestToken, fromSURL, + * storageSystemInfo, lifeTime, fileStorageType, spaceToken, numOfLevels, TURLPrefix + * transferProtocols, fileSize, status, estimatedWaitTimeOnQueue, estimatedProcessingTime, + * transferURL, remainingPinTime. 
+ * * @author EGRID - ICTP Trieste * @date March 23rd, 2005 * @version 2.0 */ public class InvalidPtGChunkDataAttributesException extends Exception { - // booleans that indicate whether the corresponding variable is null - private boolean nullRequestToken; - private boolean nullFromSURL; - private boolean nullStorageSystemInfo; - private boolean nullLifeTime; - private boolean nullFileStorageType; - private boolean nullSpaceToken; - private boolean nullDirOption; - private boolean nullTransferProtocols; - private boolean nullFileSize; - private boolean nullStatus; - private boolean nullEstimatedWaitTimeOnQueue; - private boolean nullEstimatedProcessingTime; - private boolean nullTransferURL; - private boolean nullRemainingPinTime; + // booleans that indicate whether the corresponding variable is null + private boolean nullRequestToken; + private boolean nullFromSURL; + private boolean nullStorageSystemInfo; + private boolean nullLifeTime; + private boolean nullFileStorageType; + private boolean nullSpaceToken; + private boolean nullDirOption; + private boolean nullTransferProtocols; + private boolean nullFileSize; + private boolean nullStatus; + private boolean nullEstimatedWaitTimeOnQueue; + private boolean nullEstimatedProcessingTime; + private boolean nullTransferURL; + private boolean nullRemainingPinTime; - /** - * Constructor that requires the attributes that caused the exception to be - * thrown. - */ - public InvalidPtGChunkDataAttributesException(TRequestToken requestToken, - TSURL fromSURL, TStorageSystemInfo storageSystemInfo, - TLifeTimeInSeconds lifeTime, TFileStorageType fileStorageType, - TSpaceToken spaceToken, TDirOption dirOption, TURLPrefix transferProtocols, - TSizeInBytes fileSize, TReturnStatus status, - TLifeTimeInSeconds estimatedWaitTimeOnQueue, - TLifeTimeInSeconds estimatedProcessingTime, TTURL transferURL, - TLifeTimeInSeconds remainingPinTime) { + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidPtGChunkDataAttributesException( + TRequestToken requestToken, + TSURL fromSURL, + TStorageSystemInfo storageSystemInfo, + TLifeTimeInSeconds lifeTime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TLifeTimeInSeconds estimatedWaitTimeOnQueue, + TLifeTimeInSeconds estimatedProcessingTime, + TTURL transferURL, + TLifeTimeInSeconds remainingPinTime) { - nullRequestToken = requestToken == null; - nullFromSURL = fromSURL == null; - nullStorageSystemInfo = storageSystemInfo == null; - nullLifeTime = lifeTime == null; - nullFileStorageType = fileStorageType == null; - nullSpaceToken = spaceToken == null; - nullDirOption = dirOption == null; - nullTransferProtocols = transferProtocols == null; - nullFileSize = fileSize == null; - nullStatus = status == null; - nullEstimatedWaitTimeOnQueue = estimatedWaitTimeOnQueue == null; - nullEstimatedProcessingTime = estimatedProcessingTime == null; - nullTransferURL = transferURL == null; - nullRemainingPinTime = remainingPinTime == null; - } + nullRequestToken = requestToken == null; + nullFromSURL = fromSURL == null; + nullStorageSystemInfo = storageSystemInfo == null; + nullLifeTime = lifeTime == null; + nullFileStorageType = fileStorageType == null; + nullSpaceToken = spaceToken == null; + nullDirOption = dirOption == null; + nullTransferProtocols = transferProtocols == null; + nullFileSize = fileSize == null; + nullStatus = status == null; + nullEstimatedWaitTimeOnQueue = estimatedWaitTimeOnQueue == null; + nullEstimatedProcessingTime = estimatedProcessingTime == null; + nullTransferURL = transferURL == null; + nullRemainingPinTime = remainingPinTime == null; + } - public String toString() { + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Invalid PtGChunkData attributes: null-requestToken="); - sb.append(nullRequestToken); - sb.append("; nul-fromSURL="); - sb.append(nullFromSURL); - sb.append("; null-storageSystemInfo="); - sb.append(nullStorageSystemInfo); - sb.append("; null-lifeTime="); - sb.append(nullLifeTime); - sb.append("; null-filestorageType="); - sb.append(nullFileStorageType); - sb.append("; null-spaceToken="); - sb.append(nullSpaceToken); - sb.append("; null-dirOption="); - sb.append(nullDirOption); - sb.append("; null-transferProtocols="); - sb.append(nullTransferProtocols); - sb.append("; null-fileSize="); - sb.append(nullFileSize); - sb.append("; null-status="); - sb.append(nullStatus); - sb.append("; null-estimatedWaitTimeOnQueue="); - sb.append(nullEstimatedWaitTimeOnQueue); - sb.append("; null-estimatedProcessingTime="); - sb.append(nullEstimatedProcessingTime); - sb.append("; null-transferURL="); - sb.append(nullTransferURL); - sb.append("; null-remainingPinTime="); - sb.append(nullRemainingPinTime); - sb.append("."); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("Invalid PtGChunkData attributes: null-requestToken="); + sb.append(nullRequestToken); + sb.append("; nul-fromSURL="); + sb.append(nullFromSURL); + sb.append("; null-storageSystemInfo="); + sb.append(nullStorageSystemInfo); + sb.append("; null-lifeTime="); + sb.append(nullLifeTime); + sb.append("; null-filestorageType="); + sb.append(nullFileStorageType); + sb.append("; null-spaceToken="); + sb.append(nullSpaceToken); + sb.append("; null-dirOption="); + sb.append(nullDirOption); + sb.append("; null-transferProtocols="); + sb.append(nullTransferProtocols); + 
sb.append("; null-fileSize="); + sb.append(nullFileSize); + sb.append("; null-status="); + sb.append(nullStatus); + sb.append("; null-estimatedWaitTimeOnQueue="); + sb.append(nullEstimatedWaitTimeOnQueue); + sb.append("; null-estimatedProcessingTime="); + sb.append(nullEstimatedProcessingTime); + sb.append("; null-transferURL="); + sb.append(nullTransferURL); + sb.append("; null-remainingPinTime="); + sb.append(nullRemainingPinTime); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/persistence/model/InvalidRequestSummaryDataAttributesException.java b/src/main/java/it/grid/storm/persistence/model/InvalidRequestSummaryDataAttributesException.java index 9bbe606d..e41bc4d6 100644 --- a/src/main/java/it/grid/storm/persistence/model/InvalidRequestSummaryDataAttributesException.java +++ b/src/main/java/it/grid/storm/persistence/model/InvalidRequestSummaryDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; @@ -11,58 +10,58 @@ * This class represents an Exception thrown when a RequestSummaryData object is created with any * invalid attributes: null TRequestToken, null TRequestType, totalFilesInThisRequest<0, * numOfQueuedRequests<0, numOfProgessingRequests<0, numFinished<0. - * + * * @author EGRID - ICTP Trieste * @date March 18th, 2005 * @version 2.0 */ public class InvalidRequestSummaryDataAttributesException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1L; + /** */ + private static final long serialVersionUID = 1L; - private final boolean nullRequestToken; - private final boolean nullRequestType; - private final boolean negTotalFilesInThisRequest; - private final boolean negNumOfQueuedRequests; - private final boolean negNumOfProgressingRequests; - private final boolean negNumFinished; + private final boolean nullRequestToken; + private final boolean nullRequestType; + private final boolean negTotalFilesInThisRequest; + private final boolean negNumOfQueuedRequests; + private final boolean negNumOfProgressingRequests; + private final boolean negNumFinished; - /** - * Constructor that requires the attributes that caused the exception to be thrown. - */ - public InvalidRequestSummaryDataAttributesException(TRequestToken requestToken, - TRequestType requestType, int totalFilesInThisRequest, int numOfQueuedRequests, - int numOfProgressingRequests, int numFinished) { + /** Constructor that requires the attributes that caused the exception to be thrown. 
*/ + public InvalidRequestSummaryDataAttributesException( + TRequestToken requestToken, + TRequestType requestType, + int totalFilesInThisRequest, + int numOfQueuedRequests, + int numOfProgressingRequests, + int numFinished) { - nullRequestToken = (requestToken == null); - nullRequestType = (requestType == null); - negTotalFilesInThisRequest = (totalFilesInThisRequest < 0); - negNumOfQueuedRequests = (numOfQueuedRequests < 0); - negNumOfProgressingRequests = (numOfProgressingRequests < 0); - negNumFinished = (numFinished < 0); - } + nullRequestToken = (requestToken == null); + nullRequestType = (requestType == null); + negTotalFilesInThisRequest = (totalFilesInThisRequest < 0); + negNumOfQueuedRequests = (numOfQueuedRequests < 0); + negNumOfProgressingRequests = (numOfProgressingRequests < 0); + negNumFinished = (numFinished < 0); + } - @Override - public String toString() { + @Override + public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Invalid RequestSummaryData attributes exception: "); - sb.append("null-requestToken="); - sb.append(nullRequestToken); - sb.append("; null-requestType="); - sb.append(nullRequestType); - sb.append("; negative-totalFilesInThisRequest="); - sb.append(negTotalFilesInThisRequest); - sb.append("; negative-numOfQueuedRequests="); - sb.append(negNumOfQueuedRequests); - sb.append("; negative-numOfProgressingRequests="); - sb.append(negNumOfProgressingRequests); - sb.append("; negative-numFinished="); - sb.append(negNumFinished); - sb.append("."); - return sb.toString(); - } + StringBuilder sb = new StringBuilder(); + sb.append("Invalid RequestSummaryData attributes exception: "); + sb.append("null-requestToken="); + sb.append(nullRequestToken); + sb.append("; null-requestType="); + sb.append(nullRequestType); + sb.append("; negative-totalFilesInThisRequest="); + sb.append(negTotalFilesInThisRequest); + sb.append("; negative-numOfQueuedRequests="); + sb.append(negNumOfQueuedRequests); + sb.append("; negative-numOfProgressingRequests="); + sb.append(negNumOfProgressingRequests); + sb.append("; negative-numFinished="); + sb.append(negNumFinished); + sb.append("."); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/persistence/model/PtGChunkTO.java b/src/main/java/it/grid/storm/persistence/model/PtGChunkTO.java index 66efefe3..0df92de4 100644 --- a/src/main/java/it/grid/storm/persistence/model/PtGChunkTO.java +++ b/src/main/java/it/grid/storm/persistence/model/PtGChunkTO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; @@ -16,480 +15,471 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TStorageSystemInfo; import it.grid.storm.srm.types.TTURL; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a PrepareToGetChunkData, that is part of a multifile - * PrepareToGet srm request. 
It contains data about: the requestToken, the - * fromSURL and the storageSystemInfo for that SURL, the requested lifeTime of - * pinning, the requested fileStorageType and any available spaceToken, the - * TDirOption which explains whether the requested SURL is a directory and if it - * must be recursed at all levels, as well as the desired number of levels to - * recurse, the desired transferProtocols in order of preference, the fileSize, - * the estimatedTimeOnQueue, the estimatedProcessingTime, the transferURL for - * the supplied SURL, and the remainingPinTime. - * + * This class represents a PrepareToGetChunkData, that is part of a multifile PrepareToGet srm + * request. It contains data about: the requestToken, the fromSURL and the storageSystemInfo for + * that SURL, the requested lifeTime of pinning, the requested fileStorageType and any available + * spaceToken, the TDirOption which explains whether the requested SURL is a directory and if it + * must be recursed at all levels, as well as the desired number of levels to recurse, the desired + * transferProtocols in order of preference, the fileSize, the estimatedTimeOnQueue, the + * estimatedProcessingTime, the transferURL for the supplied SURL, and the remainingPinTime. + * * @author EGRID - ICTP Trieste * @date March 21st, 2005 * @version 2.0 */ public class PtGChunkTO { - private static final Logger log = LoggerFactory.getLogger(PtGChunkTO.class); - - private TRequestToken requestToken; - - private TSURL fromSURL; - private TStorageSystemInfo storageSystemInfo; - - private TLifeTimeInSeconds lifeTime; // requested lifetime for fromSURL - - // BEWARE!!! It is the pin time!!! - private TFileStorageType fileStorageType; // TFileStorageType requested for - // specific fromSURL to get - private TSpaceToken spaceToken; // SpaceToken to use for fromSURL - private TDirOption dirOption; // specifies if the request regards a directory - // and related info - - private TURLPrefix transferProtocols; // list of desired transport protocols - // for fromSURL - - private TSizeInBytes fileSize; // size of file - private TReturnStatus status; // return status for this chunk of request - private TLifeTimeInSeconds estimatedWaitTimeOnQueue; // estimated time this - // chunk will remain in - // queue - private TLifeTimeInSeconds estimatedProcessingTime; // estimated time this - // chunk will take to be - // processed - private TTURL transferURL; // TURL for picking up the requested file - private TLifeTimeInSeconds remainingPinTime; // estimated time remaining for - // Pin validity - - public PtGChunkTO(TRequestToken requestToken, TSURL fromSURL, - TStorageSystemInfo storageSystemInfo, TLifeTimeInSeconds lifeTime, - TFileStorageType fileStorageType, TSpaceToken spaceToken, - TDirOption dirOption, TURLPrefix transferProtocols, TSizeInBytes fileSize, - TReturnStatus status, TLifeTimeInSeconds estimatedWaitTimeOnQueue, - TLifeTimeInSeconds estimatedProcessingTime, TTURL transferURL, - TLifeTimeInSeconds remainingPinTime) - throws InvalidPtGChunkDataAttributesException { - - boolean ok = requestToken != null && fromSURL != null - && storageSystemInfo != null && lifeTime != null - && fileStorageType != null && spaceToken != null && dirOption != null - && transferProtocols != null && fileSize != null && status != null - && estimatedWaitTimeOnQueue != null && estimatedProcessingTime != null - && transferURL != null && remainingPinTime != null; - - if (!ok) { - throw new InvalidPtGChunkDataAttributesException(requestToken, fromSURL, - storageSystemInfo, 
lifeTime, fileStorageType, spaceToken, dirOption, - transferProtocols, fileSize, status, estimatedWaitTimeOnQueue, - estimatedProcessingTime, transferURL, remainingPinTime); - } - this.requestToken = requestToken; - this.fromSURL = fromSURL; - this.storageSystemInfo = storageSystemInfo; - this.lifeTime = lifeTime; - this.fileStorageType = fileStorageType; - this.spaceToken = spaceToken; - this.dirOption = dirOption; - this.transferProtocols = transferProtocols; - this.fileSize = fileSize; - this.status = status; - this.estimatedWaitTimeOnQueue = estimatedWaitTimeOnQueue; - this.estimatedProcessingTime = estimatedProcessingTime; - this.transferURL = transferURL; - } - - /** - * Method that returns the requestToken of the srm request to which this chunk - * belongs. - */ - public TRequestToken requestToken() { - - return requestToken; - } - - /** - * Method that returns the fromSURL of the srm request to which this chunk - * belongs. - */ - public TSURL fromSURL() { - - return fromSURL; - } - - /** - * Method that returns the storageSystemInfo of the srm request to which this - * chunk belongs - */ - public TStorageSystemInfo storageSystemInfo() { - - return storageSystemInfo; - } - - /** - * Method that returns the requested pin life time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds lifeTime() { - - return lifeTime; - } - - /** - * Method that returns the filerequested pin life time for this chunk of the - * srm request. - */ - public TFileStorageType fileStorageType() { - - return fileStorageType; - } - - /** - * Method that returns the space token supplied for this chunk of the srm - * request. - */ - public TSpaceToken spaceToken() { - - return spaceToken; - } - - /** - * Method that returns the dirOption specified in the srm request. - */ - public TDirOption dirOption() { - - return dirOption; - } - - /** - * Method that returns a TURLPrefix containing the transfer protocols desired - * for this chunk of the srm request. - */ - public TURLPrefix transferProtocols() { - - return transferProtocols; - } - - /** - * Method that returns the status for this chunk of the srm request. - */ - public TReturnStatus status() { - - return status; - } - - /** - * Method that returns the file size for this chunk of the srm request. - */ - public TSizeInBytes fileSize() { - - return fileSize; - } - - /** - * Method that returns the estimated time in queue for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds estimatedWaitTimeOnQueue() { - - return estimatedWaitTimeOnQueue; - } - - /** - * Method that returns the estimated processing time for this chunk of the srm - * request. - */ - public TLifeTimeInSeconds estimatedProcessingTime() { - - return estimatedProcessingTime; - } - - /** - * Method that returns the TURL for this chunk of the srm request. - */ - public TTURL transferURL() { - - return transferURL; - } - - /** - * Method that returns the estimated remaining pin time for this chunk of the - * srm request. - */ - public TLifeTimeInSeconds remainingPinTime() { - - return remainingPinTime; - } - - /** - * Method that sets the status of this request to SRM_REQUEST_QUEUED; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. 
- */ - public void changeStatusSRM_REQUEST_QUEUED(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_REQUEST_QUEUED, explanation); - } - - /** - * Method that sets the status of this request to SRM_DONE; it needs the - * explanation String which describes the situation in greater detail; if a - * null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_DONE(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_DONE, explanation); - } - - /** - * Method that sets the status of this request to SRM_INVALID_REQUEST; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_INVALID_REQUEST(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, explanation); - } - - /** - * Method that sets the status of this request to SRM_AUTHORIZATION_FAILURE; - * it needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_AUTHORIZATION_FAILURE(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - explanation); - } - - /** - * Method that sets the status of this request to SRM_ABORTED; it needs the - * explanation String which describes the situation in greater detail; if a - * null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_ABORTED(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_ABORTED, explanation); - } - - /** - * Method that sets the status of this request to SRM_REQUEST_INPROGRESS; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_REQUEST_INPROGRESS(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_REQUEST_INPROGRESS, explanation); - } - - /** - * Method that sets the status of this request to SRM_INTERNAL_ERROR; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_INTERNAL_ERROR(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_INTERNAL_ERROR, explanation); - } - - /** - * Method that sets the status of this request to SRM_FATAL_INTERNAL_ERROR; it - * needs the explanation String which describes the situation in greater - * detail; if a null is passed, then an empty String is used as explanation. - */ - public void changeStatusSRM_FATAL_INTERNAL_ERROR(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_FATAL_INTERNAL_ERROR, - explanation); - } - - /** - * Method that sets the status of this request to SRM_INVALID_PATH; it needs - * the explanation String which describes the situation in greater detail; if - * a null is passed, then an empty String is used as explanation. 
- */ - public void changeStatusSRM_INVALID_PATH(String explanation) { - - if (explanation == null) { - explanation = ""; - } - status = new TReturnStatus(TStatusCode.SRM_INVALID_PATH, explanation); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("PtGChunkData\n"); - sb.append("RequestToken="); - sb.append(requestToken); - sb.append("; "); - sb.append("fromSURL="); - sb.append(fromSURL); - sb.append("; "); - sb.append("storageSystemInfo="); - sb.append(storageSystemInfo); - sb.append("; "); - sb.append("lifeTime="); - sb.append(lifeTime); - sb.append("; "); - sb.append("fileStorageType="); - sb.append(fileStorageType); - sb.append("; "); - sb.append("spaceToken"); - sb.append(spaceToken); - sb.append("; "); - sb.append("dirOption="); - sb.append(dirOption); - sb.append("; "); - sb.append("transferProtocols="); - sb.append(transferProtocols); - sb.append("; "); - sb.append("fileSize="); - sb.append(fileSize); - sb.append("; "); - sb.append("status="); - sb.append(status); - sb.append("; "); - sb.append("estimatedWaitTimeOnQueue="); - sb.append(estimatedWaitTimeOnQueue); - sb.append("; "); - sb.append("estimatedProcessingTime="); - sb.append(estimatedProcessingTime); - sb.append("; "); - sb.append("transferURL="); - sb.append(transferURL); - sb.append("; "); - sb.append("remainingPinTime="); - sb.append(remainingPinTime); - sb.append("."); - return sb.toString(); - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + requestToken.hashCode(); - hash = 37 * hash + fromSURL.hashCode(); - hash = 37 * hash + storageSystemInfo.hashCode(); - hash = 37 * hash + lifeTime.hashCode(); - hash = 37 * hash + fileStorageType.hashCode(); - hash = 37 * hash + spaceToken.hashCode(); - hash = 37 * hash + dirOption.hashCode(); - hash = 37 * hash + transferProtocols.hashCode(); - hash = 37 * hash + fileSize.hashCode(); - hash = 37 * hash + status.hashCode(); - hash = 37 * hash + estimatedWaitTimeOnQueue.hashCode(); - hash = 37 * hash + estimatedProcessingTime.hashCode(); - hash = 37 * hash + transferURL.hashCode(); - hash = 37 * hash + remainingPinTime.hashCode(); - return hash; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof PtGChunkTO)) { - return false; - } - PtGChunkTO cd = (PtGChunkTO) o; - return requestToken.equals(cd.requestToken) && fromSURL.equals(cd.fromSURL) - && storageSystemInfo.equals(cd.storageSystemInfo) - && lifeTime.equals(cd.lifeTime) - && fileStorageType.equals(cd.fileStorageType) - && spaceToken.equals(cd.spaceToken) && dirOption.equals(cd.dirOption) - && transferProtocols.equals(cd.transferProtocols) - && fileSize.equals(cd.fileSize) && status.equals(cd.status) - && estimatedWaitTimeOnQueue.equals(cd.estimatedWaitTimeOnQueue) - && estimatedProcessingTime.equals(cd.estimatedProcessingTime) - && transferURL.equals(cd.transferURL) - && remainingPinTime.equals(cd.remainingPinTime); - } - - /** - * Method used to set the size of the file corresponding to the requested - * SURL. If the supplied TSizeInByte is null, the nothing gets set! - */ - public TSizeInBytes setFileSize(final TSizeInBytes size) { - - if (size != null) { - fileSize = size; - } - return null; - }; - - /** - * Method used to set the estimated time that the chunk will spend on the - * queue. If the supplied TLifeTimeInSeconds is null, then nothing gets set! 
- */ - public void setEstimatedWaitTimeOnQueue(final TLifeTimeInSeconds time) { - - if (time != null) { - estimatedWaitTimeOnQueue = time; - } - }; - - /** - * Method used to set the estimated time the processing will take. If the - * supplied TLifeTimeInSeconds is null, then nothing gets set! - */ - public void setEstimatedProcessingTime(final TLifeTimeInSeconds time) { - - if (time != null) { - estimatedProcessingTime = time; - } - }; - - /** - * Method used to set the transferURL associated to the SURL of this chunk. If - * TTURL is null, then nothing gets set! - */ - public void setTransferURL(final TTURL turl) { - - if (turl != null) { - transferURL = turl; - } - }; - - /** - * Method used in the mechanism for suspending and resuming a request. To be - * implemented! For now it always returns 0. - */ - public int getProgressCounter() { - - return 0; - }; + private static final Logger log = LoggerFactory.getLogger(PtGChunkTO.class); + + private TRequestToken requestToken; + + private TSURL fromSURL; + private TStorageSystemInfo storageSystemInfo; + + private TLifeTimeInSeconds lifeTime; // requested lifetime for fromSURL - + // BEWARE!!! It is the pin time!!! + private TFileStorageType fileStorageType; // TFileStorageType requested for + // specific fromSURL to get + private TSpaceToken spaceToken; // SpaceToken to use for fromSURL + private TDirOption dirOption; // specifies if the request regards a directory + // and related info + + private TURLPrefix transferProtocols; // list of desired transport protocols + // for fromSURL + + private TSizeInBytes fileSize; // size of file + private TReturnStatus status; // return status for this chunk of request + private TLifeTimeInSeconds estimatedWaitTimeOnQueue; // estimated time this + // chunk will remain in + // queue + private TLifeTimeInSeconds estimatedProcessingTime; // estimated time this + // chunk will take to be + // processed + private TTURL transferURL; // TURL for picking up the requested file + private TLifeTimeInSeconds remainingPinTime; // estimated time remaining for + // Pin validity + + public PtGChunkTO( + TRequestToken requestToken, + TSURL fromSURL, + TStorageSystemInfo storageSystemInfo, + TLifeTimeInSeconds lifeTime, + TFileStorageType fileStorageType, + TSpaceToken spaceToken, + TDirOption dirOption, + TURLPrefix transferProtocols, + TSizeInBytes fileSize, + TReturnStatus status, + TLifeTimeInSeconds estimatedWaitTimeOnQueue, + TLifeTimeInSeconds estimatedProcessingTime, + TTURL transferURL, + TLifeTimeInSeconds remainingPinTime) + throws InvalidPtGChunkDataAttributesException { + + boolean ok = + requestToken != null + && fromSURL != null + && storageSystemInfo != null + && lifeTime != null + && fileStorageType != null + && spaceToken != null + && dirOption != null + && transferProtocols != null + && fileSize != null + && status != null + && estimatedWaitTimeOnQueue != null + && estimatedProcessingTime != null + && transferURL != null + && remainingPinTime != null; + + if (!ok) { + throw new InvalidPtGChunkDataAttributesException( + requestToken, + fromSURL, + storageSystemInfo, + lifeTime, + fileStorageType, + spaceToken, + dirOption, + transferProtocols, + fileSize, + status, + estimatedWaitTimeOnQueue, + estimatedProcessingTime, + transferURL, + remainingPinTime); + } + this.requestToken = requestToken; + this.fromSURL = fromSURL; + this.storageSystemInfo = storageSystemInfo; + this.lifeTime = lifeTime; + this.fileStorageType = fileStorageType; + this.spaceToken = spaceToken; + this.dirOption = dirOption; + 
this.transferProtocols = transferProtocols; + this.fileSize = fileSize; + this.status = status; + this.estimatedWaitTimeOnQueue = estimatedWaitTimeOnQueue; + this.estimatedProcessingTime = estimatedProcessingTime; + this.transferURL = transferURL; + } + + /** Method that returns the requestToken of the srm request to which this chunk belongs. */ + public TRequestToken requestToken() { + + return requestToken; + } + + /** Method that returns the fromSURL of the srm request to which this chunk belongs. */ + public TSURL fromSURL() { + + return fromSURL; + } + + /** Method that returns the storageSystemInfo of the srm request to which this chunk belongs */ + public TStorageSystemInfo storageSystemInfo() { + + return storageSystemInfo; + } + + /** Method that returns the requested pin life time for this chunk of the srm request. */ + public TLifeTimeInSeconds lifeTime() { + + return lifeTime; + } + + /** Method that returns the filerequested pin life time for this chunk of the srm request. */ + public TFileStorageType fileStorageType() { + + return fileStorageType; + } + + /** Method that returns the space token supplied for this chunk of the srm request. */ + public TSpaceToken spaceToken() { + + return spaceToken; + } + + /** Method that returns the dirOption specified in the srm request. */ + public TDirOption dirOption() { + + return dirOption; + } + + /** + * Method that returns a TURLPrefix containing the transfer protocols desired for this chunk of + * the srm request. + */ + public TURLPrefix transferProtocols() { + + return transferProtocols; + } + + /** Method that returns the status for this chunk of the srm request. */ + public TReturnStatus status() { + + return status; + } + + /** Method that returns the file size for this chunk of the srm request. */ + public TSizeInBytes fileSize() { + + return fileSize; + } + + /** Method that returns the estimated time in queue for this chunk of the srm request. */ + public TLifeTimeInSeconds estimatedWaitTimeOnQueue() { + + return estimatedWaitTimeOnQueue; + } + + /** Method that returns the estimated processing time for this chunk of the srm request. */ + public TLifeTimeInSeconds estimatedProcessingTime() { + + return estimatedProcessingTime; + } + + /** Method that returns the TURL for this chunk of the srm request. */ + public TTURL transferURL() { + + return transferURL; + } + + /** Method that returns the estimated remaining pin time for this chunk of the srm request. */ + public TLifeTimeInSeconds remainingPinTime() { + + return remainingPinTime; + } + + /** + * Method that sets the status of this request to SRM_REQUEST_QUEUED; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_REQUEST_QUEUED(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_REQUEST_QUEUED, explanation); + } + + /** + * Method that sets the status of this request to SRM_DONE; it needs the explanation String which + * describes the situation in greater detail; if a null is passed, then an empty String is used as + * explanation. 
+ */ + public void changeStatusSRM_DONE(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_DONE, explanation); + } + + /** + * Method that sets the status of this request to SRM_INVALID_REQUEST; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_INVALID_REQUEST(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, explanation); + } + + /** + * Method that sets the status of this request to SRM_AUTHORIZATION_FAILURE; it needs the + * explanation String which describes the situation in greater detail; if a null is passed, then + * an empty String is used as explanation. + */ + public void changeStatusSRM_AUTHORIZATION_FAILURE(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, explanation); + } + + /** + * Method that sets the status of this request to SRM_ABORTED; it needs the explanation String + * which describes the situation in greater detail; if a null is passed, then an empty String is + * used as explanation. + */ + public void changeStatusSRM_ABORTED(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_ABORTED, explanation); + } + + /** + * Method that sets the status of this request to SRM_REQUEST_INPROGRESS; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_REQUEST_INPROGRESS(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_REQUEST_INPROGRESS, explanation); + } + + /** + * Method that sets the status of this request to SRM_INTERNAL_ERROR; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. + */ + public void changeStatusSRM_INTERNAL_ERROR(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_INTERNAL_ERROR, explanation); + } + + /** + * Method that sets the status of this request to SRM_FATAL_INTERNAL_ERROR; it needs the + * explanation String which describes the situation in greater detail; if a null is passed, then + * an empty String is used as explanation. + */ + public void changeStatusSRM_FATAL_INTERNAL_ERROR(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_FATAL_INTERNAL_ERROR, explanation); + } + + /** + * Method that sets the status of this request to SRM_INVALID_PATH; it needs the explanation + * String which describes the situation in greater detail; if a null is passed, then an empty + * String is used as explanation. 
+ */ + public void changeStatusSRM_INVALID_PATH(String explanation) { + + if (explanation == null) { + explanation = ""; + } + status = new TReturnStatus(TStatusCode.SRM_INVALID_PATH, explanation); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("PtGChunkData\n"); + sb.append("RequestToken="); + sb.append(requestToken); + sb.append("; "); + sb.append("fromSURL="); + sb.append(fromSURL); + sb.append("; "); + sb.append("storageSystemInfo="); + sb.append(storageSystemInfo); + sb.append("; "); + sb.append("lifeTime="); + sb.append(lifeTime); + sb.append("; "); + sb.append("fileStorageType="); + sb.append(fileStorageType); + sb.append("; "); + sb.append("spaceToken"); + sb.append(spaceToken); + sb.append("; "); + sb.append("dirOption="); + sb.append(dirOption); + sb.append("; "); + sb.append("transferProtocols="); + sb.append(transferProtocols); + sb.append("; "); + sb.append("fileSize="); + sb.append(fileSize); + sb.append("; "); + sb.append("status="); + sb.append(status); + sb.append("; "); + sb.append("estimatedWaitTimeOnQueue="); + sb.append(estimatedWaitTimeOnQueue); + sb.append("; "); + sb.append("estimatedProcessingTime="); + sb.append(estimatedProcessingTime); + sb.append("; "); + sb.append("transferURL="); + sb.append(transferURL); + sb.append("; "); + sb.append("remainingPinTime="); + sb.append(remainingPinTime); + sb.append("."); + return sb.toString(); + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + requestToken.hashCode(); + hash = 37 * hash + fromSURL.hashCode(); + hash = 37 * hash + storageSystemInfo.hashCode(); + hash = 37 * hash + lifeTime.hashCode(); + hash = 37 * hash + fileStorageType.hashCode(); + hash = 37 * hash + spaceToken.hashCode(); + hash = 37 * hash + dirOption.hashCode(); + hash = 37 * hash + transferProtocols.hashCode(); + hash = 37 * hash + fileSize.hashCode(); + hash = 37 * hash + status.hashCode(); + hash = 37 * hash + estimatedWaitTimeOnQueue.hashCode(); + hash = 37 * hash + estimatedProcessingTime.hashCode(); + hash = 37 * hash + transferURL.hashCode(); + hash = 37 * hash + remainingPinTime.hashCode(); + return hash; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof PtGChunkTO)) { + return false; + } + PtGChunkTO cd = (PtGChunkTO) o; + return requestToken.equals(cd.requestToken) + && fromSURL.equals(cd.fromSURL) + && storageSystemInfo.equals(cd.storageSystemInfo) + && lifeTime.equals(cd.lifeTime) + && fileStorageType.equals(cd.fileStorageType) + && spaceToken.equals(cd.spaceToken) + && dirOption.equals(cd.dirOption) + && transferProtocols.equals(cd.transferProtocols) + && fileSize.equals(cd.fileSize) + && status.equals(cd.status) + && estimatedWaitTimeOnQueue.equals(cd.estimatedWaitTimeOnQueue) + && estimatedProcessingTime.equals(cd.estimatedProcessingTime) + && transferURL.equals(cd.transferURL) + && remainingPinTime.equals(cd.remainingPinTime); + } + + /** + * Method used to set the size of the file corresponding to the requested SURL. If the supplied + * TSizeInByte is null, the nothing gets set! + */ + public TSizeInBytes setFileSize(final TSizeInBytes size) { + + if (size != null) { + fileSize = size; + } + return null; + }; + + /** + * Method used to set the estimated time that the chunk will spend on the queue. If the supplied + * TLifeTimeInSeconds is null, then nothing gets set! 
+ */ + public void setEstimatedWaitTimeOnQueue(final TLifeTimeInSeconds time) { + + if (time != null) { + estimatedWaitTimeOnQueue = time; + } + }; + + /** + * Method used to set the estimated time the processing will take. If the supplied + * TLifeTimeInSeconds is null, then nothing gets set! + */ + public void setEstimatedProcessingTime(final TLifeTimeInSeconds time) { + + if (time != null) { + estimatedProcessingTime = time; + } + }; + + /** + * Method used to set the transferURL associated to the SURL of this chunk. If TTURL is null, then + * nothing gets set! + */ + public void setTransferURL(final TTURL turl) { + + if (turl != null) { + transferURL = turl; + } + }; + + /** + * Method used in the mechanism for suspending and resuming a request. To be implemented! For now + * it always returns 0. + */ + public int getProgressCounter() { + + return 0; + }; } diff --git a/src/main/java/it/grid/storm/persistence/model/PtPChunkTO.java b/src/main/java/it/grid/storm/persistence/model/PtPChunkTO.java index d3347c92..101a3ab2 100644 --- a/src/main/java/it/grid/storm/persistence/model/PtPChunkTO.java +++ b/src/main/java/it/grid/storm/persistence/model/PtPChunkTO.java @@ -1,10 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; - -public class PtPChunkTO { - -} +public class PtPChunkTO {} diff --git a/src/main/java/it/grid/storm/persistence/model/RecallTaskType.java b/src/main/java/it/grid/storm/persistence/model/RecallTaskType.java index 12e092cf..eb3b9123 100644 --- a/src/main/java/it/grid/storm/persistence/model/RecallTaskType.java +++ b/src/main/java/it/grid/storm/persistence/model/RecallTaskType.java @@ -1,6 +1,4 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; - diff --git a/src/main/java/it/grid/storm/persistence/model/RequestSummaryTO.java b/src/main/java/it/grid/storm/persistence/model/RequestSummaryTO.java index 94c2b0a6..acb8c19e 100644 --- a/src/main/java/it/grid/storm/persistence/model/RequestSummaryTO.java +++ b/src/main/java/it/grid/storm/persistence/model/RequestSummaryTO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; @@ -8,245 +7,222 @@ import it.grid.storm.srm.types.TRequestType; /** - * This class represents the SummaryData associated with the SRM request, that - * is it contains info about: TRequestToken, TRequsetType, total files in this - * request, number of files in queue, number of files progressing, number of - * files finished, and whether the request is currently suspended. - * + * This class represents the SummaryData associated with the SRM request, that is it contains info + * about: TRequestToken, TRequsetType, total files in this request, number of files in queue, number + * of files progressing, number of files finished, and whether the request is currently suspended. 
+ * * @author EGRID - ICTP Trieste * @date March 18th, 2005 * @version 3.0 */ public class RequestSummaryTO { - private TRequestToken requestToken = null; // TRequestToken of SRM request - private TRequestType requestType = null; // request type of SRM request - private int totalFilesInThisRequest = 0; // total number of files in SRM - // request - private int numOfQueuedRequests = 0; // number of files in SRM request that - // are in queue - private int numOfProgressingRequests = 0; // number of files in SRM request - // that are still in progress - private int numFinished = 0; // number of files in SRM request whose - // processing has completed - private boolean isSuspended = false; // flag that indicates whether the SRM - // request is suspended - - public RequestSummaryTO(TRequestToken requestToken, TRequestType requestType, - int totalFilesInThisRequest, int numOfQueuedRequests, - int numOfProgressingRequests, int numFinished, boolean isSuspended) - throws InvalidRequestSummaryDataAttributesException { - - boolean ok = requestToken != null && requestType != null - && totalFilesInThisRequest >= 0 && numOfQueuedRequests >= 0 - && numOfProgressingRequests >= 0 && numFinished >= 0; - if (!ok) - throw new InvalidRequestSummaryDataAttributesException(requestToken, - requestType, totalFilesInThisRequest, numOfQueuedRequests, - numOfProgressingRequests, numFinished); - this.requestToken = requestToken; - this.requestType = requestType; - this.totalFilesInThisRequest = totalFilesInThisRequest; - this.numOfQueuedRequests = numOfQueuedRequests; - this.numOfProgressingRequests = numOfProgressingRequests; - this.numFinished = numFinished; - this.isSuspended = isSuspended; - } - - /** - * Method that returns the SRM request TRequestToken - */ - public TRequestToken requestToken() { - - return requestToken; - } - - /** - * Method that returns the type of SRM request - */ - public TRequestType requestType() { - - return requestType; - } - - /** - * Method that returns the total number of files in the SRM request - */ - public int totalFilesInThisRequest() { - - return totalFilesInThisRequest; - } - - /** - * Method that returns the number of files in the SRM request that are - * currently in queue. - */ - public int numOfQueuedRequests() { - - return numOfQueuedRequests; - } - - /** - * Method that returns the number of files in the SRM request that are - * currently in progress. - */ - public int numOfProgressingRequests() { - - return numOfProgressingRequests; - } - - /** - * Method that returns the number of files in the SRM request that are - * currently finished. - */ - public int numFinished() { - - return numFinished; - } - - /** - * Method that tells whether the SRM requst is suspended. - */ - public boolean isSuspended() { - - return isSuspended; - } - - /** - * Method that increments the counter for the number of files in queue. - */ - public void incNumOfQueuedRequests() { - - numOfQueuedRequests++; - } - - /** - * Methos used to decrement the counter fo the number of files in queue. - */ - public void decNumOfQueuedRequests() { - - numOfQueuedRequests--; - } - - /** - * Method used to increment the counter for the number of progressing - * requests. - */ - public void incNumOfProgressingRequests() { - - numOfProgressingRequests++; - } - - /** - * Method used to decrement the counter for the number of progressing - * requests. 
- */ - public void decNumOfProgressingRequests() { - - numOfProgressingRequests--; - } - - /** - * Method used to increment the counter for the number of total files in the - * request. - */ - public void incTotalFilesInThisRequest() { - - totalFilesInThisRequest++; - } - - /** - * Method used to decrement the counter fot the number of total files in this - * request. - */ - public void decTotalFilesInThisRequest() { - - totalFilesInThisRequest--; - } - - /** - * Method used to increment the counter for the processing of files that are - * currently finished. - */ - public void incNumFinished() { - - numFinished++; - } - - /** - * Method used to decrement the counter that keeps track of the number of - * files that are currently finished. - */ - public void decNumFinished() { - - numFinished--; - } - - /** - * Method used to set the SRM flag that signals the processing of the request - * this RequestSummaryData applies to, is suspended. - */ - public void srmSuspend() { - - isSuspended = true; - } - - /** - * Method used to set the SRM flag that signals the procesing of the request - * this RequestSummaryData applies to, is _not_ suspended - */ - public void srmUnSuspend() { - - isSuspended = false; - } - - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append("SummaryRequestData"); - sb.append("; requestToken="); - sb.append(requestToken); - sb.append("; requestType="); - sb.append(requestType); - sb.append("; totalFilesInThisRequest="); - sb.append(totalFilesInThisRequest); - sb.append("; numOfQueuedRequests="); - sb.append(numOfQueuedRequests); - sb.append("; numOfProgressingRequests="); - sb.append(numOfProgressingRequests); - sb.append("; numFinished="); - sb.append(numFinished); - sb.append("; isSuspended="); - sb.append(isSuspended); - sb.append("."); - return sb.toString(); - } - - public int hashCode() { - - int hash = 17; - hash = 37 * hash + requestToken.hashCode(); - hash = 37 * hash + requestType.hashCode(); - hash = 37 * hash + totalFilesInThisRequest; - hash = 37 * hash + numOfQueuedRequests; - hash = 37 * hash + numOfProgressingRequests; - hash = 37 * hash + numFinished; - hash = (isSuspended) ? 
(37 * hash + 1) : (37 * hash + 0); - return hash; - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof RequestSummaryTO)) - return false; - RequestSummaryTO rsd = (RequestSummaryTO) o; - return requestToken.equals(rsd.requestToken) - && requestType.equals(rsd.requestType) - && (totalFilesInThisRequest == rsd.totalFilesInThisRequest) - && (numOfQueuedRequests == rsd.numOfQueuedRequests) - && (numOfProgressingRequests == rsd.numOfProgressingRequests) - && (numFinished == rsd.numFinished) && (isSuspended == rsd.isSuspended); - } + private TRequestToken requestToken = null; // TRequestToken of SRM request + private TRequestType requestType = null; // request type of SRM request + private int totalFilesInThisRequest = 0; // total number of files in SRM + // request + private int numOfQueuedRequests = 0; // number of files in SRM request that + // are in queue + private int numOfProgressingRequests = 0; // number of files in SRM request + // that are still in progress + private int numFinished = 0; // number of files in SRM request whose + // processing has completed + private boolean isSuspended = false; // flag that indicates whether the SRM + // request is suspended + + public RequestSummaryTO( + TRequestToken requestToken, + TRequestType requestType, + int totalFilesInThisRequest, + int numOfQueuedRequests, + int numOfProgressingRequests, + int numFinished, + boolean isSuspended) + throws InvalidRequestSummaryDataAttributesException { + + boolean ok = + requestToken != null + && requestType != null + && totalFilesInThisRequest >= 0 + && numOfQueuedRequests >= 0 + && numOfProgressingRequests >= 0 + && numFinished >= 0; + if (!ok) + throw new InvalidRequestSummaryDataAttributesException( + requestToken, + requestType, + totalFilesInThisRequest, + numOfQueuedRequests, + numOfProgressingRequests, + numFinished); + this.requestToken = requestToken; + this.requestType = requestType; + this.totalFilesInThisRequest = totalFilesInThisRequest; + this.numOfQueuedRequests = numOfQueuedRequests; + this.numOfProgressingRequests = numOfProgressingRequests; + this.numFinished = numFinished; + this.isSuspended = isSuspended; + } + + /** Method that returns the SRM request TRequestToken */ + public TRequestToken requestToken() { + + return requestToken; + } + + /** Method that returns the type of SRM request */ + public TRequestType requestType() { + + return requestType; + } + + /** Method that returns the total number of files in the SRM request */ + public int totalFilesInThisRequest() { + + return totalFilesInThisRequest; + } + + /** Method that returns the number of files in the SRM request that are currently in queue. */ + public int numOfQueuedRequests() { + + return numOfQueuedRequests; + } + + /** Method that returns the number of files in the SRM request that are currently in progress. */ + public int numOfProgressingRequests() { + + return numOfProgressingRequests; + } + + /** Method that returns the number of files in the SRM request that are currently finished. */ + public int numFinished() { + + return numFinished; + } + + /** Method that tells whether the SRM requst is suspended. */ + public boolean isSuspended() { + + return isSuspended; + } + + /** Method that increments the counter for the number of files in queue. */ + public void incNumOfQueuedRequests() { + + numOfQueuedRequests++; + } + + /** Methos used to decrement the counter fo the number of files in queue. 
*/ + public void decNumOfQueuedRequests() { + + numOfQueuedRequests--; + } + + /** Method used to increment the counter for the number of progressing requests. */ + public void incNumOfProgressingRequests() { + + numOfProgressingRequests++; + } + + /** Method used to decrement the counter for the number of progressing requests. */ + public void decNumOfProgressingRequests() { + + numOfProgressingRequests--; + } + + /** Method used to increment the counter for the number of total files in the request. */ + public void incTotalFilesInThisRequest() { + + totalFilesInThisRequest++; + } + + /** Method used to decrement the counter fot the number of total files in this request. */ + public void decTotalFilesInThisRequest() { + + totalFilesInThisRequest--; + } + + /** + * Method used to increment the counter for the processing of files that are currently finished. + */ + public void incNumFinished() { + + numFinished++; + } + + /** + * Method used to decrement the counter that keeps track of the number of files that are currently + * finished. + */ + public void decNumFinished() { + + numFinished--; + } + + /** + * Method used to set the SRM flag that signals the processing of the request this + * RequestSummaryData applies to, is suspended. + */ + public void srmSuspend() { + + isSuspended = true; + } + + /** + * Method used to set the SRM flag that signals the procesing of the request this + * RequestSummaryData applies to, is _not_ suspended + */ + public void srmUnSuspend() { + + isSuspended = false; + } + + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append("SummaryRequestData"); + sb.append("; requestToken="); + sb.append(requestToken); + sb.append("; requestType="); + sb.append(requestType); + sb.append("; totalFilesInThisRequest="); + sb.append(totalFilesInThisRequest); + sb.append("; numOfQueuedRequests="); + sb.append(numOfQueuedRequests); + sb.append("; numOfProgressingRequests="); + sb.append(numOfProgressingRequests); + sb.append("; numFinished="); + sb.append(numFinished); + sb.append("; isSuspended="); + sb.append(isSuspended); + sb.append("."); + return sb.toString(); + } + + public int hashCode() { + + int hash = 17; + hash = 37 * hash + requestToken.hashCode(); + hash = 37 * hash + requestType.hashCode(); + hash = 37 * hash + totalFilesInThisRequest; + hash = 37 * hash + numOfQueuedRequests; + hash = 37 * hash + numOfProgressingRequests; + hash = 37 * hash + numFinished; + hash = (isSuspended) ? (37 * hash + 1) : (37 * hash + 0); + return hash; + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof RequestSummaryTO)) return false; + RequestSummaryTO rsd = (RequestSummaryTO) o; + return requestToken.equals(rsd.requestToken) + && requestType.equals(rsd.requestType) + && (totalFilesInThisRequest == rsd.totalFilesInThisRequest) + && (numOfQueuedRequests == rsd.numOfQueuedRequests) + && (numOfProgressingRequests == rsd.numOfProgressingRequests) + && (numFinished == rsd.numFinished) + && (isSuspended == rsd.isSuspended); + } } diff --git a/src/main/java/it/grid/storm/persistence/model/ResourceRuleData.java b/src/main/java/it/grid/storm/persistence/model/ResourceRuleData.java index 6bf4d81a..efbcbd81 100644 --- a/src/main/java/it/grid/storm/persistence/model/ResourceRuleData.java +++ b/src/main/java/it/grid/storm/persistence/model/ResourceRuleData.java @@ -1,8 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; -public class ResourceRuleData { -} +public class ResourceRuleData {} diff --git a/src/main/java/it/grid/storm/persistence/model/StorageSpaceTO.java b/src/main/java/it/grid/storm/persistence/model/StorageSpaceTO.java index d3610617..011ca4d6 100644 --- a/src/main/java/it/grid/storm/persistence/model/StorageSpaceTO.java +++ b/src/main/java/it/grid/storm/persistence/model/StorageSpaceTO.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /* * (c)2004 INFN / ICTP-eGrid This file can be distributed and/or modified under @@ -8,19 +7,15 @@ * visit http://www.cnaf.infn.it/license.html */ -/** - * StorageSpaceTO - */ +/** StorageSpaceTO */ package it.grid.storm.persistence.model; import it.grid.storm.common.types.VO; import it.grid.storm.griduser.AbstractGridUser; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.space.StorageSpaceData; - import java.io.Serializable; import java.util.Date; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,475 +25,435 @@ */ public class StorageSpaceTO implements Serializable, Comparable { - private static final long serialVersionUID = -87317982494792808L; - - private static final Logger log = LoggerFactory - .getLogger(StorageSpaceTO.class); - - // ----- PRIMARY KEY ----// - private Long storageSpaceId = null; // Persistence Object IDentifier - - // ----- FIELDS ----// - private String ownerName = null; - private String voName = null; - private String spaceType = null; // `SPACE_TYPE` VARCHAR(10) NOT NULL default - // '' - private String alias = null; - private String spaceToken = null; - private String spaceFile = null; // `SPACE_FILE` VARCHAR(145) NOT NULL default - // '' - private long lifetime = -1L; // `LIFETIME` bigint(20) default NULL - private String storageInfo = null;// `STORAGE_INFO` VARCHAR(255) default NULL - private Date created = new Date(); - - private long totalSize = 0L; // `TOTAL_SIZE` bigint(20) NOT NULL default '0' - private long guaranteedSize = 0L; // `GUAR_SIZE` bigint(20) NOT NULL default - // '0' - private long freeSize = 0L; // `FREE_SIZE` bigint(20) default NULL - - private long usedSize = -1L; // `USED_SIZE` bigint(20) NOT NULL default '-1' - private long busySize = -1L; // `BUSY_SIZE` bigint(20) NOT NULL default '-1' - private long unavailableSize = -1L; // `UNAVAILABLE_SIZE` bigint(20) NOT NULL - // default '-1' - private long availableSize = -1L; // `AVAILABLE_SIZE` bigint(20) NOT NULL - // default '-1' - private long reservedSize = -1L; // `RESERVED_SIZE` bigint(20) NOT NULL - // default '-1' - private Date updateTime = null; - - // ********************** Constructor methods ********************** // - - /** - * No-arg constructor for JavaBean tools. 
- */ - public StorageSpaceTO() { - - super(); - } - - /** - * Constructor from Domain Object StorageSpaceData - * - * @param spaceData - * SpaceData - */ - public StorageSpaceTO(StorageSpaceData spaceData) { - - if (spaceData != null) { - log.debug("Building StorageSpaceTO with {}" , spaceData); - if (spaceData.getOwner() != null) { - ownerName = spaceData.getOwner().getDn(); - voName = getVOName(spaceData.getOwner()); - } - if (spaceData.getSpaceType() != null) { - spaceType = (spaceData.getSpaceType()).getValue(); - } - alias = spaceData.getSpaceTokenAlias(); - if (spaceData.getSpaceToken() != null) { - spaceToken = spaceData.getSpaceToken().getValue(); - } - spaceFile = spaceData.getSpaceFileNameString(); - if (spaceData.getTotalSpaceSize() != null) { - totalSize = spaceData.getTotalSpaceSize().value(); - } - if (spaceData.getTotalGuaranteedSize() != null) { - guaranteedSize = spaceData.getTotalGuaranteedSize().value(); - } - if (spaceData.getAvailableSpaceSize() != null) { - availableSize = spaceData.getAvailableSpaceSize().value(); - } - if (spaceData.getUsedSpaceSize() != null) { - usedSize = spaceData.getUsedSpaceSize().value(); - } - if (spaceData.getFreeSpaceSize() != null) { - freeSize = spaceData.getFreeSpaceSize().value(); - } - if (spaceData.getUnavailableSpaceSize() != null) { - unavailableSize = spaceData.getUnavailableSpaceSize().value(); - } - if (spaceData.getBusySpaceSize() != null) { - busySize = spaceData.getBusySpaceSize().value(); - } - if (spaceData.getReservedSpaceSize() != null) { - reservedSize = spaceData.getReservedSpaceSize().value(); - } - if (spaceData.getLifeTime() != null) { - lifetime = spaceData.getLifeTime().value(); - } - if (spaceData.getStorageInfo() != null) { - storageInfo = spaceData.getStorageInfo().getValue(); - } - if (spaceData.getCreationDate() != null) { - created = spaceData.getCreationDate(); - } - } - } - - // ************ HELPER Method *************** // - private String getVOName(GridUserInterface maker) { + private static final long serialVersionUID = -87317982494792808L; + + private static final Logger log = LoggerFactory.getLogger(StorageSpaceTO.class); + + // ----- PRIMARY KEY ----// + private Long storageSpaceId = null; // Persistence Object IDentifier + + // ----- FIELDS ----// + private String ownerName = null; + private String voName = null; + private String spaceType = null; // `SPACE_TYPE` VARCHAR(10) NOT NULL default + // '' + private String alias = null; + private String spaceToken = null; + private String spaceFile = null; // `SPACE_FILE` VARCHAR(145) NOT NULL default + // '' + private long lifetime = -1L; // `LIFETIME` bigint(20) default NULL + private String storageInfo = null; // `STORAGE_INFO` VARCHAR(255) default NULL + private Date created = new Date(); + + private long totalSize = 0L; // `TOTAL_SIZE` bigint(20) NOT NULL default '0' + private long guaranteedSize = 0L; // `GUAR_SIZE` bigint(20) NOT NULL default + // '0' + private long freeSize = 0L; // `FREE_SIZE` bigint(20) default NULL + + private long usedSize = -1L; // `USED_SIZE` bigint(20) NOT NULL default '-1' + private long busySize = -1L; // `BUSY_SIZE` bigint(20) NOT NULL default '-1' + private long unavailableSize = -1L; // `UNAVAILABLE_SIZE` bigint(20) NOT NULL + // default '-1' + private long availableSize = -1L; // `AVAILABLE_SIZE` bigint(20) NOT NULL + // default '-1' + private long reservedSize = -1L; // `RESERVED_SIZE` bigint(20) NOT NULL + // default '-1' + private Date updateTime = null; + + // ********************** Constructor methods 
********************** // + + /** No-arg constructor for JavaBean tools. */ + public StorageSpaceTO() { + + super(); + } + + /** + * Constructor from Domain Object StorageSpaceData + * + * @param spaceData SpaceData + */ + public StorageSpaceTO(StorageSpaceData spaceData) { + + if (spaceData != null) { + log.debug("Building StorageSpaceTO with {}", spaceData); + if (spaceData.getOwner() != null) { + ownerName = spaceData.getOwner().getDn(); + voName = getVOName(spaceData.getOwner()); + } + if (spaceData.getSpaceType() != null) { + spaceType = (spaceData.getSpaceType()).getValue(); + } + alias = spaceData.getSpaceTokenAlias(); + if (spaceData.getSpaceToken() != null) { + spaceToken = spaceData.getSpaceToken().getValue(); + } + spaceFile = spaceData.getSpaceFileNameString(); + if (spaceData.getTotalSpaceSize() != null) { + totalSize = spaceData.getTotalSpaceSize().value(); + } + if (spaceData.getTotalGuaranteedSize() != null) { + guaranteedSize = spaceData.getTotalGuaranteedSize().value(); + } + if (spaceData.getAvailableSpaceSize() != null) { + availableSize = spaceData.getAvailableSpaceSize().value(); + } + if (spaceData.getUsedSpaceSize() != null) { + usedSize = spaceData.getUsedSpaceSize().value(); + } + if (spaceData.getFreeSpaceSize() != null) { + freeSize = spaceData.getFreeSpaceSize().value(); + } + if (spaceData.getUnavailableSpaceSize() != null) { + unavailableSize = spaceData.getUnavailableSpaceSize().value(); + } + if (spaceData.getBusySpaceSize() != null) { + busySize = spaceData.getBusySpaceSize().value(); + } + if (spaceData.getReservedSpaceSize() != null) { + reservedSize = spaceData.getReservedSpaceSize().value(); + } + if (spaceData.getLifeTime() != null) { + lifetime = spaceData.getLifeTime().value(); + } + if (spaceData.getStorageInfo() != null) { + storageInfo = spaceData.getStorageInfo().getValue(); + } + if (spaceData.getCreationDate() != null) { + created = spaceData.getCreationDate(); + } + } + } + + // ************ HELPER Method *************** // + private String getVOName(GridUserInterface maker) { + + String voStr = VO.makeNoVo().getValue(); + if (maker instanceof AbstractGridUser) { + voStr = ((AbstractGridUser) maker).getVO().getValue(); + } + return voStr; + } + + // ********************** Accessor Methods ********************** // + + public Long getStorageSpaceId() { + + return storageSpaceId; + } + + public void setStorageSpaceId(Long id) { + + storageSpaceId = id; + } + + // ------------------------------------- + + public String getOwnerName() { - String voStr = VO.makeNoVo().getValue(); - if (maker instanceof AbstractGridUser) { - voStr = ((AbstractGridUser) maker).getVO().getValue(); - } - return voStr; - } + return ownerName; + } - // ********************** Accessor Methods ********************** // + public void setOwnerName(String ownerName) { - public Long getStorageSpaceId() { + this.ownerName = ownerName; + } - return storageSpaceId; - } + // ------------------------------------- - public void setStorageSpaceId(Long id) { + public String getVoName() { - storageSpaceId = id; - } + return voName; + } - // ------------------------------------- + public void setVoName(String voName) { - public String getOwnerName() { + this.voName = voName; + } - return ownerName; - } + // ------------------------------------- - public void setOwnerName(String ownerName) { + public String getSpaceType() { - this.ownerName = ownerName; - } + return spaceType; + } - // ------------------------------------- + public void setSpaceType(String spaceType) { - public String 
getVoName() { + this.spaceType = spaceType; + } - return voName; - } + // ------------------------------------- - public void setVoName(String voName) { + public long getGuaranteedSize() { - this.voName = voName; - } + return guaranteedSize; + } - // ------------------------------------- + public void setGuaranteedSize(long guaranteedSize) { - public String getSpaceType() { + this.guaranteedSize = guaranteedSize; + } - return spaceType; - } + // ------------------------------------- - public void setSpaceType(String spaceType) { + public long getTotalSize() { - this.spaceType = spaceType; - } + return totalSize; + } - // ------------------------------------- + public void setTotalSize(long totalSize) { - public long getGuaranteedSize() { + this.totalSize = totalSize; + } - return guaranteedSize; - } + // ------------------------------------- - public void setGuaranteedSize(long guaranteedSize) { + public void setSpaceToken(String spaceToken) { - this.guaranteedSize = guaranteedSize; - } + this.spaceToken = spaceToken; + } - // ------------------------------------- + public String getSpaceToken() { - public long getTotalSize() { + return spaceToken; + } - return totalSize; - } + // ------------------------------------- - public void setTotalSize(long totalSize) { + public void setAlias(String alias) { - this.totalSize = totalSize; - } + this.alias = alias; + } - // ------------------------------------- + public String getAlias() { - public void setSpaceToken(String spaceToken) { + return alias; + } - this.spaceToken = spaceToken; - } + // ------------------------------------- - public String getSpaceToken() { + public void setSpaceFile(String spaceFile) { - return spaceToken; - } + this.spaceFile = spaceFile; + } - // ------------------------------------- + public String getSpaceFile() { - public void setAlias(String alias) { + return spaceFile; + } - this.alias = alias; - } + // ------------------------------------- - public String getAlias() { + public long getLifetime() { - return alias; - } + return lifetime; + } - // ------------------------------------- + public void setLifetime(long lifetime) { - public void setSpaceFile(String spaceFile) { + this.lifetime = lifetime; + } - this.spaceFile = spaceFile; - } + // ------------------------------------- - public String getSpaceFile() { + public String getStorageInfo() { - return spaceFile; - } + return storageInfo; + } - // ------------------------------------- + public void setStorageInfo(String storageInfo) { - public long getLifetime() { + this.storageInfo = storageInfo; + } - return lifetime; - } + // ------------------------------------- - public void setLifetime(long lifetime) { + public Date getCreated() { - this.lifetime = lifetime; - } + return created; + } - // ------------------------------------- + public void setCreated(Date date) { - public String getStorageInfo() { + created = date; + } - return storageInfo; - } + // ------------------------------------- - public void setStorageInfo(String storageInfo) { + /** @return the freeSize */ + public final long getFreeSize() { - this.storageInfo = storageInfo; - } + return freeSize; + } - // ------------------------------------- + /** @param freeSize the freeSize to set */ + public final void setFreeSize(long freeSize) { - public Date getCreated() { + this.freeSize = freeSize; + } - return created; - } + /** @return the usedSize */ + public final long getUsedSize() { - public void setCreated(Date date) { + return usedSize; + } - created = date; - } + /** @param usedSize the usedSize 
to set */ + public final void setUsedSize(long usedSize) { - // ------------------------------------- + this.usedSize = usedSize; + } - /** - * @return the freeSize - */ - public final long getFreeSize() { + /** @return the busySize */ + public final long getBusySize() { - return freeSize; - } + return busySize; + } - /** - * @param freeSize - * the freeSize to set - */ - public final void setFreeSize(long freeSize) { + /** @param busySize the busySize to set */ + public final void setBusySize(long busySize) { - this.freeSize = freeSize; - } + this.busySize = busySize; + } - /** - * @return the usedSize - */ - public final long getUsedSize() { + /** @return the unavailableSize */ + public final long getUnavailableSize() { - return usedSize; - } + return unavailableSize; + } - /** - * @param usedSize - * the usedSize to set - */ - public final void setUsedSize(long usedSize) { + /** @param unavailableSize the unavailableSize to set */ + public final void setUnavailableSize(long unavailableSize) { - this.usedSize = usedSize; - } + this.unavailableSize = unavailableSize; + } - /** - * @return the busySize - */ - public final long getBusySize() { + /** @return the reservedSize */ + public final long getReservedSize() { - return busySize; - } + return reservedSize; + } - /** - * @param busySize - * the busySize to set - */ - public final void setBusySize(long busySize) { + /** @param reservedSize the reservedSize to set */ + public final void setReservedSize(long reservedSize) { - this.busySize = busySize; - } + this.reservedSize = reservedSize; + } - /** - * @return the unavailableSize - */ - public final long getUnavailableSize() { + /** @param availableSize the availableSize to set */ + public void setAvailableSize(long availableSize) { - return unavailableSize; - } + this.availableSize = availableSize; + } - /** - * @param unavailableSize - * the unavailableSize to set - */ - public final void setUnavailableSize(long unavailableSize) { + /** @return the availableSize */ + public long getAvailableSize() { - this.unavailableSize = unavailableSize; - } - - /** - * @return the reservedSize - */ - public final long getReservedSize() { - - return reservedSize; - } - - /** - * @param reservedSize - * the reservedSize to set - */ - public final void setReservedSize(long reservedSize) { - - this.reservedSize = reservedSize; - } - - /** - * @param availableSize - * the availableSize to set - */ - public void setAvailableSize(long availableSize) { - - this.availableSize = availableSize; - } - - /** - * @return the availableSize - */ - public long getAvailableSize() { - - return availableSize; - } - - // ********************** Common Methods ********************** // - - /** - * @param updateTime - * the updateTime to set - */ - public void setUpdateTime(Date updateTime) { - - this.updateTime = updateTime; - } - - /** - * @return the updateTime - */ - public Date getUpdateTime() { - - return updateTime; - } - - @Override - public boolean equals(Object o) { - - if (o == null) { - return false; - } - if (o instanceof StorageSpaceTO) { - if (this == o) { - return true; - } - final StorageSpaceTO storageSpace = (StorageSpaceTO) o; - if (!spaceToken.equals(storageSpace.getSpaceToken())) { - return false; - } - if (!spaceFile.equals(storageSpace.getSpaceFile())) { - return false; - } - return true; - } else { - return false; - } - } - - @Override - public int hashCode() { - - int hash = 17; - hash = 37 * hash + spaceToken.hashCode(); - return hash; - - } - - @Override - public String toString() { - - 
StringBuilder sb = new StringBuilder(); - sb.append(" ==== STORAGE SPACE (token=" + spaceToken + ") ==== \n"); - sb.append(" STORAGE SPACE ID = " + storageSpaceId); - sb.append("\n"); - sb.append(" OWNER USER NAME = " + ownerName); - sb.append("\n"); - sb.append(" OWNER VO NAME = " + voName); - sb.append("\n"); - sb.append(" SPACE ALIAS NAME = " + alias); - sb.append("\n"); - sb.append(" SPACE TYPE = " + spaceType); - sb.append("\n"); - sb.append(" SPACE TOKEN = " + spaceToken); - sb.append("\n"); - sb.append(" SPACE FILE = " + spaceFile); - sb.append("\n"); - sb.append(" CREATED = " + created); - sb.append("\n"); - sb.append(" TOTAL SIZE = " + totalSize); - sb.append("\n"); - sb.append(" GUARANTEED SIZE = " + guaranteedSize); - sb.append("\n"); - sb.append(" FREE SIZE = " + freeSize); - sb.append("\n"); - sb.append(" USED SIZE = " + usedSize); - sb.append("\n"); - sb.append(" BUSY SIZE = " + busySize); - sb.append("\n"); - sb.append(" AVAILABLE = " + availableSize); - sb.append("\n"); - sb.append(" RESERVED = " + reservedSize); - sb.append("\n"); - sb.append(" UNAVAILABLE = " + unavailableSize); - sb.append("\n"); - sb.append(" LIFETIME (sec) = " + lifetime); - sb.append("\n"); - sb.append(" STORAGE INFO = " + storageInfo); - sb.append("\n"); - sb.append(" UPDATE TIME = " + updateTime); - sb.append("\n"); - sb.append(" NR STOR_FILES = "); - sb.append("\n"); - return sb.toString(); - } - - @Override - public int compareTo(StorageSpaceTO o) { - - if (o instanceof StorageSpaceTO) { - return getCreated().compareTo(((StorageSpaceTO) o).getCreated()); - } - return 0; - } - - // ********************** Business Methods ********************** // + return availableSize; + } + + // ********************** Common Methods ********************** // + + /** @param updateTime the updateTime to set */ + public void setUpdateTime(Date updateTime) { + + this.updateTime = updateTime; + } + + /** @return the updateTime */ + public Date getUpdateTime() { + + return updateTime; + } + + @Override + public boolean equals(Object o) { + + if (o == null) { + return false; + } + if (o instanceof StorageSpaceTO) { + if (this == o) { + return true; + } + final StorageSpaceTO storageSpace = (StorageSpaceTO) o; + if (!spaceToken.equals(storageSpace.getSpaceToken())) { + return false; + } + if (!spaceFile.equals(storageSpace.getSpaceFile())) { + return false; + } + return true; + } else { + return false; + } + } + + @Override + public int hashCode() { + + int hash = 17; + hash = 37 * hash + spaceToken.hashCode(); + return hash; + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + sb.append(" ==== STORAGE SPACE (token=" + spaceToken + ") ==== \n"); + sb.append(" STORAGE SPACE ID = " + storageSpaceId); + sb.append("\n"); + sb.append(" OWNER USER NAME = " + ownerName); + sb.append("\n"); + sb.append(" OWNER VO NAME = " + voName); + sb.append("\n"); + sb.append(" SPACE ALIAS NAME = " + alias); + sb.append("\n"); + sb.append(" SPACE TYPE = " + spaceType); + sb.append("\n"); + sb.append(" SPACE TOKEN = " + spaceToken); + sb.append("\n"); + sb.append(" SPACE FILE = " + spaceFile); + sb.append("\n"); + sb.append(" CREATED = " + created); + sb.append("\n"); + sb.append(" TOTAL SIZE = " + totalSize); + sb.append("\n"); + sb.append(" GUARANTEED SIZE = " + guaranteedSize); + sb.append("\n"); + sb.append(" FREE SIZE = " + freeSize); + sb.append("\n"); + sb.append(" USED SIZE = " + usedSize); + sb.append("\n"); + sb.append(" BUSY SIZE = " + busySize); + sb.append("\n"); + sb.append(" AVAILABLE = " 
+ availableSize); + sb.append("\n"); + sb.append(" RESERVED = " + reservedSize); + sb.append("\n"); + sb.append(" UNAVAILABLE = " + unavailableSize); + sb.append("\n"); + sb.append(" LIFETIME (sec) = " + lifetime); + sb.append("\n"); + sb.append(" STORAGE INFO = " + storageInfo); + sb.append("\n"); + sb.append(" UPDATE TIME = " + updateTime); + sb.append("\n"); + sb.append(" NR STOR_FILES = "); + sb.append("\n"); + return sb.toString(); + } + + @Override + public int compareTo(StorageSpaceTO o) { + + if (o instanceof StorageSpaceTO) { + return getCreated().compareTo(((StorageSpaceTO) o).getCreated()); + } + return 0; + } + + // ********************** Business Methods ********************** // } diff --git a/src/main/java/it/grid/storm/persistence/model/TapeRecallTO.java b/src/main/java/it/grid/storm/persistence/model/TapeRecallTO.java index 2f1843d8..d1917860 100644 --- a/src/main/java/it/grid/storm/persistence/model/TapeRecallTO.java +++ b/src/main/java/it/grid/storm/persistence/model/TapeRecallTO.java @@ -1,9 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.model; +import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.BOL; +import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.PTG; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import it.grid.storm.srm.types.InvalidTRequestTokenAttributesException; +import it.grid.storm.srm.types.TRequestToken; +import it.grid.storm.tape.recalltable.model.TapeRecallStatus; import java.io.Serializable; import java.text.Format; import java.text.SimpleDateFormat; @@ -12,438 +18,424 @@ import java.util.GregorianCalendar; import java.util.Random; import java.util.UUID; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.BOL; -import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.PTG; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -import it.grid.storm.srm.types.InvalidTRequestTokenAttributesException; -import it.grid.storm.srm.types.TRequestToken; -import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - public class TapeRecallTO implements Serializable, Comparable { - public enum RecallTaskType { - - PTG, BOL, BACK, RCLL; - } - - private static final Logger log = LoggerFactory.getLogger(TapeRecallTO.class); + public enum RecallTaskType { + PTG, + BOL, + BACK, + RCLL; + } + + private static final Logger log = LoggerFactory.getLogger(TapeRecallTO.class); + + private static final long serialVersionUID = -2907739786996767167L; + + public static final String START_CHAR = ""; + public static final char SEPARATOR_CHAR = '\u0009'; + public static final String DATE_FORMAT = "dd-MM-yyyy HH.mm.ss"; + + private UUID taskId = null; + private TRequestToken requestToken = null; + private RecallTaskType requestType = null; + private String fileName = null; + private String userID = null; + private String voName = null; + private int pinLifetime = 0; + private TapeRecallStatus status = TapeRecallStatus.QUEUED; + private int retryAttempt = 0; + private Date insertionInstant = null; + private Date inProgressInstant = null; + private Date finalStateInstant = null; + private Date deferredRecallInstant = null; + private UUID groupTaskId = null; - private static final long serialVersionUID = -2907739786996767167L; 
+ private final Calendar endOfTheWorld = new GregorianCalendar(2012, Calendar.DECEMBER, 21); - public static final String START_CHAR = ""; - public static final char SEPARATOR_CHAR = '\u0009'; - public static final String DATE_FORMAT = "dd-MM-yyyy HH.mm.ss"; + public static TapeRecallTO createRandom(Date date, String voName) { - private UUID taskId = null; - private TRequestToken requestToken = null; - private RecallTaskType requestType = null; - private String fileName = null; - private String userID = null; - private String voName = null; - private int pinLifetime = 0; - private TapeRecallStatus status = TapeRecallStatus.QUEUED; - private int retryAttempt = 0; - private Date insertionInstant = null; - private Date inProgressInstant = null; - private Date finalStateInstant = null; - private Date deferredRecallInstant = null; - private UUID groupTaskId = null; + TapeRecallTO result = new TapeRecallTO(); + Random r = new Random(); + result.setFileName("/root/" + voName + "/test/" + r.nextInt(1001)); + result.setRequestToken(TRequestToken.getRandom()); + if (r.nextInt(2) == 0) { + result.setRequestType(BOL); + } else { + result.setRequestType(PTG); + } + result.setUserID("FakeId"); + result.setRetryAttempt(0); + result.setPinLifetime(r.nextInt(1001)); + result.setVoName(voName); + result.setInsertionInstant(date); + int deferred = r.nextInt(2); + Date deferredRecallTime = new Date(date.getTime() + (deferred * (long) Math.random())); + result.setDeferredRecallInstant(deferredRecallTime); + result.setGroupTaskId(UUID.randomUUID()); + return result; + } - private final Calendar endOfTheWorld = new GregorianCalendar(2012, Calendar.DECEMBER, 21); + /* + * Implementing the natural order (by age) + */ + public int compareTo(TapeRecallTO arg0) { - public static TapeRecallTO createRandom(Date date, String voName) { + if (arg0 == null) { + return 0; + } + return insertionInstant.compareTo(arg0.getInsertionInstant()); + } - TapeRecallTO result = new TapeRecallTO(); - Random r = new Random(); - result.setFileName("/root/" + voName + "/test/" + r.nextInt(1001)); - result.setRequestToken(TRequestToken.getRandom()); - if (r.nextInt(2) == 0) { - result.setRequestType(BOL); - } else { - result.setRequestType(PTG); - } - result.setUserID("FakeId"); - result.setRetryAttempt(0); - result.setPinLifetime(r.nextInt(1001)); - result.setVoName(voName); - result.setInsertionInstant(date); - int deferred = r.nextInt(2); - Date deferredRecallTime = new Date(date.getTime() + (deferred * (long) Math.random())); - result.setDeferredRecallInstant(deferredRecallTime); - result.setGroupTaskId(UUID.randomUUID()); - return result; - } + public Date getDeferredRecallInstant() { - /* - * Implementing the natural order (by age) - */ - public int compareTo(TapeRecallTO arg0) { + return deferredRecallInstant; + } - if (arg0 == null) { - return 0; - } - return insertionInstant.compareTo(arg0.getInsertionInstant()); - } + public String getFileName() { - public Date getDeferredRecallInstant() { + return fileName; + } - return deferredRecallInstant; - } + public Date getInsertionInstant() { - public String getFileName() { + return insertionInstant; + } - return fileName; - } + public Date getInProgressInstant() { - public Date getInsertionInstant() { + return inProgressInstant; + } - return insertionInstant; - } + public Date getFinalStateInstant() { - public Date getInProgressInstant() { + return finalStateInstant; + } - return inProgressInstant; - } + public int getPinLifetime() { - public Date getFinalStateInstant() { + return 
pinLifetime; + } - return finalStateInstant; - } + public TapeRecallStatus getStatus() { - public int getPinLifetime() { + return status; + } - return pinLifetime; - } + /** + * RequestToken is the primary key of the table + * + * @return + */ + public TRequestToken getRequestToken() { - public TapeRecallStatus getStatus() { + return requestToken; + } - return status; - } + public RecallTaskType getRequestType() { - /** - * RequestToken is the primary key of the table - * - * @return - */ - public TRequestToken getRequestToken() { + return requestType; + } - return requestToken; - } + public int getRetryAttempt() { - public RecallTaskType getRequestType() { + return retryAttempt; + } - return requestType; - } + @JsonIgnore + public int getStatusId() { - public int getRetryAttempt() { + return status.getStatusId(); + } - return retryAttempt; - } + public UUID getTaskId() { - @JsonIgnore - public int getStatusId() { + buildTaskId(); + return taskId; + } - return status.getStatusId(); - } + public UUID getGroupTaskId() { - public UUID getTaskId() { + return groupTaskId; + } - buildTaskId(); - return taskId; - } + public String getUserID() { - public UUID getGroupTaskId() { + return userID; + } - return groupTaskId; - } + public String getVoName() { - public String getUserID() { + return voName; + } - return userID; - } + public void setDeferredRecallInstant(Date date) { - public String getVoName() { + deferredRecallInstant = date; + } - return voName; - } + public void setFileName(String fileName) { - public void setDeferredRecallInstant(Date date) { + this.fileName = fileName; + buildTaskId(); + } - deferredRecallInstant = date; - } + public void setInsertionInstant(Date date) { - public void setFileName(String fileName) { + insertionInstant = date; + } - this.fileName = fileName; - buildTaskId(); - } + private void setInProgressInstant(Date date) { - public void setInsertionInstant(Date date) { + inProgressInstant = date; + } - insertionInstant = date; - } + private void setFinalStateInstant(Date date) { - private void setInProgressInstant(Date date) { + finalStateInstant = date; + } - inProgressInstant = date; - } + public void setPinLifetime(int pinLifetime) { - private void setFinalStateInstant(Date date) { + this.pinLifetime = pinLifetime; + } - finalStateInstant = date; - } + /** @param requestToken */ + public void setRequestToken(TRequestToken requestToken) { - public void setPinLifetime(int pinLifetime) { + this.requestToken = requestToken; + } - this.pinLifetime = pinLifetime; - } + public void setRequestType(RecallTaskType requestType) { - /** - * - * @param requestToken - */ - public void setRequestToken(TRequestToken requestToken) { + this.requestType = requestType; + } - this.requestToken = requestToken; - } + public void setRetryAttempt(int retryAttempt) { - public void setRequestType(RecallTaskType requestType) { + this.retryAttempt = retryAttempt; + } - this.requestType = requestType; - } + /** + * Sets the status of the recall task and if a transition is performed records the appropriate + * time-stamp + * + * @param status + */ + public void setStatus(TapeRecallStatus status) { - public void setRetryAttempt(int retryAttempt) { + this.status = status; + if (this.status.equals(TapeRecallStatus.IN_PROGRESS) && this.inProgressInstant == null) { + this.setInProgressInstant(new Date()); + } else { + if (TapeRecallStatus.isFinalStatus(this.status.getStatusId()) + && this.inProgressInstant == null) { + this.setFinalStateInstant(new Date()); + } + } + } - this.retryAttempt = 
retryAttempt; - } + /** @param statusId */ + public void setStatusId(int statusId) { - /** - * Sets the status of the recall task and if a transition is performed records the appropriate - * time-stamp - * - * @param status - */ - public void setStatus(TapeRecallStatus status) { + this.setStatus(TapeRecallStatus.getRecallTaskStatus(statusId)); + } - this.status = status; - if (this.status.equals(TapeRecallStatus.IN_PROGRESS) && this.inProgressInstant == null) { - this.setInProgressInstant(new Date()); - } else { - if (TapeRecallStatus.isFinalStatus(this.status.getStatusId()) - && this.inProgressInstant == null) { - this.setFinalStateInstant(new Date()); - } - } - } + public void setTaskId(UUID taskId) { - /** - * @param statusId - */ - public void setStatusId(int statusId) { + this.taskId = taskId; + } - this.setStatus(TapeRecallStatus.getRecallTaskStatus(statusId)); - } + public void setGroupTaskId(UUID groupTaskId) { - public void setTaskId(UUID taskId) { + this.groupTaskId = groupTaskId; + } - this.taskId = taskId; - } + public void setUserID(String userID) { - public void setGroupTaskId(UUID groupTaskId) { + this.userID = userID; + } - this.groupTaskId = groupTaskId; - } + public void setVoName(String voName) { - public void setUserID(String userID) { - - this.userID = userID; - } - - public void setVoName(String voName) { - - this.voName = voName; - } - - /** - * Does not print the taskId but the group task Id Does not print the state transition time - * stamps - * - * @return - */ - public String toGEMSS() { - - StringBuilder sb = new StringBuilder(); - - sb.append(START_CHAR); - sb.append(groupTaskId); - sb.append(SEPARATOR_CHAR); - - Format formatter = new SimpleDateFormat(DATE_FORMAT); - if (insertionInstant != null) { - sb.append(formatter.format(insertionInstant)); - } else { - insertionInstant = endOfTheWorld.getTime(); - sb.append(formatter.format(insertionInstant)); - } - - sb.append(SEPARATOR_CHAR); - sb.append(requestType); - sb.append(SEPARATOR_CHAR); - sb.append(fileName); - sb.append(SEPARATOR_CHAR); - sb.append(voName); - sb.append(SEPARATOR_CHAR); - sb.append(userID); - sb.append(SEPARATOR_CHAR); - sb.append(retryAttempt); - sb.append(SEPARATOR_CHAR); - sb.append(status); - sb.append(SEPARATOR_CHAR); - - if (deferredRecallInstant != null) { - sb.append(formatter.format(deferredRecallInstant)); - } else { - sb.append(formatter.format(insertionInstant)); - } - - sb.append(SEPARATOR_CHAR); - sb.append(pinLifetime); - sb.append(SEPARATOR_CHAR); - sb.append(requestToken); - sb.append(SEPARATOR_CHAR); - - if (inProgressInstant != null) - sb.append(formatter.format(inProgressInstant)); - else - sb.append("null"); - - return sb.toString(); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - - sb.append(START_CHAR); - sb.append(taskId); - sb.append(SEPARATOR_CHAR); - - Format formatter = new SimpleDateFormat(DATE_FORMAT); - if (insertionInstant != null) { - sb.append(formatter.format(insertionInstant)); - } else { - insertionInstant = endOfTheWorld.getTime(); - sb.append(formatter.format(insertionInstant)); - } - - sb.append(SEPARATOR_CHAR); - sb.append(requestType); - sb.append(SEPARATOR_CHAR); - sb.append(fileName); - sb.append(SEPARATOR_CHAR); - sb.append(voName); - sb.append(SEPARATOR_CHAR); - sb.append(userID); - sb.append(SEPARATOR_CHAR); - sb.append(retryAttempt); - sb.append(SEPARATOR_CHAR); - sb.append(status); - sb.append(SEPARATOR_CHAR); - - if (inProgressInstant != null) { - sb.append(formatter.format(inProgressInstant)); - } 
else { - sb.append("null"); - } - sb.append(SEPARATOR_CHAR); - - if (finalStateInstant != null) { - sb.append(formatter.format(finalStateInstant)); - } else { - sb.append("null"); - } - sb.append(SEPARATOR_CHAR); - - if (deferredRecallInstant != null) { - sb.append(formatter.format(deferredRecallInstant)); - } else { - sb.append(formatter.format(insertionInstant)); - } - - sb.append(SEPARATOR_CHAR); - sb.append(pinLifetime); - sb.append(SEPARATOR_CHAR); - sb.append(requestToken); - sb.append(SEPARATOR_CHAR); - sb.append(groupTaskId); - return sb.toString(); - } - - /** - * This method generate a TaskId from fileName - * - * @return - */ - private void buildTaskId() { - - if (this.fileName != null) { - this.taskId = buildTaskIdFromFileName(this.fileName); - } else { - log.error("Unable to create taskId because filename is NULL"); - } - } - - public static UUID buildTaskIdFromFileName(String fileName) { - - return UUID.nameUUIDFromBytes(fileName.getBytes()); - } - - /** - * Intended to be used when building this object from a database row NOTE: before to call this - * method, call the set status method - * - * @param inProgressInstant - * @param finalStateInstant - */ - public void forceStatusUpdateInstants(Date inProgressInstant, Date finalStateInstant) { - - if (inProgressInstant != null) { - if (this.status.equals(TapeRecallStatus.IN_PROGRESS) - || TapeRecallStatus.isFinalStatus(this.status.getStatusId())) { - this.inProgressInstant = inProgressInstant; - } else { - log.error("Unable to force the in progress transition time-stamp. " - + "Invalid status: {}", status); - } - } - if (finalStateInstant != null) { - if (TapeRecallStatus.isFinalStatus(this.status.getStatusId())) { - this.finalStateInstant = finalStateInstant; - } else { - log.error("Unable to force the in final status transition time-stamp. 
" - + "current status {} is not finale", status); - } - } - } - - public void setFakeRequestToken() { - - final String FAKE_PREFIX = "FAKE-"; - try { - this.setRequestToken(new TRequestToken( - FAKE_PREFIX - .concat(UUID.randomUUID().toString().substring(FAKE_PREFIX.length())), - Calendar.getInstance().getTime())); - } catch (InvalidTRequestTokenAttributesException e) { - log.error(e.getMessage(), e); - } - } + this.voName = voName; + } + /** + * Does not print the taskId but the group task Id Does not print the state transition time stamps + * + * @return + */ + public String toGEMSS() { + + StringBuilder sb = new StringBuilder(); + + sb.append(START_CHAR); + sb.append(groupTaskId); + sb.append(SEPARATOR_CHAR); + + Format formatter = new SimpleDateFormat(DATE_FORMAT); + if (insertionInstant != null) { + sb.append(formatter.format(insertionInstant)); + } else { + insertionInstant = endOfTheWorld.getTime(); + sb.append(formatter.format(insertionInstant)); + } + + sb.append(SEPARATOR_CHAR); + sb.append(requestType); + sb.append(SEPARATOR_CHAR); + sb.append(fileName); + sb.append(SEPARATOR_CHAR); + sb.append(voName); + sb.append(SEPARATOR_CHAR); + sb.append(userID); + sb.append(SEPARATOR_CHAR); + sb.append(retryAttempt); + sb.append(SEPARATOR_CHAR); + sb.append(status); + sb.append(SEPARATOR_CHAR); + + if (deferredRecallInstant != null) { + sb.append(formatter.format(deferredRecallInstant)); + } else { + sb.append(formatter.format(insertionInstant)); + } + + sb.append(SEPARATOR_CHAR); + sb.append(pinLifetime); + sb.append(SEPARATOR_CHAR); + sb.append(requestToken); + sb.append(SEPARATOR_CHAR); + + if (inProgressInstant != null) sb.append(formatter.format(inProgressInstant)); + else sb.append("null"); + + return sb.toString(); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + + sb.append(START_CHAR); + sb.append(taskId); + sb.append(SEPARATOR_CHAR); + + Format formatter = new SimpleDateFormat(DATE_FORMAT); + if (insertionInstant != null) { + sb.append(formatter.format(insertionInstant)); + } else { + insertionInstant = endOfTheWorld.getTime(); + sb.append(formatter.format(insertionInstant)); + } + + sb.append(SEPARATOR_CHAR); + sb.append(requestType); + sb.append(SEPARATOR_CHAR); + sb.append(fileName); + sb.append(SEPARATOR_CHAR); + sb.append(voName); + sb.append(SEPARATOR_CHAR); + sb.append(userID); + sb.append(SEPARATOR_CHAR); + sb.append(retryAttempt); + sb.append(SEPARATOR_CHAR); + sb.append(status); + sb.append(SEPARATOR_CHAR); + + if (inProgressInstant != null) { + sb.append(formatter.format(inProgressInstant)); + } else { + sb.append("null"); + } + sb.append(SEPARATOR_CHAR); + + if (finalStateInstant != null) { + sb.append(formatter.format(finalStateInstant)); + } else { + sb.append("null"); + } + sb.append(SEPARATOR_CHAR); + + if (deferredRecallInstant != null) { + sb.append(formatter.format(deferredRecallInstant)); + } else { + sb.append(formatter.format(insertionInstant)); + } + + sb.append(SEPARATOR_CHAR); + sb.append(pinLifetime); + sb.append(SEPARATOR_CHAR); + sb.append(requestToken); + sb.append(SEPARATOR_CHAR); + sb.append(groupTaskId); + return sb.toString(); + } + + /** + * This method generate a TaskId from fileName + * + * @return + */ + private void buildTaskId() { + + if (this.fileName != null) { + this.taskId = buildTaskIdFromFileName(this.fileName); + } else { + log.error("Unable to create taskId because filename is NULL"); + } + } + + public static UUID buildTaskIdFromFileName(String fileName) { + + return 
UUID.nameUUIDFromBytes(fileName.getBytes()); + } + + /** + * Intended to be used when building this object from a database row NOTE: before to call this + * method, call the set status method + * + * @param inProgressInstant + * @param finalStateInstant + */ + public void forceStatusUpdateInstants(Date inProgressInstant, Date finalStateInstant) { + + if (inProgressInstant != null) { + if (this.status.equals(TapeRecallStatus.IN_PROGRESS) + || TapeRecallStatus.isFinalStatus(this.status.getStatusId())) { + this.inProgressInstant = inProgressInstant; + } else { + log.error( + "Unable to force the in progress transition time-stamp. " + "Invalid status: {}", + status); + } + } + if (finalStateInstant != null) { + if (TapeRecallStatus.isFinalStatus(this.status.getStatusId())) { + this.finalStateInstant = finalStateInstant; + } else { + log.error( + "Unable to force the in final status transition time-stamp. " + + "current status {} is not finale", + status); + } + } + } + + public void setFakeRequestToken() { + + final String FAKE_PREFIX = "FAKE-"; + try { + this.setRequestToken( + new TRequestToken( + FAKE_PREFIX.concat(UUID.randomUUID().toString().substring(FAKE_PREFIX.length())), + Calendar.getInstance().getTime())); + } catch (InvalidTRequestTokenAttributesException e) { + log.error(e.getMessage(), e); + } + } } diff --git a/src/main/java/it/grid/storm/persistence/model/TransferObjectDecodingException.java b/src/main/java/it/grid/storm/persistence/model/TransferObjectDecodingException.java index 0b4e226d..b6fdcc2f 100644 --- a/src/main/java/it/grid/storm/persistence/model/TransferObjectDecodingException.java +++ b/src/main/java/it/grid/storm/persistence/model/TransferObjectDecodingException.java @@ -1,21 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.persistence.model; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class TransferObjectDecodingException extends Exception { - public TransferObjectDecodingException(String string) { - super(string); - } + public TransferObjectDecodingException(String string) { + super(string); + } - private static final long serialVersionUID = -6613354584884266536L; + private static final long serialVersionUID = -6613354584884266536L; } diff --git a/src/main/java/it/grid/storm/persistence/util/db/DBConnection.java b/src/main/java/it/grid/storm/persistence/util/db/DBConnection.java index 2084fbce..cf908499 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/DBConnection.java +++ b/src/main/java/it/grid/storm/persistence/util/db/DBConnection.java @@ -1,89 +1,82 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; import it.grid.storm.persistence.DataSourceConnectionFactory; import it.grid.storm.persistence.exceptions.PersistenceException; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class DBConnection implements DataSourceConnectionFactory - -{ - - private static final Logger log = LoggerFactory.getLogger(DBConnection.class); - private Connection connection = null; - private DataBaseStrategy db; - - public DBConnection(DataBaseStrategy db) throws PersistenceException { - - this.db = db; - - try { - Class.forName(db.getDriverName()).newInstance(); - } catch (Exception ex) { - log.error("Exception while getting JDBC driver: {}", ex.getMessage(), ex); - throw new PersistenceException("Driver loading problem", ex); - } - } - - private void handleSQLException(SQLException e) throws PersistenceException{ - - log.error("SQL Error: {}, SQLState: {}, VendorError: {}.", - e.getMessage(), - e.getSQLState(), - e.getErrorCode(), - e); - - throw new PersistenceException(e); - - } - - public Connection borrowConnection() throws PersistenceException { - - Connection result = null; - try { - result = getConnection(); - } catch (SQLException e) { - handleSQLException(e); - } - return result; - } - - public void giveBackConnection(Connection con) throws PersistenceException { - - if (connection != null) { - try { - shutdown(); - } catch (SQLException e) { - handleSQLException(e); - } - } else { - throw new PersistenceException("Closing NON-Existing connection"); - } - } - - private Connection getConnection() throws SQLException { - - if (connection == null) { - String url = db.getConnectionString(); - connection = DriverManager.getConnection(url, db.getDbUsr(), - db.getDbPwd()); - } - return connection; - } - - private void shutdown() throws SQLException { - - connection.close(); // if there are no other open connection - connection = null; - } - +public class DBConnection implements DataSourceConnectionFactory { + + private static final Logger log = LoggerFactory.getLogger(DBConnection.class); + private Connection connection = null; + private DataBaseStrategy db; + + public DBConnection(DataBaseStrategy db) throws PersistenceException { + + this.db = db; + + try { + Class.forName(db.getDriverName()).newInstance(); + } catch (Exception ex) { + log.error("Exception while getting JDBC driver: {}", ex.getMessage(), ex); + throw new PersistenceException("Driver loading problem", ex); + } + } + + private void handleSQLException(SQLException e) throws PersistenceException { + + log.error( + "SQL Error: {}, SQLState: {}, VendorError: {}.", + e.getMessage(), + e.getSQLState(), + e.getErrorCode(), + e); + + throw new PersistenceException(e); + } + + public Connection borrowConnection() throws PersistenceException { + + Connection result = null; + try { + result = getConnection(); + } catch (SQLException e) { + handleSQLException(e); + } + return result; + } + + public void giveBackConnection(Connection con) throws PersistenceException { + + if (connection != null) { + try { + shutdown(); + } catch (SQLException e) { + handleSQLException(e); + } + } else { + throw new PersistenceException("Closing NON-Existing connection"); + } + } + + private Connection getConnection() throws SQLException { + + if (connection == null) { + String url = db.getConnectionString(); + connection = DriverManager.getConnection(url, db.getDbUsr(), 
db.getDbPwd()); + } + return connection; + } + + private void shutdown() throws SQLException { + + connection.close(); // if there are no other open connection + connection = null; + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/DBConnectionPool.java b/src/main/java/it/grid/storm/persistence/util/db/DBConnectionPool.java index 7d206950..e3fe810f 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/DBConnectionPool.java +++ b/src/main/java/it/grid/storm/persistence/util/db/DBConnectionPool.java @@ -1,15 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; import it.grid.storm.persistence.DataSourceConnectionFactory; import it.grid.storm.persistence.exceptions.PersistenceException; - import java.sql.Connection; import java.sql.SQLException; - import org.apache.commons.dbcp2.cpdsadapter.DriverAdapterCPDS; import org.apache.commons.dbcp2.datasources.SharedPoolDataSource; import org.slf4j.Logger; @@ -17,146 +14,137 @@ public class DBConnectionPool implements DataSourceConnectionFactory { - private static final Logger log = LoggerFactory - .getLogger(DBConnectionPool.class); - private DataBaseStrategy db; - private static SharedPoolDataSource sharedDatasource; - private static DBConnectionPool instance = new DBConnectionPool(); - private static long handle = -1; - - private DBConnectionPool() { - super(); - } - - public static DBConnectionPool getPoolInstance() { - if (handle == -1) { - return null; - } else { - return instance; - } - } - - public static void initPool(DataBaseStrategy db, int maxActive, int maxWait) - throws PersistenceException { - instance.init(db, maxActive, maxWait); - } - - - private void handleSQLException(SQLException e) throws PersistenceException{ - - log.error("SQL Error: {}, SQLState: {}, VendorError: {}.", - e.getMessage(), - e.getSQLState(), - e.getErrorCode(), - e); - - throw new PersistenceException(e); - - } - public Connection borrowConnection() throws PersistenceException { - - Connection result = null; - if (handle == -1) { - throw new PersistenceException("Connection Pool is not initialized!"); - } - try { - result = sharedDatasource.getConnection(); - } catch (SQLException e) { - handleSQLException(e); - } - return result; - } - - public void giveBackConnection(Connection con) throws PersistenceException { - - if (con != null) { - try { - shutdown(con); - } catch (SQLException e) { - handleSQLException(e); - } - } else { - throw new PersistenceException("Closing NON-Existing connection"); - } - } - - public String getPoolInfo() throws PersistenceException { - - String result = ""; - if (handle == -1) { - throw new PersistenceException("Connection Pool is not initialized!"); - } - if (sharedDatasource.getValidationQuery() != null) { - result += "Validation query = " + sharedDatasource.getValidationQuery() - + "\n"; - } - if (sharedDatasource.getDescription() != null) { - result += "Description = " + sharedDatasource.getDescription() + "\n"; - } - result += "Nr Connection Active = " + sharedDatasource.getNumActive() - + "\n"; - result += "Nr Connection Idle = " + sharedDatasource.getNumIdle() + "\n"; - result += "Nr Max Active Connection = " + sharedDatasource.getMaxTotal() - + "\n"; - - return result; - } - - private void init(DataBaseStrategy db, int maxActive, int maxWait) { - - instance.setDatabaseStrategy(db); - 
DriverAdapterCPDS connectionPoolDatasource = new DriverAdapterCPDS(); - try { - connectionPoolDatasource.setDriver(db.getDriverName()); - } catch (Exception ex) { - log.error("Exception while getting driver: {}", ex.getMessage(), ex); - } - - String connectionString = db.getConnectionString(); - connectionPoolDatasource.setUrl(connectionString); - log.debug("Database connection string: {}", connectionString); - connectionPoolDatasource.setUser(db.getDbUsr()); - connectionPoolDatasource.setPassword(db.getDbPwd()); - - sharedDatasource = new SharedPoolDataSource(); - sharedDatasource.setConnectionPoolDataSource(connectionPoolDatasource); - - sharedDatasource.setMaxTotal(maxActive); - sharedDatasource.setDefaultMaxWaitMillis(maxWait); - sharedDatasource.setValidationQuery("SELECT 1"); - sharedDatasource.setDefaultTestOnBorrow(true); - - handle = System.currentTimeMillis(); - } - - /** - * - * @throws SQLException - */ - private void shutdown(Connection conn) throws SQLException { - - conn.close(); - conn = null; - } - - public static void printInfo(DBConnectionPool pool) { - - try { - log.info("DATABASE POOL INFO: {}" , pool.getPoolInfo()); - } catch (PersistenceException ex2) { - log.error(ex2.getMessage(),ex2); - } - - } - - public DataBaseStrategy getDatabaseStrategy() { - - return db; - } - - private void setDatabaseStrategy(DataBaseStrategy db) { - - this.db = db; - } - + private static final Logger log = LoggerFactory.getLogger(DBConnectionPool.class); + private DataBaseStrategy db; + private static SharedPoolDataSource sharedDatasource; + private static DBConnectionPool instance = new DBConnectionPool(); + private static long handle = -1; + + private DBConnectionPool() { + super(); + } + + public static DBConnectionPool getPoolInstance() { + if (handle == -1) { + return null; + } else { + return instance; + } + } + + public static void initPool(DataBaseStrategy db, int maxActive, int maxWait) + throws PersistenceException { + instance.init(db, maxActive, maxWait); + } + + private void handleSQLException(SQLException e) throws PersistenceException { + + log.error( + "SQL Error: {}, SQLState: {}, VendorError: {}.", + e.getMessage(), + e.getSQLState(), + e.getErrorCode(), + e); + + throw new PersistenceException(e); + } + + public Connection borrowConnection() throws PersistenceException { + + Connection result = null; + if (handle == -1) { + throw new PersistenceException("Connection Pool is not initialized!"); + } + try { + result = sharedDatasource.getConnection(); + } catch (SQLException e) { + handleSQLException(e); + } + return result; + } + + public void giveBackConnection(Connection con) throws PersistenceException { + + if (con != null) { + try { + shutdown(con); + } catch (SQLException e) { + handleSQLException(e); + } + } else { + throw new PersistenceException("Closing NON-Existing connection"); + } + } + + public String getPoolInfo() throws PersistenceException { + + String result = ""; + if (handle == -1) { + throw new PersistenceException("Connection Pool is not initialized!"); + } + if (sharedDatasource.getValidationQuery() != null) { + result += "Validation query = " + sharedDatasource.getValidationQuery() + "\n"; + } + if (sharedDatasource.getDescription() != null) { + result += "Description = " + sharedDatasource.getDescription() + "\n"; + } + result += "Nr Connection Active = " + sharedDatasource.getNumActive() + "\n"; + result += "Nr Connection Idle = " + sharedDatasource.getNumIdle() + "\n"; + result += "Nr Max Active Connection = " + sharedDatasource.getMaxTotal() + 
"\n"; + + return result; + } + + private void init(DataBaseStrategy db, int maxActive, int maxWait) { + + instance.setDatabaseStrategy(db); + DriverAdapterCPDS connectionPoolDatasource = new DriverAdapterCPDS(); + try { + connectionPoolDatasource.setDriver(db.getDriverName()); + } catch (Exception ex) { + log.error("Exception while getting driver: {}", ex.getMessage(), ex); + } + + String connectionString = db.getConnectionString(); + connectionPoolDatasource.setUrl(connectionString); + log.debug("Database connection string: {}", connectionString); + connectionPoolDatasource.setUser(db.getDbUsr()); + connectionPoolDatasource.setPassword(db.getDbPwd()); + + sharedDatasource = new SharedPoolDataSource(); + sharedDatasource.setConnectionPoolDataSource(connectionPoolDatasource); + + sharedDatasource.setMaxTotal(maxActive); + sharedDatasource.setDefaultMaxWaitMillis(maxWait); + sharedDatasource.setValidationQuery("SELECT 1"); + sharedDatasource.setDefaultTestOnBorrow(true); + + handle = System.currentTimeMillis(); + } + + /** @throws SQLException */ + private void shutdown(Connection conn) throws SQLException { + + conn.close(); + conn = null; + } + + public static void printInfo(DBConnectionPool pool) { + + try { + log.info("DATABASE POOL INFO: {}", pool.getPoolInfo()); + } catch (PersistenceException ex2) { + log.error(ex2.getMessage(), ex2); + } + } + + public DataBaseStrategy getDatabaseStrategy() { + + return db; + } + + private void setDatabaseStrategy(DataBaseStrategy db) { + + this.db = db; + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/DataBaseStrategy.java b/src/main/java/it/grid/storm/persistence/util/db/DataBaseStrategy.java index 63e583fb..09a8e62e 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/DataBaseStrategy.java +++ b/src/main/java/it/grid/storm/persistence/util/db/DataBaseStrategy.java @@ -1,123 +1,121 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; public class DataBaseStrategy { - private final String dbmsVendor; - private final String driverName; - private final String jdbcPrefix; - private String dbName; - private String dbPrefix; - private String dbHost; - private String dbUsr; - private String dbPwd; - private SQLFormat formatter; - private String properties; + private final String dbmsVendor; + private final String driverName; + private final String jdbcPrefix; + private String dbName; + private String dbPrefix; + private String dbHost; + private String dbUsr; + private String dbPwd; + private SQLFormat formatter; + private String properties; - public DataBaseStrategy(String dbmsVendor, String driverName, String prefix, - SQLFormat formatter) { + public DataBaseStrategy( + String dbmsVendor, String driverName, String prefix, SQLFormat formatter) { - this.dbmsVendor = dbmsVendor; - this.driverName = driverName; - jdbcPrefix = prefix; - this.formatter = formatter; - this.properties = ""; - } + this.dbmsVendor = dbmsVendor; + this.driverName = driverName; + jdbcPrefix = prefix; + this.formatter = formatter; + this.properties = ""; + } + public String getDbmsVendor() { + return dbmsVendor; + } - public String getDbmsVendor() { - return dbmsVendor; - } + public String getDriverName() { + return driverName; + } - public String getDriverName() { - return driverName; - } + public String getJdbcPrefix() { - public String getJdbcPrefix() { + return jdbcPrefix; + } - return jdbcPrefix; - } + public void setDbUsr(String usrDb) { - public void setDbUsr(String usrDb) { + dbUsr = usrDb; + } - dbUsr = usrDb; - } + public String getDbUsr() { - public String getDbUsr() { + return dbUsr; + } - return dbUsr; - } + public void setDbPwd(String pwd) { - public void setDbPwd(String pwd) { + dbPwd = pwd; + } - dbPwd = pwd; - } + public String getDbPwd() { - public String getDbPwd() { + return dbPwd; + } - return dbPwd; - } + public void setDbName(String dbName) { - public void setDbName(String dbName) { + this.dbName = dbName; + } - this.dbName = dbName; - } + public String getDbName() { - public String getDbName() { + return dbName; + } - return dbName; - } + public void setDbPrefix(String dbName) { - public void setDbPrefix(String dbName) { + dbPrefix = dbName; + } - dbPrefix = dbName; - } + public String getDbPrefix() { - public String getDbPrefix() { + return dbPrefix; + } - return dbPrefix; - } + public void setDbHost(String host) { - public void setDbHost(String host) { + dbHost = host; + } - dbHost = host; - } + public String getDbHost() { - public String getDbHost() { + return dbHost; + } - return dbHost; - } + public String getConnectionString() { - public String getConnectionString() { + String connStr = jdbcPrefix + dbHost + "/" + dbName; + if (!properties.isEmpty()) { + connStr += "?" + properties; + } + return connStr; + } - String connStr = jdbcPrefix + dbHost + "/" + dbName; - if (!properties.isEmpty()) { - connStr += "?" 
+ properties; - } - return connStr; - } + public void setFormatter(SQLFormat formatter) { - public void setFormatter(SQLFormat formatter) { + this.formatter = formatter; + } - this.formatter = formatter; - } + public SQLFormat getFormatter() { - public SQLFormat getFormatter() { + return formatter; + } - return formatter; - } + public void setProperties(String encodedProperties) { - public void setProperties(String encodedProperties) { + this.properties = encodedProperties; + } - this.properties = encodedProperties; - } + @Override + public String toString() { - @Override - public String toString() { - - return dbmsVendor; - } + return dbmsVendor; + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/Databases.java b/src/main/java/it/grid/storm/persistence/util/db/Databases.java index 677c1efc..52c065a3 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/Databases.java +++ b/src/main/java/it/grid/storm/persistence/util/db/Databases.java @@ -1,14 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; -import java.util.Map; - import com.google.common.collect.Maps; - import it.grid.storm.config.Configuration; +import java.util.Map; public class Databases { @@ -23,7 +20,8 @@ public class Databases { static { Configuration config = Configuration.getInstance(); - DataBaseStrategy dbs = new DataBaseStrategy(MYSQL_VENDOR, MYSQL_DRIVER, MYSQL_PREFIX, MYSQL_FORMATTER); + DataBaseStrategy dbs = + new DataBaseStrategy(MYSQL_VENDOR, MYSQL_DRIVER, MYSQL_PREFIX, MYSQL_FORMATTER); dbs.setDbUsr(config.getDBUserName()); dbs.setDbPwd(config.getDBPassword()); dbs.setProperties(config.getDBProperties()); @@ -35,5 +33,5 @@ public class Databases { public static DataBaseStrategy getDataBaseStrategy(String vendor) { return DATABASES.get(vendor); -} + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/InsertBuilder.java b/src/main/java/it/grid/storm/persistence/util/db/InsertBuilder.java index 4748fd6d..cf90ab92 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/InsertBuilder.java +++ b/src/main/java/it/grid/storm/persistence/util/db/InsertBuilder.java @@ -1,69 +1,67 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; -import java.util.Map; import java.util.HashMap; import java.util.Iterator; +import java.util.Map; public class InsertBuilder extends SQLBuilder { - private String table; - private Map columnsAndData = new HashMap(); - - public void setTable(String table) { + private String table; + private Map columnsAndData = new HashMap(); - this.table = table; - } + public void setTable(String table) { - public String getTable() { + this.table = table; + } - return table; - } + public String getTable() { - public String getCommand() { + return table; + } - return "INSERT INTO "; - } + public String getCommand() { - public String getCriteria() { + return "INSERT INTO "; + } - return ""; - } + public String getCriteria() { - public String getWhat() { + return ""; + } - StringBuilder columns = new StringBuilder(); - StringBuilder values = new StringBuilder(); - StringBuilder what = new StringBuilder(); + public String getWhat() { - String columnName = null; - Iterator iter = columnsAndData.keySet().iterator(); - while (iter.hasNext()) { - columnName = iter.next(); - columns.append(columnName); - values.append(columnsAndData.get(columnName)); - if (iter.hasNext()) { - columns.append(','); - values.append(','); - } - } + StringBuilder columns = new StringBuilder(); + StringBuilder values = new StringBuilder(); + StringBuilder what = new StringBuilder(); - what.append(" ("); - what.append(columns); - what.append(") VALUES ("); - what.append(values); - what.append(") "); - return what.toString(); + String columnName = null; + Iterator iter = columnsAndData.keySet().iterator(); + while (iter.hasNext()) { + columnName = iter.next(); + columns.append(columnName); + values.append(columnsAndData.get(columnName)); + if (iter.hasNext()) { + columns.append(','); + values.append(','); + } + } - } + what.append(" ("); + what.append(columns); + what.append(") VALUES ("); + what.append(values); + what.append(") "); + return what.toString(); + } - public void addColumnAndData(String columnName, Object value) { + public void addColumnAndData(String columnName, Object value) { - if (value != null) { - columnsAndData.put(columnName, value); - } - } + if (value != null) { + columnsAndData.put(columnName, value); + } + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/MySqlFormat.java b/src/main/java/it/grid/storm/persistence/util/db/MySqlFormat.java index 3eee51b9..b2789b6c 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/MySqlFormat.java +++ b/src/main/java/it/grid/storm/persistence/util/db/MySqlFormat.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; @@ -8,32 +7,29 @@ public class MySqlFormat implements SQLFormat { - private static final SimpleDateFormat dateFormat = new SimpleDateFormat( - "yyyy-MM-dd HH:mm:ss"); + private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - /** - * Create a string value of fields insertable into the query - * - * @param value - * Object - * @return String - */ - public String format(Object value) { - - if (value == null) { - return null; - } - Class clazz = value.getClass(); - if (Character.class.equals(clazz) || char.class.equals(clazz)) { - value = value.toString(); - } - if (value instanceof String) { - return value.toString(); - } - if (value instanceof java.util.Date) { - return dateFormat.format(value); - } - return value.toString(); - } + /** + * Create a string value of fields insertable into the query + * + * @param value Object + * @return String + */ + public String format(Object value) { + if (value == null) { + return null; + } + Class clazz = value.getClass(); + if (Character.class.equals(clazz) || char.class.equals(clazz)) { + value = value.toString(); + } + if (value instanceof String) { + return value.toString(); + } + if (value instanceof java.util.Date) { + return dateFormat.format(value); + } + return value.toString(); + } } diff --git a/src/main/java/it/grid/storm/persistence/util/db/SQLBuilder.java b/src/main/java/it/grid/storm/persistence/util/db/SQLBuilder.java index b436fcb6..5daaab88 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/SQLBuilder.java +++ b/src/main/java/it/grid/storm/persistence/util/db/SQLBuilder.java @@ -1,22 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; public abstract class SQLBuilder { - public SQLBuilder() { + public SQLBuilder() { - super(); - } + super(); + } - public abstract String getCommand(); + public abstract String getCommand(); - public abstract String getTable(); + public abstract String getTable(); - public abstract String getWhat(); - - public abstract String getCriteria(); + public abstract String getWhat(); + public abstract String getCriteria(); } diff --git a/src/main/java/it/grid/storm/persistence/util/db/SQLFormat.java b/src/main/java/it/grid/storm/persistence/util/db/SQLFormat.java index 2d863e14..e8d827a4 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/SQLFormat.java +++ b/src/main/java/it/grid/storm/persistence/util/db/SQLFormat.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; public interface SQLFormat { - public String format(Object value); - + public String format(Object value); } diff --git a/src/main/java/it/grid/storm/persistence/util/db/SQLHelper.java b/src/main/java/it/grid/storm/persistence/util/db/SQLHelper.java index b09d9665..acedf771 100644 --- a/src/main/java/it/grid/storm/persistence/util/db/SQLHelper.java +++ b/src/main/java/it/grid/storm/persistence/util/db/SQLHelper.java @@ -1,86 +1,76 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.db; public abstract class SQLHelper { - public String dbmsVendor; - private SQLFormat formatter; + public String dbmsVendor; + private SQLFormat formatter; - protected SQLHelper(String dbmsVendor) { + protected SQLHelper(String dbmsVendor) { - this.dbmsVendor = dbmsVendor; - this.formatter = Databases.getDataBaseStrategy(dbmsVendor).getFormatter(); - } + this.dbmsVendor = dbmsVendor; + this.formatter = Databases.getDataBaseStrategy(dbmsVendor).getFormatter(); + } - public String format(Object value) { + public String format(Object value) { - return formatter.format(value); - } + return formatter.format(value); + } - /** - * - * @param value - * boolean - * @return String - */ - public String format(boolean value) { + /** + * @param value boolean + * @return String + */ + public String format(boolean value) { - String result = null; - Boolean boolValue = new Boolean(value); - result = formatter.format(boolValue); - return result; - } + String result = null; + Boolean boolValue = new Boolean(value); + result = formatter.format(boolValue); + return result; + } - /** - * - * @param value - * int - * @return String - */ - public String format(int value) { + /** + * @param value int + * @return String + */ + public String format(int value) { - String result = null; - Integer intValue = null; - try { - intValue = new Integer(value); - } catch (NumberFormatException nfe) { - nfe.printStackTrace(); - } - result = formatter.format(intValue); - return result; - } + String result = null; + Integer intValue = null; + try { + intValue = new Integer(value); + } catch (NumberFormatException nfe) { + nfe.printStackTrace(); + } + result = formatter.format(intValue); + return result; + } - /** - * - * @param value - * long - * @return String - */ - public String format(long value) { + /** + * @param value long + * @return String + */ + public String format(long value) { - String result = null; - Long longValue = null; - try { - longValue = new Long(value); - } catch (NumberFormatException nfe) { - nfe.printStackTrace(); - } - result = formatter.format(longValue); - return result; - } + String result = null; + Long longValue = null; + try { + longValue = new Long(value); + } catch (NumberFormatException nfe) { + nfe.printStackTrace(); + } + result = formatter.format(longValue); + return result; + } - /** - * - * @param date - * Date - * @return String - */ - public String format(java.util.Date date) { - - return formatter.format(date); - } + /** + * @param date Date + * @return String + */ + public String format(java.util.Date date) { + return formatter.format(date); + } } diff --git a/src/main/java/it/grid/storm/persistence/util/helper/StorageSpaceSQLHelper.java b/src/main/java/it/grid/storm/persistence/util/helper/StorageSpaceSQLHelper.java index b94815e1..e29f225c 100644 --- a/src/main/java/it/grid/storm/persistence/util/helper/StorageSpaceSQLHelper.java +++ b/src/main/java/it/grid/storm/persistence/util/helper/StorageSpaceSQLHelper.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.helper; @@ -8,7 +7,6 @@ import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.persistence.model.StorageSpaceTO; import it.grid.storm.persistence.util.db.SQLHelper; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -20,785 +18,779 @@ public class StorageSpaceSQLHelper extends SQLHelper { - private final static String TABLE_NAME = "storage_space"; - private final static HashMap COLS = new HashMap(); - - private static final String[] COLUMN_NAMES = { "SS_ID", "USERDN", "VOGROUP", - "ALIAS", "SPACE_TOKEN", "CREATED", "TOTAL_SIZE", "GUAR_SIZE", "FREE_SIZE", - "SPACE_FILE", "STORAGE_INFO", "LIFETIME", "SPACE_TYPE", "USED_SIZE", - "BUSY_SIZE", "UNAVAILABLE_SIZE", "AVAILABLE_SIZE", "RESERVED_SIZE", - "UPDATE_TIME" }; - - static { - COLS.put("storageSpaceId", "SS_ID"); - COLS.put("ownerName", "USERDN"); - COLS.put("ownerVO", "VOGROUP"); - COLS.put("alias", "ALIAS"); - COLS.put("token", "SPACE_TOKEN"); - COLS.put("created", "CREATED"); - COLS.put("spaceFile", "SPACE_FILE"); - COLS.put("storaqeInfo", "STORAGE_INFO"); - COLS.put("lifeTime", "LIFETIME"); - COLS.put("spaceType", "SPACE_TYPE"); - COLS.put("total_size", "TOTAL_SIZE"); - COLS.put("guar_size", "GUAR_SIZE"); - COLS.put("free_size", "FREE_SIZE"); - COLS.put("used_size", "USED_SIZE"); - COLS.put("busy_size", "BUSY_SIZE"); - COLS.put("unavailable_size", "UNAVAILABLE_SIZE"); - COLS.put("available_size", "AVAILABLE_SIZE"); - COLS.put("reserved_size", "RESERVED_SIZE"); - COLS.put("update_time", "UPDATE_TIME"); - } - - /** - * CONSTRUCTOR - */ - public StorageSpaceSQLHelper(String dbmsVendor) { - - super(dbmsVendor); - } - - /** - * - * @return String[] - */ - public String[] getColumnNames() { - - return COLUMN_NAMES; - } - - /** - * INSERT NEW ROW into TABLE - * - * @param ssTO - * StorageSpaceTO - * @return String - * @throws SQLException - */ - - public PreparedStatement insertQuery(Connection conn, StorageSpaceTO ssTO) - throws SQLException { - - List values = new LinkedList(); - - StringBuilder fields = new StringBuilder("("); - StringBuilder placeholders = new StringBuilder("("); - - if (ssTO != null) { - if (ssTO.getOwnerName() != null) { - fields.append(COLS.get("ownerName") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getOwnerName())); - } - - fields.append(COLS.get("ownerVO") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getVoName())); - - if (ssTO.getAlias() != null) { - fields.append(COLS.get("alias") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getAlias())); - } - if (ssTO.getSpaceToken() != null) { - fields.append(COLS.get("token") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getSpaceToken())); - } - if (ssTO.getCreated() != null) { - fields.append(COLS.get("created") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getCreated())); - } - if (ssTO.getSpaceFile() != null) { - fields.append(COLS.get("spaceFile") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getSpaceFile())); - } - if (ssTO.getStorageInfo() != null) { - fields.append(COLS.get("storaqeInfo") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getStorageInfo())); - } - if (ssTO.getLifetime() != -1) { - fields.append(COLS.get("lifeTime") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getLifetime())); - } - if (ssTO.getSpaceType() != null) { - fields.append(COLS.get("spaceType") + (",")); - 
placeholders.append("?,"); - values.add(format(ssTO.getSpaceType())); - } - if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { - fields.append(COLS.get("total_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getTotalSize())); - } - if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { - fields.append(COLS.get("guar_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getGuaranteedSize())); - } - if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { - fields.append(COLS.get("free_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getFreeSize())); - } - if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { - fields.append(COLS.get("used_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getUsedSize())); - } - if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { - fields.append(COLS.get("busy_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getBusySize())); - } - if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { - fields.append(COLS.get("unavailable_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getUnavailableSize())); - } - - if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { - fields.append(COLS.get("available_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getAvailableSize())); - } - if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { - fields.append(COLS.get("reserved_size") + (",")); - placeholders.append("?,"); - values.add(format(ssTO.getReservedSize())); - } - if (ssTO.getUpdateTime() != null) { - fields.append(COLS.get("update_time").concat(",")); - placeholders.append("?,"); - values.add(format(ssTO.getUpdateTime())); - } - } - - fields.deleteCharAt(fields.length() - 1); - fields.append(")"); - placeholders.deleteCharAt(placeholders.length() - 1); - placeholders.append(")"); - - String str = "INSERT INTO " + TABLE_NAME + " " + fields.toString() - + " VALUES " + placeholders.toString(); - PreparedStatement preparedStatement = conn.prepareStatement(str); - - int index = 1; - for (String val : values) { - preparedStatement.setString(index, val); - index++; - } - - return preparedStatement; - } - - /** - * Create a StorageSpace Transfer Object coming from Result Set - * - * @param res - * ResultSet - * @return StorageSpaceTO - */ - public StorageSpaceTO makeStorageSpaceTO(ResultSet res) { - - StorageSpaceTO ssTO = new StorageSpaceTO(); - - try { - ssTO.setStorageSpaceId(new Long(res.getLong("SS_ID"))); - - ssTO.setOwnerName(res.getString("USERDN")); - ssTO.setVoName(res.getString("VOGROUP")); - ssTO.setAlias(res.getString("ALIAS")); - ssTO.setSpaceToken(res.getString("SPACE_TOKEN")); - - java.sql.Timestamp createdTimeStamp = res.getTimestamp("CREATED"); - Date creationDate = new Date(createdTimeStamp.getTime()); - ssTO.setCreated(creationDate); - - ssTO.setSpaceFile(res.getString("SPACE_FILE")); - ssTO.setStorageInfo(res.getString("STORAGE_INFO")); - long tempLong = res.getLong("LIFETIME"); - if (!res.wasNull()) { - ssTO.setLifetime(tempLong); - } - - ssTO.setSpaceType(res.getString("SPACE_TYPE")); - - // Sizes - tempLong = res.getLong("TOTAL_SIZE"); - if (!res.wasNull()) { - ssTO.setTotalSize(tempLong); - } - tempLong = res.getLong("GUAR_SIZE"); - if (!res.wasNull()) { - ssTO.setGuaranteedSize(tempLong); - } - tempLong = res.getLong("RESERVED_SIZE"); - if (!res.wasNull()) { - ssTO.setReservedSize(tempLong); - } 
- tempLong = res.getLong("FREE_SIZE"); - if (!res.wasNull()) { - ssTO.setFreeSize(tempLong); - } - tempLong = res.getLong("AVAILABLE_SIZE"); - if (!res.wasNull()) { - ssTO.setAvailableSize(tempLong); - } - tempLong = res.getLong("USED_SIZE"); - if (!res.wasNull()) { - ssTO.setUsedSize(tempLong); - } - tempLong = res.getLong("BUSY_SIZE"); - if (!res.wasNull()) { - ssTO.setBusySize(tempLong); - } - tempLong = res.getLong("UNAVAILABLE_SIZE"); - if (!res.wasNull()) { - ssTO.setUnavailableSize(tempLong); - } - - // Last Update - java.sql.Timestamp updatedTimeStamp = res.getTimestamp("UPDATE_TIME"); - Date updateDate = new Date(updatedTimeStamp.getTime()); - ssTO.setUpdateTime(updateDate); - - } catch (SQLException ex) { - ex.printStackTrace(); - } - - return ssTO; - } - - // ************ HELPER Method *************** // - - /** - * @param vo - * @return - */ - private String getVOName(String vo) { - - String voStr = VO.makeNoVo().getValue(); - if (vo != null && !vo.trim().equals("")) { - voStr = vo.trim(); - } - return voStr; - } - - /** - * - * - * @param token - * String - * @param conn - * @return String - * @throws SQLException - */ - public PreparedStatement selectByTokenQuery(Connection conn, String token) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where space_token=?"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, token); - - return preparedStatement; - } - - /** - * Returns the SQL string for selecting all columns from the table - * 'storage_space' in the 'storm_be_ISAM' database matching 'user' and - * 'spaceAlias'. 'spaceAlias' can be NULL or empty. - * - * @param user - * VomsGridUser. - * @param spaceAlias - * String. - * @return String. - * @throws SQLException - */ - public PreparedStatement selectBySpaceAliasQuery(Connection conn, - GridUserInterface user, String spaceAlias) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - String dn = user.getDn(); - - if ((spaceAlias == null) || (spaceAlias.length() == 0)) { - str = "SELECT * FROM storage_space where userdn=?"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, dn); - } else { - str = "SELECT * FROM storage_space where userdn=? AND alias=?"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, dn); - preparedStatement.setString(2, spaceAlias); - } - return preparedStatement; - } - - /** - * Returns the SQL string for selecting all columns from the table - * 'storage_space' in the 'storm_be_ISAM' database matching 'user' and - * 'spaceAlias'. 'spaceAlias' can be NULL or empty. - * - * @param user - * VomsGridUser. - * @param spaceAlias - * String. - * @return String. - * @throws SQLException - */ - public PreparedStatement selectBySpaceAliasOnlyQuery(Connection conn, - String spaceAlias) throws SQLException { - - /* - * This is to distinguish a client reseve space with a VOSpaceArea both with - * the same token. Only the one made by the namespace process contains a - * fake dn - */ - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where alias=?"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, spaceAlias); - - return preparedStatement; - } - - /** - * Returns the SQL string for selecting all columns from the table - * 'storage_space' in the 'storm_be_ISAM' database matching 'voname'. 
- * - * @param voname - * string - * @return String. - * @throws SQLException - */ - - public PreparedStatement selectBySpaceType(Connection conn, String voname) - throws SQLException { - - /* - * This is to distinguish a client reseve space with a VOSpaceArea both with - * the same token. Only the one made by the namespace process contains a - * fake dn - */ - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where SPACE_TYPE=?"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, voname); - - return preparedStatement; - } - - /** - * This method return the SQL query to evaluate all expired space reservation - * requests. - * - * @param time - * Current time (in second) to compare to the reservationTime + - * lifetime - * @return String SQL query - * @throws SQLException - */ - public PreparedStatement selectExpiredQuery(Connection conn, - long currentTimeInSecond) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where lifetime is not null and (UNIX_TIMESTAMP(created)+lifetime< ?)"; - preparedStatement = conn.prepareStatement(str); - preparedStatement.setLong(1, currentTimeInSecond); - - return preparedStatement; - - } - - /** - * @param size - * @return - * @throws SQLException - */ - public PreparedStatement selectByUnavailableUsedSpaceSizeQuery( - Connection conn, long unavailableSizeValue) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where " + COLS.get("used_size") - + " IS NULL or " + COLS.get("used_size") + "=?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setLong(1, unavailableSizeValue); - - return preparedStatement; - } - - /** - * @param lastUpdateTimestamp - * @return - * @throws SQLException - */ - - public PreparedStatement selectByPreviousOrNullLastUpdateQuery( - Connection conn, long lastUpdateTimestamp) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM storage_space where " + COLS.get("update_time") - + " IS NULL or UNIX_TIMESTAMP(" + COLS.get("update_time") + ") < ?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setLong(1, lastUpdateTimestamp); - - return preparedStatement; - - } - - /** - * Returns the SQL query for removing a row from the table 'storage_space' in - * the 'storm_be_ISAM' database matching 'userDN' and 'spaceToken'. - * - * @param user - * @param spaceToken - * @return - * @throws SQLException - */ - public PreparedStatement removeByTokenQuery(Connection conn, - GridUserInterface user, String spaceToken) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "DELETE FROM storage_space WHERE ((USERDN=?) AND (SPACE_TOKEN=?))"; - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setString(1, user.getDn()); - preparedStatement.setString(2, spaceToken); - - return preparedStatement; - } - - /** - * Returns the SQL query for removing a row from the table 'storage_space' in - * the 'storm_be_ISAM' database matching 'spaceToken'. 
- * - * @param spaceToken - * @return - * @throws SQLException - */ - public PreparedStatement removeByTokenQuery(Connection conn, String spaceToken) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "DELETE FROM storage_space WHERE (SPACE_TOKEN=?)"; - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setString(1, spaceToken); - - return preparedStatement; - } - - /** - * Provides a query that updates all row fields accordingly to the provided - * StorageSpaceTO - * - * @param ssTO - * @return - * @throws IllegalArgumentException - * @throws SQLException - */ - public PreparedStatement updateByAliasAndTokenQuery(Connection conn, - StorageSpaceTO ssTO) throws IllegalArgumentException, SQLException { - - List values = new LinkedList(); - - if (ssTO == null) { - throw new IllegalArgumentException(); - } - String query = "UPDATE storage_space SET"; - if (ssTO.getOwnerName() != null) { - query += " " + COLS.get("ownerName") + " = ?" + " ,"; - values.add(format(ssTO.getOwnerName())); - } - - query += " " + COLS.get("ownerVO") + " = ?" + " ,"; - values.add(format(getVOName(ssTO.getVoName()))); - - if (ssTO.getCreated() != null) { - query += " " + COLS.get("created") + " = ?" + " ,"; - values.add(format(ssTO.getCreated())); - } - if (ssTO.getSpaceFile() != null) { - query += " " + COLS.get("spaceFile") + " = ?" + " ,"; - values.add(format(ssTO.getSpaceFile())); - } - if (ssTO.getStorageInfo() != null) { - query += " " + COLS.get("storaqeInfo") + " = ?" + " ,"; - values.add(format(ssTO.getStorageInfo())); - } - if (ssTO.getLifetime() != -1) { - query += " " + COLS.get("lifeTime") + " = ?" + " ,"; - values.add(format(ssTO.getLifetime())); - } - if (ssTO.getSpaceType() != null) { - query += " " + COLS.get("spaceType") + " = ?" + " ,"; - values.add(format(ssTO.getSpaceType())); - } - if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { - query += " " + COLS.get("total_size") + " = ?" + " ,"; - values.add(format(ssTO.getTotalSize())); - } - if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { - query += " " + COLS.get("guar_size") + " = ?" + " ,"; - values.add(format(ssTO.getGuaranteedSize())); - } - if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { - query += " " + COLS.get("free_size") + " = ?" + " ,"; - values.add(format(ssTO.getFreeSize())); - } - if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { - query += " " + COLS.get("used_size") + " = ?" + " ,"; - values.add(format(ssTO.getUsedSize())); - } - if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { - query += " " + COLS.get("busy_size") + " = ?" + " ,"; - values.add(format(ssTO.getBusySize())); - } - if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { - query += " " + COLS.get("unavailable_size") + " = ?" + " ,"; - values.add(format(ssTO.getUnavailableSize())); - } - if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { - query += " " + COLS.get("available_size") + " = ?" + " ,"; - values.add(format(ssTO.getAvailableSize())); - } - if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { - query += " " + COLS.get("reserved_size") + " = ?" + " ,"; - values.add(format(ssTO.getReservedSize())); - } - if (ssTO.getUpdateTime() != null) { - query += " " + COLS.get("update_time") + " = ?" 
+ " ,"; - values.add(format(ssTO.getUpdateTime())); - } - if (query.charAt(query.length() - 1) == ',') { - query = query.substring(0, query.length() - 1); - } - query += " where " + COLS.get("alias") + " = ?" + " and " + COLS.get("token") + " = ?"; - - values.add(format(ssTO.getAlias())); - values.add(format(ssTO.getSpaceToken())); - - PreparedStatement preparedStatement = conn.prepareStatement(query); - - int index = 1; - for (String val : values) { - preparedStatement.setString(index, val); - index++; - } - - return preparedStatement; - } - - /** - * Provides a query that updates all row fields accordingly to the provided - * StorageSpaceTO and using SpaceToken as key - * - * @param ssTO - * @return - * @throws IllegalArgumentException - * @throws SQLException - */ - public PreparedStatement updateByTokenQuery(Connection conn, - StorageSpaceTO ssTO) throws IllegalArgumentException, SQLException { - - List values = new LinkedList(); - - if (ssTO == null) { - throw new IllegalArgumentException(); - } - String query = "UPDATE storage_space SET"; - if (ssTO.getOwnerName() != null) { - query += " " + COLS.get("ownerName") + " = ?" + " ,"; - values.add(format(ssTO.getOwnerName())); - } - - query += " " + COLS.get("ownerVO") + " = ?" + " ,"; - values.add((getVOName(ssTO.getVoName()))); - - if (ssTO.getCreated() != null) { - query += " " + COLS.get("created") + " = ?" + " ,"; - values.add(format(ssTO.getCreated())); - } - if (ssTO.getAlias() != null) { - query += " " + COLS.get("alias") + " = ?" + " ,"; - values.add(format(ssTO.getAlias())); - } - if (ssTO.getSpaceFile() != null) { - query += " " + COLS.get("spaceFile") + " = ?" + " ,"; - values.add(format(ssTO.getSpaceFile())); - } - if (ssTO.getStorageInfo() != null) { - query += " " + COLS.get("storaqeInfo") + " = ?" + " ,"; - values.add(format(ssTO.getStorageInfo())); - } - if (ssTO.getLifetime() != -1) { - query += " " + COLS.get("lifeTime") + " = ?" + " ,"; - values.add(format(ssTO.getLifetime())); - } - if (ssTO.getSpaceType() != null) { - query += " " + COLS.get("spaceType") + " = ?" + " ,"; - values.add(format(ssTO.getSpaceType())); - } - if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { - query += " " + COLS.get("total_size") + " = ?" + " ,"; - values.add(format(ssTO.getTotalSize())); - } - if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { - query += " " + COLS.get("guar_size") + " = ?" + " ,"; - values.add(format(ssTO.getGuaranteedSize())); - } - if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { - query += " " + COLS.get("free_size") + " = ?" + " ,"; - values.add(format(ssTO.getFreeSize())); - } - if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { - query += " " + COLS.get("used_size") + " = ?" + " ,"; - values.add(format(ssTO.getUsedSize())); - } - if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { - query += " " + COLS.get("busy_size") + " = ?" + " ,"; - values.add(format(ssTO.getBusySize())); - } - if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { - query += " " + COLS.get("unavailable_size") + " = ?" + " ,"; - values.add(format(ssTO.getUnavailableSize())); - } - if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { - query += " " + COLS.get("available_size") + " = ?" + " ,"; - values.add(format(ssTO.getAvailableSize())); - } - if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { - query += " " + COLS.get("reserved_size") + " = ?" 
+ " ,"; - values.add(format(ssTO.getReservedSize())); - } - if (ssTO.getUpdateTime() != null) { - query += " " + COLS.get("update_time") + " = ?" + " ,"; - values.add(format(ssTO.getUpdateTime())); - } - if (query.charAt(query.length() - 1) == ',') { - query = query.substring(0, query.length() - 1); - } - query += " where " + COLS.get("token") + " = ?"; - - values.add(format(format(ssTO.getSpaceToken()))); - - PreparedStatement preparedStatement = conn.prepareStatement(query); - - int index = 1; - for (String val : values) { - preparedStatement.setString(index, val); - index++; - } - - return preparedStatement; - } - - /** - * - * @param token - * String - * @param freeSpace - * long - * @return String - * @throws SQLException - */ - public PreparedStatement updateFreeSpaceByTokenQuery(Connection conn, - String token, long freeSpace, Date updateTimestamp) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "UPDATE storage_space SET free_size=?" + " , " + "UPDATE_TIME=?" - + " WHERE space_token=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setLong(1, freeSpace); - preparedStatement.setString(2, format(updateTimestamp)); - preparedStatement.setString(3, token); - - return preparedStatement; - } - - public PreparedStatement increaseUsedSpaceByTokenQuery(Connection conn, - String token, long usedSpaceToAdd) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "UPDATE storage_space " - + " SET USED_SIZE = USED_SIZE + ?, BUSY_SIZE = BUSY_SIZE + ?, " - + " FREE_SIZE = FREE_SIZE - ?, AVAILABLE_SIZE = AVAILABLE_SIZE - ?, " - + " UPDATE_TIME = NOW() " - + " WHERE space_token=? AND USED_SIZE + ? <= TOTAL_SIZE "; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setLong(1, usedSpaceToAdd); - preparedStatement.setLong(2, usedSpaceToAdd); - preparedStatement.setLong(3, usedSpaceToAdd); - preparedStatement.setLong(4, usedSpaceToAdd); - preparedStatement.setString(5, token); - preparedStatement.setLong(6, usedSpaceToAdd); - - return preparedStatement; - - } - - public PreparedStatement decreaseUsedSpaceByTokenQuery(Connection conn, - String token, long usedSpaceToRemove) - throws SQLException { + private static final String TABLE_NAME = "storage_space"; + private static final HashMap COLS = new HashMap(); + + private static final String[] COLUMN_NAMES = { + "SS_ID", + "USERDN", + "VOGROUP", + "ALIAS", + "SPACE_TOKEN", + "CREATED", + "TOTAL_SIZE", + "GUAR_SIZE", + "FREE_SIZE", + "SPACE_FILE", + "STORAGE_INFO", + "LIFETIME", + "SPACE_TYPE", + "USED_SIZE", + "BUSY_SIZE", + "UNAVAILABLE_SIZE", + "AVAILABLE_SIZE", + "RESERVED_SIZE", + "UPDATE_TIME" + }; + + static { + COLS.put("storageSpaceId", "SS_ID"); + COLS.put("ownerName", "USERDN"); + COLS.put("ownerVO", "VOGROUP"); + COLS.put("alias", "ALIAS"); + COLS.put("token", "SPACE_TOKEN"); + COLS.put("created", "CREATED"); + COLS.put("spaceFile", "SPACE_FILE"); + COLS.put("storaqeInfo", "STORAGE_INFO"); + COLS.put("lifeTime", "LIFETIME"); + COLS.put("spaceType", "SPACE_TYPE"); + COLS.put("total_size", "TOTAL_SIZE"); + COLS.put("guar_size", "GUAR_SIZE"); + COLS.put("free_size", "FREE_SIZE"); + COLS.put("used_size", "USED_SIZE"); + COLS.put("busy_size", "BUSY_SIZE"); + COLS.put("unavailable_size", "UNAVAILABLE_SIZE"); + COLS.put("available_size", "AVAILABLE_SIZE"); + COLS.put("reserved_size", "RESERVED_SIZE"); + COLS.put("update_time", "UPDATE_TIME"); + } + + /** CONSTRUCTOR */ + public 
StorageSpaceSQLHelper(String dbmsVendor) { + + super(dbmsVendor); + } + + /** @return String[] */ + public String[] getColumnNames() { + + return COLUMN_NAMES; + } + + /** + * INSERT NEW ROW into TABLE + * + * @param ssTO StorageSpaceTO + * @return String + * @throws SQLException + */ + public PreparedStatement insertQuery(Connection conn, StorageSpaceTO ssTO) throws SQLException { + + List values = new LinkedList(); + + StringBuilder fields = new StringBuilder("("); + StringBuilder placeholders = new StringBuilder("("); + + if (ssTO != null) { + if (ssTO.getOwnerName() != null) { + fields.append(COLS.get("ownerName") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getOwnerName())); + } + + fields.append(COLS.get("ownerVO") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getVoName())); + + if (ssTO.getAlias() != null) { + fields.append(COLS.get("alias") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getAlias())); + } + if (ssTO.getSpaceToken() != null) { + fields.append(COLS.get("token") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getSpaceToken())); + } + if (ssTO.getCreated() != null) { + fields.append(COLS.get("created") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getCreated())); + } + if (ssTO.getSpaceFile() != null) { + fields.append(COLS.get("spaceFile") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getSpaceFile())); + } + if (ssTO.getStorageInfo() != null) { + fields.append(COLS.get("storaqeInfo") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getStorageInfo())); + } + if (ssTO.getLifetime() != -1) { + fields.append(COLS.get("lifeTime") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getLifetime())); + } + if (ssTO.getSpaceType() != null) { + fields.append(COLS.get("spaceType") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getSpaceType())); + } + if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { + fields.append(COLS.get("total_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getTotalSize())); + } + if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { + fields.append(COLS.get("guar_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getGuaranteedSize())); + } + if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { + fields.append(COLS.get("free_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getFreeSize())); + } + if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { + fields.append(COLS.get("used_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getUsedSize())); + } + if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { + fields.append(COLS.get("busy_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getBusySize())); + } + if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { + fields.append(COLS.get("unavailable_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getUnavailableSize())); + } + + if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { + fields.append(COLS.get("available_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getAvailableSize())); + } + if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { + fields.append(COLS.get("reserved_size") + (",")); + placeholders.append("?,"); + values.add(format(ssTO.getReservedSize())); + } + if 
(ssTO.getUpdateTime() != null) { + fields.append(COLS.get("update_time").concat(",")); + placeholders.append("?,"); + values.add(format(ssTO.getUpdateTime())); + } + } - String str = null; - PreparedStatement preparedStatement = null; + fields.deleteCharAt(fields.length() - 1); + fields.append(")"); + placeholders.deleteCharAt(placeholders.length() - 1); + placeholders.append(")"); + + String str = + "INSERT INTO " + + TABLE_NAME + + " " + + fields.toString() + + " VALUES " + + placeholders.toString(); + PreparedStatement preparedStatement = conn.prepareStatement(str); + + int index = 1; + for (String val : values) { + preparedStatement.setString(index, val); + index++; + } + + return preparedStatement; + } + + /** + * Create a StorageSpace Transfer Object coming from Result Set + * + * @param res ResultSet + * @return StorageSpaceTO + */ + public StorageSpaceTO makeStorageSpaceTO(ResultSet res) { + + StorageSpaceTO ssTO = new StorageSpaceTO(); + + try { + ssTO.setStorageSpaceId(new Long(res.getLong("SS_ID"))); + + ssTO.setOwnerName(res.getString("USERDN")); + ssTO.setVoName(res.getString("VOGROUP")); + ssTO.setAlias(res.getString("ALIAS")); + ssTO.setSpaceToken(res.getString("SPACE_TOKEN")); + + java.sql.Timestamp createdTimeStamp = res.getTimestamp("CREATED"); + Date creationDate = new Date(createdTimeStamp.getTime()); + ssTO.setCreated(creationDate); + + ssTO.setSpaceFile(res.getString("SPACE_FILE")); + ssTO.setStorageInfo(res.getString("STORAGE_INFO")); + long tempLong = res.getLong("LIFETIME"); + if (!res.wasNull()) { + ssTO.setLifetime(tempLong); + } + + ssTO.setSpaceType(res.getString("SPACE_TYPE")); + + // Sizes + tempLong = res.getLong("TOTAL_SIZE"); + if (!res.wasNull()) { + ssTO.setTotalSize(tempLong); + } + tempLong = res.getLong("GUAR_SIZE"); + if (!res.wasNull()) { + ssTO.setGuaranteedSize(tempLong); + } + tempLong = res.getLong("RESERVED_SIZE"); + if (!res.wasNull()) { + ssTO.setReservedSize(tempLong); + } + tempLong = res.getLong("FREE_SIZE"); + if (!res.wasNull()) { + ssTO.setFreeSize(tempLong); + } + tempLong = res.getLong("AVAILABLE_SIZE"); + if (!res.wasNull()) { + ssTO.setAvailableSize(tempLong); + } + tempLong = res.getLong("USED_SIZE"); + if (!res.wasNull()) { + ssTO.setUsedSize(tempLong); + } + tempLong = res.getLong("BUSY_SIZE"); + if (!res.wasNull()) { + ssTO.setBusySize(tempLong); + } + tempLong = res.getLong("UNAVAILABLE_SIZE"); + if (!res.wasNull()) { + ssTO.setUnavailableSize(tempLong); + } + + // Last Update + java.sql.Timestamp updatedTimeStamp = res.getTimestamp("UPDATE_TIME"); + Date updateDate = new Date(updatedTimeStamp.getTime()); + ssTO.setUpdateTime(updateDate); + + } catch (SQLException ex) { + ex.printStackTrace(); + } - str = "UPDATE storage_space " - + " SET USED_SIZE = USED_SIZE - ?, BUSY_SIZE = BUSY_SIZE - ?, " - + " FREE_SIZE = FREE_SIZE + ?, AVAILABLE_SIZE = AVAILABLE_SIZE + ?, " - + " UPDATE_TIME = NOW() " - + " WHERE space_token=? AND USED_SIZE - ? 
>= 0 "; + return ssTO; + } + // ************ HELPER Method *************** // + + /** + * @param vo + * @return + */ + private String getVOName(String vo) { + + String voStr = VO.makeNoVo().getValue(); + if (vo != null && !vo.trim().equals("")) { + voStr = vo.trim(); + } + return voStr; + } + + /** + * @param token String + * @param conn + * @return String + * @throws SQLException + */ + public PreparedStatement selectByTokenQuery(Connection conn, String token) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = "SELECT * FROM storage_space where space_token=?"; + preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, token); + + return preparedStatement; + } + + /** + * Returns the SQL string for selecting all columns from the table 'storage_space' in the + * 'storm_be_ISAM' database matching 'user' and 'spaceAlias'. 'spaceAlias' can be NULL or empty. + * + * @param user VomsGridUser. + * @param spaceAlias String. + * @return String. + * @throws SQLException + */ + public PreparedStatement selectBySpaceAliasQuery( + Connection conn, GridUserInterface user, String spaceAlias) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + String dn = user.getDn(); + + if ((spaceAlias == null) || (spaceAlias.length() == 0)) { + str = "SELECT * FROM storage_space where userdn=?"; preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, dn); + } else { + str = "SELECT * FROM storage_space where userdn=? AND alias=?"; + preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, dn); + preparedStatement.setString(2, spaceAlias); + } + return preparedStatement; + } + + /** + * Returns the SQL string for selecting all columns from the table 'storage_space' in the + * 'storm_be_ISAM' database matching 'user' and 'spaceAlias'. 'spaceAlias' can be NULL or empty. + * + * @param user VomsGridUser. + * @param spaceAlias String. + * @return String. + * @throws SQLException + */ + public PreparedStatement selectBySpaceAliasOnlyQuery(Connection conn, String spaceAlias) + throws SQLException { + + /* + * This is to distinguish a client reseve space with a VOSpaceArea both with + * the same token. Only the one made by the namespace process contains a + * fake dn + */ + + String str = null; + PreparedStatement preparedStatement = null; + + str = "SELECT * FROM storage_space where alias=?"; + preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, spaceAlias); + + return preparedStatement; + } + + /** + * Returns the SQL string for selecting all columns from the table 'storage_space' in the + * 'storm_be_ISAM' database matching 'voname'. + * + * @param voname string + * @return String. + * @throws SQLException + */ + public PreparedStatement selectBySpaceType(Connection conn, String voname) throws SQLException { + + /* + * This is to distinguish a client reseve space with a VOSpaceArea both with + * the same token. Only the one made by the namespace process contains a + * fake dn + */ + + String str = null; + PreparedStatement preparedStatement = null; + + str = "SELECT * FROM storage_space where SPACE_TYPE=?"; + preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, voname); + + return preparedStatement; + } + + /** + * This method return the SQL query to evaluate all expired space reservation requests. 
+ * + * @param time Current time (in second) to compare to the reservationTime + lifetime + * @return String SQL query + * @throws SQLException + */ + public PreparedStatement selectExpiredQuery(Connection conn, long currentTimeInSecond) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM storage_space where lifetime is not null and (UNIX_TIMESTAMP(created)+lifetime< ?)"; + preparedStatement = conn.prepareStatement(str); + preparedStatement.setLong(1, currentTimeInSecond); + + return preparedStatement; + } + + /** + * @param size + * @return + * @throws SQLException + */ + public PreparedStatement selectByUnavailableUsedSpaceSizeQuery( + Connection conn, long unavailableSizeValue) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM storage_space where " + + COLS.get("used_size") + + " IS NULL or " + + COLS.get("used_size") + + "=?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setLong(1, unavailableSizeValue); + + return preparedStatement; + } + + /** + * @param lastUpdateTimestamp + * @return + * @throws SQLException + */ + public PreparedStatement selectByPreviousOrNullLastUpdateQuery( + Connection conn, long lastUpdateTimestamp) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM storage_space where " + + COLS.get("update_time") + + " IS NULL or UNIX_TIMESTAMP(" + + COLS.get("update_time") + + ") < ?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setLong(1, lastUpdateTimestamp); + + return preparedStatement; + } + + /** + * Returns the SQL query for removing a row from the table 'storage_space' in the 'storm_be_ISAM' + * database matching 'userDN' and 'spaceToken'. + * + * @param user + * @param spaceToken + * @return + * @throws SQLException + */ + public PreparedStatement removeByTokenQuery( + Connection conn, GridUserInterface user, String spaceToken) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = "DELETE FROM storage_space WHERE ((USERDN=?) AND (SPACE_TOKEN=?))"; + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setString(1, user.getDn()); + preparedStatement.setString(2, spaceToken); + + return preparedStatement; + } + + /** + * Returns the SQL query for removing a row from the table 'storage_space' in the 'storm_be_ISAM' + * database matching 'spaceToken'. 
+ * + * @param spaceToken + * @return + * @throws SQLException + */ + public PreparedStatement removeByTokenQuery(Connection conn, String spaceToken) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; - preparedStatement.setLong(1, usedSpaceToRemove); - preparedStatement.setLong(2, usedSpaceToRemove); - preparedStatement.setLong(3, usedSpaceToRemove); - preparedStatement.setLong(4, usedSpaceToRemove); - preparedStatement.setString(5, token); - preparedStatement.setLong(6, usedSpaceToRemove); + str = "DELETE FROM storage_space WHERE (SPACE_TOKEN=?)"; + preparedStatement = conn.prepareStatement(str); - return preparedStatement; + preparedStatement.setString(1, spaceToken); + return preparedStatement; + } + + /** + * Provides a query that updates all row fields accordingly to the provided StorageSpaceTO + * + * @param ssTO + * @return + * @throws IllegalArgumentException + * @throws SQLException + */ + public PreparedStatement updateByAliasAndTokenQuery(Connection conn, StorageSpaceTO ssTO) + throws IllegalArgumentException, SQLException { + + List values = new LinkedList(); + + if (ssTO == null) { + throw new IllegalArgumentException(); } + String query = "UPDATE storage_space SET"; + if (ssTO.getOwnerName() != null) { + query += " " + COLS.get("ownerName") + " = ?" + " ,"; + values.add(format(ssTO.getOwnerName())); + } + + query += " " + COLS.get("ownerVO") + " = ?" + " ,"; + values.add(format(getVOName(ssTO.getVoName()))); + + if (ssTO.getCreated() != null) { + query += " " + COLS.get("created") + " = ?" + " ,"; + values.add(format(ssTO.getCreated())); + } + if (ssTO.getSpaceFile() != null) { + query += " " + COLS.get("spaceFile") + " = ?" + " ,"; + values.add(format(ssTO.getSpaceFile())); + } + if (ssTO.getStorageInfo() != null) { + query += " " + COLS.get("storaqeInfo") + " = ?" + " ,"; + values.add(format(ssTO.getStorageInfo())); + } + if (ssTO.getLifetime() != -1) { + query += " " + COLS.get("lifeTime") + " = ?" + " ,"; + values.add(format(ssTO.getLifetime())); + } + if (ssTO.getSpaceType() != null) { + query += " " + COLS.get("spaceType") + " = ?" + " ,"; + values.add(format(ssTO.getSpaceType())); + } + if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { + query += " " + COLS.get("total_size") + " = ?" + " ,"; + values.add(format(ssTO.getTotalSize())); + } + if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { + query += " " + COLS.get("guar_size") + " = ?" + " ,"; + values.add(format(ssTO.getGuaranteedSize())); + } + if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { + query += " " + COLS.get("free_size") + " = ?" + " ,"; + values.add(format(ssTO.getFreeSize())); + } + if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { + query += " " + COLS.get("used_size") + " = ?" + " ,"; + values.add(format(ssTO.getUsedSize())); + } + if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { + query += " " + COLS.get("busy_size") + " = ?" + " ,"; + values.add(format(ssTO.getBusySize())); + } + if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { + query += " " + COLS.get("unavailable_size") + " = ?" + " ,"; + values.add(format(ssTO.getUnavailableSize())); + } + if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { + query += " " + COLS.get("available_size") + " = ?" + " ,"; + values.add(format(ssTO.getAvailableSize())); + } + if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { + query += " " + COLS.get("reserved_size") + " = ?" 
+ " ,"; + values.add(format(ssTO.getReservedSize())); + } + if (ssTO.getUpdateTime() != null) { + query += " " + COLS.get("update_time") + " = ?" + " ,"; + values.add(format(ssTO.getUpdateTime())); + } + if (query.charAt(query.length() - 1) == ',') { + query = query.substring(0, query.length() - 1); + } + query += " where " + COLS.get("alias") + " = ?" + " and " + COLS.get("token") + " = ?"; + + values.add(format(ssTO.getAlias())); + values.add(format(ssTO.getSpaceToken())); + + PreparedStatement preparedStatement = conn.prepareStatement(query); + + int index = 1; + for (String val : values) { + preparedStatement.setString(index, val); + index++; + } + + return preparedStatement; + } + + /** + * Provides a query that updates all row fields accordingly to the provided StorageSpaceTO and + * using SpaceToken as key + * + * @param ssTO + * @return + * @throws IllegalArgumentException + * @throws SQLException + */ + public PreparedStatement updateByTokenQuery(Connection conn, StorageSpaceTO ssTO) + throws IllegalArgumentException, SQLException { + + List values = new LinkedList(); + + if (ssTO == null) { + throw new IllegalArgumentException(); + } + String query = "UPDATE storage_space SET"; + if (ssTO.getOwnerName() != null) { + query += " " + COLS.get("ownerName") + " = ?" + " ,"; + values.add(format(ssTO.getOwnerName())); + } + + query += " " + COLS.get("ownerVO") + " = ?" + " ,"; + values.add((getVOName(ssTO.getVoName()))); + + if (ssTO.getCreated() != null) { + query += " " + COLS.get("created") + " = ?" + " ,"; + values.add(format(ssTO.getCreated())); + } + if (ssTO.getAlias() != null) { + query += " " + COLS.get("alias") + " = ?" + " ,"; + values.add(format(ssTO.getAlias())); + } + if (ssTO.getSpaceFile() != null) { + query += " " + COLS.get("spaceFile") + " = ?" + " ,"; + values.add(format(ssTO.getSpaceFile())); + } + if (ssTO.getStorageInfo() != null) { + query += " " + COLS.get("storaqeInfo") + " = ?" + " ,"; + values.add(format(ssTO.getStorageInfo())); + } + if (ssTO.getLifetime() != -1) { + query += " " + COLS.get("lifeTime") + " = ?" + " ,"; + values.add(format(ssTO.getLifetime())); + } + if (ssTO.getSpaceType() != null) { + query += " " + COLS.get("spaceType") + " = ?" + " ,"; + values.add(format(ssTO.getSpaceType())); + } + if ((ssTO.getTotalSize() != 0) || (ssTO.getTotalSize() != -1)) { + query += " " + COLS.get("total_size") + " = ?" + " ,"; + values.add(format(ssTO.getTotalSize())); + } + if ((ssTO.getGuaranteedSize() != 0) || (ssTO.getGuaranteedSize() != -1)) { + query += " " + COLS.get("guar_size") + " = ?" + " ,"; + values.add(format(ssTO.getGuaranteedSize())); + } + if ((ssTO.getFreeSize() != 0) || (ssTO.getFreeSize() != -1)) { + query += " " + COLS.get("free_size") + " = ?" + " ,"; + values.add(format(ssTO.getFreeSize())); + } + if ((ssTO.getUsedSize() != 0) || (ssTO.getUsedSize() != -1)) { + query += " " + COLS.get("used_size") + " = ?" + " ,"; + values.add(format(ssTO.getUsedSize())); + } + if ((ssTO.getBusySize() != 0) || (ssTO.getBusySize() != -1)) { + query += " " + COLS.get("busy_size") + " = ?" + " ,"; + values.add(format(ssTO.getBusySize())); + } + if ((ssTO.getUnavailableSize() != 0) || (ssTO.getUnavailableSize() != -1)) { + query += " " + COLS.get("unavailable_size") + " = ?" + " ,"; + values.add(format(ssTO.getUnavailableSize())); + } + if ((ssTO.getAvailableSize() != 0) || (ssTO.getAvailableSize() != -1)) { + query += " " + COLS.get("available_size") + " = ?" 
+ " ,"; + values.add(format(ssTO.getAvailableSize())); + } + if ((ssTO.getReservedSize() != 0) || (ssTO.getReservedSize() != -1)) { + query += " " + COLS.get("reserved_size") + " = ?" + " ,"; + values.add(format(ssTO.getReservedSize())); + } + if (ssTO.getUpdateTime() != null) { + query += " " + COLS.get("update_time") + " = ?" + " ,"; + values.add(format(ssTO.getUpdateTime())); + } + if (query.charAt(query.length() - 1) == ',') { + query = query.substring(0, query.length() - 1); + } + query += " where " + COLS.get("token") + " = ?"; + + values.add(format(format(ssTO.getSpaceToken()))); + + PreparedStatement preparedStatement = conn.prepareStatement(query); + + int index = 1; + for (String val : values) { + preparedStatement.setString(index, val); + index++; + } + + return preparedStatement; + } + + /** + * @param token String + * @param freeSpace long + * @return String + * @throws SQLException + */ + public PreparedStatement updateFreeSpaceByTokenQuery( + Connection conn, String token, long freeSpace, Date updateTimestamp) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = "UPDATE storage_space SET free_size=?" + " , " + "UPDATE_TIME=?" + " WHERE space_token=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setLong(1, freeSpace); + preparedStatement.setString(2, format(updateTimestamp)); + preparedStatement.setString(3, token); + + return preparedStatement; + } + + public PreparedStatement increaseUsedSpaceByTokenQuery( + Connection conn, String token, long usedSpaceToAdd) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "UPDATE storage_space " + + " SET USED_SIZE = USED_SIZE + ?, BUSY_SIZE = BUSY_SIZE + ?, " + + " FREE_SIZE = FREE_SIZE - ?, AVAILABLE_SIZE = AVAILABLE_SIZE - ?, " + + " UPDATE_TIME = NOW() " + + " WHERE space_token=? AND USED_SIZE + ? <= TOTAL_SIZE "; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setLong(1, usedSpaceToAdd); + preparedStatement.setLong(2, usedSpaceToAdd); + preparedStatement.setLong(3, usedSpaceToAdd); + preparedStatement.setLong(4, usedSpaceToAdd); + preparedStatement.setString(5, token); + preparedStatement.setLong(6, usedSpaceToAdd); + + return preparedStatement; + } + + public PreparedStatement decreaseUsedSpaceByTokenQuery( + Connection conn, String token, long usedSpaceToRemove) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "UPDATE storage_space " + + " SET USED_SIZE = USED_SIZE - ?, BUSY_SIZE = BUSY_SIZE - ?, " + + " FREE_SIZE = FREE_SIZE + ?, AVAILABLE_SIZE = AVAILABLE_SIZE + ?, " + + " UPDATE_TIME = NOW() " + + " WHERE space_token=? AND USED_SIZE - ? 
>= 0 "; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setLong(1, usedSpaceToRemove); + preparedStatement.setLong(2, usedSpaceToRemove); + preparedStatement.setLong(3, usedSpaceToRemove); + preparedStatement.setLong(4, usedSpaceToRemove); + preparedStatement.setString(5, token); + preparedStatement.setLong(6, usedSpaceToRemove); + return preparedStatement; + } } diff --git a/src/main/java/it/grid/storm/persistence/util/helper/TapeRecallMySQLHelper.java b/src/main/java/it/grid/storm/persistence/util/helper/TapeRecallMySQLHelper.java index 5bb0fb9b..6354af4f 100644 --- a/src/main/java/it/grid/storm/persistence/util/helper/TapeRecallMySQLHelper.java +++ b/src/main/java/it/grid/storm/persistence/util/helper/TapeRecallMySQLHelper.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.persistence.util.helper; import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.persistence.util.db.SQLHelper; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; @@ -17,560 +15,720 @@ public class TapeRecallMySQLHelper extends SQLHelper { - private static final String TABLE_NAME = "tape_recall"; - - // primary key COL_TASK_ID + COL_REQUEST_TOKEN - public static final String COL_TASK_ID = "taskId"; - public static final String COL_REQUEST_TOKEN = "requestToken"; - public static final String COL_REQUEST_TYPE = "requestType"; - public static final String COL_FILE_NAME = "fileName"; - public static final String COL_PIN_LIFETIME = "pinLifetime"; - public static final String COL_STATUS = "status"; - public static final String COL_USER_ID = "userID"; - public static final String COL_VO_NAME = "voName"; - public static final String COL_DATE = "timeStamp"; - public static final String COL_RETRY_ATTEMPT = "retryAttempt"; - public static final String COL_DEFERRED_STARTTIME = "deferredStartTime"; - public static final String COL_GROUP_TASK_ID = "groupTaskId"; - public static final String COL_IN_PROGRESS_DATE = "inProgressTime"; - public static final String COL_FINAL_STATUS_DATE = "finalStatusTime"; - - private static final String QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS; - private static final String QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS; - - static { - - QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS = - "DELETE FROM tape_recall WHERE status<>1 AND status<>2 " - + "AND timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? SECOND) " - + "LIMIT ?"; - - QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS = - "DELETE FROM tape_recall WHERE status<>1 AND status<>2 " - + "AND timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? 
SECOND) "; - } - - public TapeRecallMySQLHelper(String dbmsVendor) { - - super(dbmsVendor); - } - - /** - * Verifies if the given string is the name of one of the timestamp columns - * - * @param columnName - * @return - */ - private static boolean validTimestampColumnName(String columnName) { - - return COL_DATE.equals(columnName) - || COL_IN_PROGRESS_DATE.equals(columnName) - || COL_FINAL_STATUS_DATE.equals(columnName); - } - - /** - * @param conn - * @param recallTask - * @return a PreparedStatement for the requested query - */ - public PreparedStatement getQueryInsertTask(Connection conn, - TapeRecallTO recallTask) { - - if (recallTask == null) { - return null; - } - - String query = "INSERT INTO " + TABLE_NAME + " (" + COL_TASK_ID + ", " - + COL_REQUEST_TOKEN + ", " + COL_REQUEST_TYPE + ", " + COL_FILE_NAME - + ", " + COL_PIN_LIFETIME + ", " + COL_STATUS + ", " + COL_VO_NAME + ", " - + COL_USER_ID + ", " + COL_RETRY_ATTEMPT + ", " + COL_DEFERRED_STARTTIME - + ", " + COL_DATE + ", " + COL_GROUP_TASK_ID - + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - - try { - PreparedStatement prepStat = conn.prepareStatement(query); - - int idx = 1; - prepStat.setString(idx++, recallTask.getTaskId().toString()); - prepStat.setString(idx++, recallTask.getRequestToken().getValue()); - prepStat.setString(idx++, recallTask.getRequestType().name()); - prepStat.setString(idx++, recallTask.getFileName()); - prepStat.setInt(idx++, recallTask.getPinLifetime()); - prepStat.setInt(idx++, recallTask.getStatusId()); - - prepStat.setString(idx++, recallTask.getVoName()); - prepStat.setString(idx++, recallTask.getUserID()); - prepStat.setInt(idx++, recallTask.getRetryAttempt()); - prepStat.setTimestamp(idx++, new java.sql.Timestamp(recallTask - .getDeferredRecallInstant().getTime())); - prepStat.setTimestamp(idx++, new java.sql.Timestamp(recallTask - .getInsertionInstant().getTime())); - prepStat.setString(idx++, recallTask.getGroupTaskId().toString()); - return prepStat; - - } catch (SQLException e) { - return null; - } - } - - /** - * @param taskId - * @param requestToken - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetTask(Connection conn, UUID taskId, - String requestToken) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_TASK_ID + "=?" 
- + " AND " + COL_REQUEST_TOKEN + "=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setString(1, taskId.toString()); - preparedStatement.setString(2, requestToken); - - return preparedStatement; - } - - /** - * @param groupTaskId - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetGroupTasks(Connection conn, - UUID groupTaskId) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_GROUP_TASK_ID + "=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setString(1, groupTaskId.toString()); - - return preparedStatement; - } - - /** - * @param taskId - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetGroupTaskIds(Connection conn, UUID taskId) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT DISTINCT " + COL_GROUP_TASK_ID + " , " + COL_STATUS + " , " - + COL_IN_PROGRESS_DATE + " , " + COL_FINAL_STATUS_DATE + " FROM " - + TABLE_NAME + " WHERE " + COL_TASK_ID + "=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setString(1, taskId.toString()); - - return preparedStatement; - } - - /** - * @param taskId - * @param statuses - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetGroupTaskIds(Connection conn, - UUID taskId, int[] statuses) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT DISTINCT " + COL_GROUP_TASK_ID + " , " + COL_STATUS + " , " - + COL_IN_PROGRESS_DATE + " , " + COL_FINAL_STATUS_DATE + " FROM " - + TABLE_NAME + " WHERE " + COL_TASK_ID + "=?" + " AND " + COL_STATUS - + " IN ( "; - - boolean first = true; - for (int status : statuses) { - if (first) { - first = false; - } else { - str += " , "; - } - str += status; - } - str += " )"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setString(1, taskId.toString()); - - return preparedStatement; - } - - /** - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryNumberQueued(Connection conn) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - - return preparedStatement; - } - - /** - * @param voName - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryNumberQueued(Connection conn, String voName) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?" 
+ " AND " + COL_VO_NAME + "=?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - preparedStatement.setString(2, voName); - - return preparedStatement; - } - - /** - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryReadyForTakeOver(Connection conn) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?" + " AND " + COL_DEFERRED_STARTTIME - + "<=NOW()"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - - return preparedStatement; - } - - /** - * @param voName - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryReadyForTakeOver(Connection conn, - String voName) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?" + " AND " + COL_VO_NAME + "=?" + " AND " - + COL_DEFERRED_STARTTIME + "<=NOW()"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - preparedStatement.setString(2, voName); - - return preparedStatement; - } - - /** - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryNumberInProgress(Connection conn) - throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); - - return preparedStatement; - } - - /** - * @param voName - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryNumberInProgress(Connection conn, - String voName) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT COUNT(DISTINCT " + COL_GROUP_TASK_ID + ") FROM " + TABLE_NAME - + " WHERE " + COL_STATUS + "=?" + " AND " + COL_VO_NAME + "=?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); - preparedStatement.setString(2, voName); - - return preparedStatement; - } - - /** - * @param numberOfTasks - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetTakeoverTasksWithDoubles(Connection conn, - int numberOfTasks) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_STATUS + "=?" 
- + " AND " + COL_DEFERRED_STARTTIME + "<=NOW() ORDER BY " - + COL_DEFERRED_STARTTIME + " LIMIT ?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - preparedStatement.setInt(2, numberOfTasks); - - return preparedStatement; - } - - /** - * @param numberOfTasks - * @param voName - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQueryGetTakeoverTasksWithDoubles(Connection conn, - int numberOfTasks, String voName) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_STATUS + "=?" - + " AND " + COL_VO_NAME + "=?" + " AND " + COL_DEFERRED_STARTTIME - + "<=NOW() ORDER BY " + COL_DEFERRED_STARTTIME + " LIMIT ?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); - preparedStatement.setString(2, voName); - preparedStatement.setInt(3, numberOfTasks); - - return preparedStatement; - } - - /** - * Creates the query string for looking up all the information related to in - * progress tasks in the recall database. - * - * @param numberOfTasks - * the maximum number of task returned - * @return the query string - * @throws SQLException - */ - public PreparedStatement getQueryGetAllTasksInProgress(Connection conn, - int numberOfTasks) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_STATUS + "=?" - + " ORDER BY " + COL_IN_PROGRESS_DATE + " ASC LIMIT ?"; - - preparedStatement = conn.prepareStatement(str); - preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); - preparedStatement.setInt(2, numberOfTasks); - - return preparedStatement; - - } - - /** - * @param taskList - * @param date - * @param j - * @return - * @throws SQLException - */ - public PreparedStatement getQueryUpdateTasksStatus(Connection conn, - List taskList, int statusId, String timestampColumn, - Date timestamp) throws IllegalArgumentException, SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - if (taskList.size() == 0) { - return null; - } - if (validTimestampColumnName(timestampColumn)) { - str = "UPDATE " + TABLE_NAME + " SET " + COL_STATUS + "=?" + " , " - + timestampColumn + "=?" + " WHERE " + COL_GROUP_TASK_ID + "=?"; - - for (int i = 1; i < taskList.size(); i++) { - str += " OR " + COL_GROUP_TASK_ID + "=?"; - } - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setInt(1, statusId); - preparedStatement.setTimestamp(2, - new java.sql.Timestamp(timestamp.getTime())); - preparedStatement.setString(3, taskList.get(0).getGroupTaskId() - .toString()); - - int idx = 4; - for (int i = 1; i < taskList.size(); i++) { - preparedStatement.setString(idx, taskList.get(i).getGroupTaskId() - .toString()); - idx++; - } - } else { - throw new IllegalArgumentException( - "Unable to update row status and timestamp. 
The priovided timestamp column \'" - + timestampColumn + "\' is not valid"); - } - - return preparedStatement; - } - - /** - * @param groupTaskId - * @param status - * @param timestampColumn - * @param timestamp - * @return - * @throws IllegalArgumentException - * @throws SQLException - */ - public PreparedStatement getQueryUpdateGroupTaskStatus(Connection conn, - UUID groupTaskId, int status, String timestampColumn, Date timestamp) - throws IllegalArgumentException, SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - if (validTimestampColumnName(timestampColumn)) { - str = "UPDATE " + TABLE_NAME + " SET " + COL_STATUS + "=?" + " , " - + timestampColumn + "=?" + " WHERE " + COL_GROUP_TASK_ID + "=?" - + " AND " + COL_STATUS + "!=?"; - - } else { - throw new IllegalArgumentException( - "Unable to update row status and timestamp. The priovided timestamp column \'" - + timestampColumn + "\' is not valid"); - } - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setInt(1, status); - preparedStatement.setTimestamp(2, - new java.sql.Timestamp(timestamp.getTime())); - preparedStatement.setString(3, groupTaskId.toString()); - preparedStatement.setInt(4, status); - - return preparedStatement; - - } - - /** - * @param groupTaskId - * @param status - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQuerySetGroupTaskStatus(Connection conn, - UUID groupTaskId, int status) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "UPDATE " + TABLE_NAME + " SET " + COL_STATUS + "=?" + " WHERE " - + COL_GROUP_TASK_ID + "=?" + " AND " + COL_STATUS + "!=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setInt(1, status); - preparedStatement.setString(2, groupTaskId.toString()); - preparedStatement.setInt(3, status); - - return preparedStatement; - } - - /** - * @param groupTaskId - * @param value - * @return the requested query as string - * @throws SQLException - */ - public PreparedStatement getQuerySetGroupTaskRetryValue(Connection conn, - UUID groupTaskId, int value) throws SQLException { - - String str = null; - PreparedStatement preparedStatement = null; - - str = "UPDATE " + TABLE_NAME + " SET " + COL_RETRY_ATTEMPT + "=?" 
- + " WHERE " + COL_GROUP_TASK_ID + "=?"; - - preparedStatement = conn.prepareStatement(str); - - preparedStatement.setInt(1, value); - preparedStatement.setString(2, groupTaskId.toString()); - - return preparedStatement; - } - - /** - * @param con - * @param expirationTime - * @return the requested query as @PreparedStatement - * @throws SQLException - */ - public PreparedStatement getQueryDeleteCompletedTasks(Connection con, long expirationTime) - throws SQLException { - - PreparedStatement ps = con.prepareStatement(QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS); - ps.setLong(1, expirationTime); - - return ps; - } - - /** - * @param con - * @param expirationTime - * @param maxNumTasks - * @return the requested query as @PreparedStatement - * @throws SQLException - */ - public PreparedStatement getQueryDeleteCompletedTasks(Connection con, long expirationTime, - int maxNumTasks) throws SQLException { - - PreparedStatement ps = con.prepareStatement(QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS); - - ps.setLong(1, expirationTime); - ps.setInt(2, maxNumTasks); - - return ps; - } - + private static final String TABLE_NAME = "tape_recall"; + + // primary key COL_TASK_ID + COL_REQUEST_TOKEN + public static final String COL_TASK_ID = "taskId"; + public static final String COL_REQUEST_TOKEN = "requestToken"; + public static final String COL_REQUEST_TYPE = "requestType"; + public static final String COL_FILE_NAME = "fileName"; + public static final String COL_PIN_LIFETIME = "pinLifetime"; + public static final String COL_STATUS = "status"; + public static final String COL_USER_ID = "userID"; + public static final String COL_VO_NAME = "voName"; + public static final String COL_DATE = "timeStamp"; + public static final String COL_RETRY_ATTEMPT = "retryAttempt"; + public static final String COL_DEFERRED_STARTTIME = "deferredStartTime"; + public static final String COL_GROUP_TASK_ID = "groupTaskId"; + public static final String COL_IN_PROGRESS_DATE = "inProgressTime"; + public static final String COL_FINAL_STATUS_DATE = "finalStatusTime"; + + private static final String QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS; + private static final String QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS; + + static { + QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS = + "DELETE FROM tape_recall WHERE status<>1 AND status<>2 " + + "AND timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? SECOND) " + + "LIMIT ?"; + + QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS = + "DELETE FROM tape_recall WHERE status<>1 AND status<>2 " + + "AND timeStamp <= DATE_SUB(CURRENT_TIMESTAMP(), INTERVAL ? 
SECOND) "; + } + + public TapeRecallMySQLHelper(String dbmsVendor) { + + super(dbmsVendor); + } + + /** + * Verifies if the given string is the name of one of the timestamp columns + * + * @param columnName + * @return + */ + private static boolean validTimestampColumnName(String columnName) { + + return COL_DATE.equals(columnName) + || COL_IN_PROGRESS_DATE.equals(columnName) + || COL_FINAL_STATUS_DATE.equals(columnName); + } + + /** + * @param conn + * @param recallTask + * @return a PreparedStatement for the requested query + */ + public PreparedStatement getQueryInsertTask(Connection conn, TapeRecallTO recallTask) { + + if (recallTask == null) { + return null; + } + + String query = + "INSERT INTO " + + TABLE_NAME + + " (" + + COL_TASK_ID + + ", " + + COL_REQUEST_TOKEN + + ", " + + COL_REQUEST_TYPE + + ", " + + COL_FILE_NAME + + ", " + + COL_PIN_LIFETIME + + ", " + + COL_STATUS + + ", " + + COL_VO_NAME + + ", " + + COL_USER_ID + + ", " + + COL_RETRY_ATTEMPT + + ", " + + COL_DEFERRED_STARTTIME + + ", " + + COL_DATE + + ", " + + COL_GROUP_TASK_ID + + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + + try { + PreparedStatement prepStat = conn.prepareStatement(query); + + int idx = 1; + prepStat.setString(idx++, recallTask.getTaskId().toString()); + prepStat.setString(idx++, recallTask.getRequestToken().getValue()); + prepStat.setString(idx++, recallTask.getRequestType().name()); + prepStat.setString(idx++, recallTask.getFileName()); + prepStat.setInt(idx++, recallTask.getPinLifetime()); + prepStat.setInt(idx++, recallTask.getStatusId()); + + prepStat.setString(idx++, recallTask.getVoName()); + prepStat.setString(idx++, recallTask.getUserID()); + prepStat.setInt(idx++, recallTask.getRetryAttempt()); + prepStat.setTimestamp( + idx++, new java.sql.Timestamp(recallTask.getDeferredRecallInstant().getTime())); + prepStat.setTimestamp( + idx++, new java.sql.Timestamp(recallTask.getInsertionInstant().getTime())); + prepStat.setString(idx++, recallTask.getGroupTaskId().toString()); + return prepStat; + + } catch (SQLException e) { + return null; + } + } + + /** + * @param taskId + * @param requestToken + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetTask(Connection conn, UUID taskId, String requestToken) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM " + + TABLE_NAME + + " WHERE " + + COL_TASK_ID + + "=?" 
+ + " AND " + + COL_REQUEST_TOKEN + + "=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setString(1, taskId.toString()); + preparedStatement.setString(2, requestToken); + + return preparedStatement; + } + + /** + * @param groupTaskId + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetGroupTasks(Connection conn, UUID groupTaskId) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = "SELECT * FROM " + TABLE_NAME + " WHERE " + COL_GROUP_TASK_ID + "=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setString(1, groupTaskId.toString()); + + return preparedStatement; + } + + /** + * @param taskId + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetGroupTaskIds(Connection conn, UUID taskId) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT DISTINCT " + + COL_GROUP_TASK_ID + + " , " + + COL_STATUS + + " , " + + COL_IN_PROGRESS_DATE + + " , " + + COL_FINAL_STATUS_DATE + + " FROM " + + TABLE_NAME + + " WHERE " + + COL_TASK_ID + + "=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setString(1, taskId.toString()); + + return preparedStatement; + } + + /** + * @param taskId + * @param statuses + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetGroupTaskIds(Connection conn, UUID taskId, int[] statuses) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT DISTINCT " + + COL_GROUP_TASK_ID + + " , " + + COL_STATUS + + " , " + + COL_IN_PROGRESS_DATE + + " , " + + COL_FINAL_STATUS_DATE + + " FROM " + + TABLE_NAME + + " WHERE " + + COL_TASK_ID + + "=?" + + " AND " + + COL_STATUS + + " IN ( "; + + boolean first = true; + for (int status : statuses) { + if (first) { + first = false; + } else { + str += " , "; + } + str += status; + } + str += " )"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setString(1, taskId.toString()); + + return preparedStatement; + } + + /** + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryNumberQueued(Connection conn) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + + return preparedStatement; + } + + /** + * @param voName + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryNumberQueued(Connection conn, String voName) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" 
+ + " AND " + + COL_VO_NAME + + "=?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + preparedStatement.setString(2, voName); + + return preparedStatement; + } + + /** + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryReadyForTakeOver(Connection conn) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" + + " AND " + + COL_DEFERRED_STARTTIME + + "<=NOW()"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + + return preparedStatement; + } + + /** + * @param voName + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryReadyForTakeOver(Connection conn, String voName) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" + + " AND " + + COL_VO_NAME + + "=?" + + " AND " + + COL_DEFERRED_STARTTIME + + "<=NOW()"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + preparedStatement.setString(2, voName); + + return preparedStatement; + } + + /** + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryNumberInProgress(Connection conn) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); + + return preparedStatement; + } + + /** + * @param voName + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryNumberInProgress(Connection conn, String voName) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT COUNT(DISTINCT " + + COL_GROUP_TASK_ID + + ") FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" + + " AND " + + COL_VO_NAME + + "=?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); + preparedStatement.setString(2, voName); + + return preparedStatement; + } + + /** + * @param numberOfTasks + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetTakeoverTasksWithDoubles(Connection conn, int numberOfTasks) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" 
+ + " AND " + + COL_DEFERRED_STARTTIME + + "<=NOW() ORDER BY " + + COL_DEFERRED_STARTTIME + + " LIMIT ?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + preparedStatement.setInt(2, numberOfTasks); + + return preparedStatement; + } + + /** + * @param numberOfTasks + * @param voName + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQueryGetTakeoverTasksWithDoubles( + Connection conn, int numberOfTasks, String voName) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" + + " AND " + + COL_VO_NAME + + "=?" + + " AND " + + COL_DEFERRED_STARTTIME + + "<=NOW() ORDER BY " + + COL_DEFERRED_STARTTIME + + " LIMIT ?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.QUEUED.getStatusId()); + preparedStatement.setString(2, voName); + preparedStatement.setInt(3, numberOfTasks); + + return preparedStatement; + } + + /** + * Creates the query string for looking up all the information related to in progress tasks in the + * recall database. + * + * @param numberOfTasks the maximum number of task returned + * @return the query string + * @throws SQLException + */ + public PreparedStatement getQueryGetAllTasksInProgress(Connection conn, int numberOfTasks) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "SELECT * FROM " + + TABLE_NAME + + " WHERE " + + COL_STATUS + + "=?" + + " ORDER BY " + + COL_IN_PROGRESS_DATE + + " ASC LIMIT ?"; + + preparedStatement = conn.prepareStatement(str); + preparedStatement.setInt(1, TapeRecallStatus.IN_PROGRESS.getStatusId()); + preparedStatement.setInt(2, numberOfTasks); + + return preparedStatement; + } + + /** + * @param taskList + * @param date + * @param j + * @return + * @throws SQLException + */ + public PreparedStatement getQueryUpdateTasksStatus( + Connection conn, + List taskList, + int statusId, + String timestampColumn, + Date timestamp) + throws IllegalArgumentException, SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + if (taskList.size() == 0) { + return null; + } + if (validTimestampColumnName(timestampColumn)) { + str = + "UPDATE " + + TABLE_NAME + + " SET " + + COL_STATUS + + "=?" + + " , " + + timestampColumn + + "=?" + + " WHERE " + + COL_GROUP_TASK_ID + + "=?"; + + for (int i = 1; i < taskList.size(); i++) { + str += " OR " + COL_GROUP_TASK_ID + "=?"; + } + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setInt(1, statusId); + preparedStatement.setTimestamp(2, new java.sql.Timestamp(timestamp.getTime())); + preparedStatement.setString(3, taskList.get(0).getGroupTaskId().toString()); + + int idx = 4; + for (int i = 1; i < taskList.size(); i++) { + preparedStatement.setString(idx, taskList.get(i).getGroupTaskId().toString()); + idx++; + } + } else { + throw new IllegalArgumentException( + "Unable to update row status and timestamp. 
The provided timestamp column \'" + + timestampColumn + + "\' is not valid"); + } + + return preparedStatement; + } + + /** + * @param groupTaskId + * @param status + * @param timestampColumn + * @param timestamp + * @return + * @throws IllegalArgumentException + * @throws SQLException + */ + public PreparedStatement getQueryUpdateGroupTaskStatus( + Connection conn, UUID groupTaskId, int status, String timestampColumn, Date timestamp) + throws IllegalArgumentException, SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + if (validTimestampColumnName(timestampColumn)) { + str = + "UPDATE " + + TABLE_NAME + + " SET " + + COL_STATUS + + "=?" + + " , " + + timestampColumn + + "=?" + + " WHERE " + + COL_GROUP_TASK_ID + + "=?" + + " AND " + + COL_STATUS + + "!=?"; + + } else { + throw new IllegalArgumentException( + "Unable to update row status and timestamp. The provided timestamp column \'" + + timestampColumn + + "\' is not valid"); + } + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setInt(1, status); + preparedStatement.setTimestamp(2, new java.sql.Timestamp(timestamp.getTime())); + preparedStatement.setString(3, groupTaskId.toString()); + preparedStatement.setInt(4, status); + + return preparedStatement; + } + + /** + * @param groupTaskId + * @param status + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQuerySetGroupTaskStatus(Connection conn, UUID groupTaskId, int status) + throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "UPDATE " + + TABLE_NAME + + " SET " + + COL_STATUS + + "=?" + + " WHERE " + + COL_GROUP_TASK_ID + + "=?" + + " AND " + + COL_STATUS + + "!=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setInt(1, status); + preparedStatement.setString(2, groupTaskId.toString()); + preparedStatement.setInt(3, status); + + return preparedStatement; + } + + /** + * @param groupTaskId + * @param value + * @return the requested query as string + * @throws SQLException + */ + public PreparedStatement getQuerySetGroupTaskRetryValue( + Connection conn, UUID groupTaskId, int value) throws SQLException { + + String str = null; + PreparedStatement preparedStatement = null; + + str = + "UPDATE " + + TABLE_NAME + + " SET " + + COL_RETRY_ATTEMPT + + "=?" 
+ + " WHERE " + + COL_GROUP_TASK_ID + + "=?"; + + preparedStatement = conn.prepareStatement(str); + + preparedStatement.setInt(1, value); + preparedStatement.setString(2, groupTaskId.toString()); + + return preparedStatement; + } + + /** + * @param con + * @param expirationTime + * @return the requested query as @PreparedStatement + * @throws SQLException + */ + public PreparedStatement getQueryDeleteCompletedTasks(Connection con, long expirationTime) + throws SQLException { + + PreparedStatement ps = con.prepareStatement(QUERY_DELETE_ALL_OLD_AND_COMPLETED_TASKS); + ps.setLong(1, expirationTime); + + return ps; + } + + /** + * @param con + * @param expirationTime + * @param maxNumTasks + * @return the requested query as @PreparedStatement + * @throws SQLException + */ + public PreparedStatement getQueryDeleteCompletedTasks( + Connection con, long expirationTime, int maxNumTasks) throws SQLException { + + PreparedStatement ps = con.prepareStatement(QUERY_DELETE_N_OLD_AND_COMPLETED_TASKS); + + ps.setLong(1, expirationTime); + ps.setInt(2, maxNumTasks); + + return ps; + } } diff --git a/src/main/java/it/grid/storm/rest/JettyThread.java b/src/main/java/it/grid/storm/rest/JettyThread.java index b01bc6ec..03f0d06a 100644 --- a/src/main/java/it/grid/storm/rest/JettyThread.java +++ b/src/main/java/it/grid/storm/rest/JettyThread.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest; @@ -12,21 +11,19 @@ * Thread that starts a Jetty server. The thread is passed an instance of {@link Server}, on which * start and join are called upon starting the thread. This is needed as the join method is * blocking, and would hang a thread calling it directly. - * + * * @author valerioventuri */ public class JettyThread extends Thread { private static final Logger LOG = LoggerFactory.getLogger(JettyThread.class); - /** - * The {@link Server} object. - */ + /** The {@link Server} object. */ private Server server; /** * Constructor. - * + * * @param server the server to start */ public JettyThread(Server server) { diff --git a/src/main/java/it/grid/storm/rest/RestServer.java b/src/main/java/it/grid/storm/rest/RestServer.java index ad392365..532bfe6d 100644 --- a/src/main/java/it/grid/storm/rest/RestServer.java +++ b/src/main/java/it/grid/storm/rest/RestServer.java @@ -1,32 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.rest; import static it.grid.storm.metrics.StormMetricRegistry.METRIC_REGISTRY; -import java.util.EnumSet; - -import javax.servlet.DispatcherType; - -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.FilterHolder; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; -import org.glassfish.jersey.jackson.JacksonFeature; -import org.glassfish.jersey.server.ResourceConfig; -import org.glassfish.jersey.servlet.ServletContainer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.codahale.metrics.jetty8.InstrumentedHandler; import com.codahale.metrics.servlets.MetricsServlet; import com.google.common.base.Preconditions; - import it.grid.storm.authz.remote.resource.AuthorizationResource; import it.grid.storm.authz.remote.resource.AuthorizationResourceCompat_1_0; import it.grid.storm.ea.remote.resource.StormEAResource; @@ -34,21 +16,32 @@ import it.grid.storm.info.remote.resources.SpaceStatusResource; import it.grid.storm.metrics.NamedInstrumentedSelectChannelConnector; import it.grid.storm.metrics.NamedInstrumentedThreadPool; -import it.grid.storm.namespace.remote.resource.VirtualFSResourceCompat_1_3; import it.grid.storm.namespace.remote.resource.VirtualFSResource; import it.grid.storm.namespace.remote.resource.VirtualFSResourceCompat_1_0; import it.grid.storm.namespace.remote.resource.VirtualFSResourceCompat_1_1; import it.grid.storm.namespace.remote.resource.VirtualFSResourceCompat_1_2; +import it.grid.storm.namespace.remote.resource.VirtualFSResourceCompat_1_3; import it.grid.storm.rest.auth.RestTokenFilter; import it.grid.storm.rest.metadata.Metadata; import it.grid.storm.tape.recalltable.providers.TapeRecallTOListMessageBodyWriter; import it.grid.storm.tape.recalltable.resources.TaskResource; import it.grid.storm.tape.recalltable.resources.TasksCardinality; import it.grid.storm.tape.recalltable.resources.TasksResource; +import java.util.EnumSet; +import javax.servlet.DispatcherType; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.glassfish.jersey.jackson.JacksonFeature; +import org.glassfish.jersey.server.ResourceConfig; +import org.glassfish.jersey.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides static methods for starting and stopping the storm-backend restful services. - * + * * @author zappi * @author valerioventuri */ @@ -69,8 +62,8 @@ public class RestServer { boolean running = false; - public RestServer(int restServicePort, int maxThreads, int maxQueueSize, boolean isTokenEnabled, - String token) { + public RestServer( + int restServicePort, int maxThreads, int maxQueueSize, boolean isTokenEnabled, String token) { this.restServicePort = restServicePort; this.maxThreads = maxThreads; @@ -89,11 +82,9 @@ public RestServer(int restServicePort, int maxThreads, int maxQueueSize, boolean /** * Configure the {@link Server}. Install the Jersey {@link ServletContainer} and configure it to * with resources locations. 
- * + * * @throws RestServiceException - * * @throws Exception - * */ private void configure() { @@ -124,7 +115,6 @@ private void configure() { servletContextHandler.setContextPath("/"); - ServletHolder metrics = new ServletHolder(new MetricsServlet(METRIC_REGISTRY.getRegistry())); servletContextHandler.addServlet(metrics, "/metrics"); @@ -134,14 +124,15 @@ private void configure() { LOG.info("Enabling security filter for rest server requests"); FilterHolder filterHolder = new FilterHolder(new RestTokenFilter()); filterHolder.setInitParameter("token", token); - servletContextHandler.addFilter(filterHolder, "/metadata/*", - EnumSet.of(DispatcherType.REQUEST)); - servletContextHandler.addFilter(filterHolder, "/recalltable/*", - EnumSet.of(DispatcherType.REQUEST)); + servletContextHandler.addFilter( + filterHolder, "/metadata/*", EnumSet.of(DispatcherType.REQUEST)); + servletContextHandler.addFilter( + filterHolder, "/recalltable/*", EnumSet.of(DispatcherType.REQUEST)); } - NamedInstrumentedSelectChannelConnector connector = new NamedInstrumentedSelectChannelConnector( - "rest-connector", restServicePort, METRIC_REGISTRY.getRegistry()); + NamedInstrumentedSelectChannelConnector connector = + new NamedInstrumentedSelectChannelConnector( + "rest-connector", restServicePort, METRIC_REGISTRY.getRegistry()); server.addConnector(connector); @@ -149,23 +140,25 @@ private void configure() { NamedInstrumentedThreadPool tp = new NamedInstrumentedThreadPool("rest", METRIC_REGISTRY.getRegistry()); - tp.setMaxThreads(maxThreads); tp.setMaxQueued(maxQueueSize); server.setThreadPool(tp); - LOG.info("RESTful services threadpool configured: maxThreads={}, maxQueueSize={}", maxThreads, + LOG.info( + "RESTful services threadpool configured: maxThreads={}, maxQueueSize={}", + maxThreads, maxQueueSize); - InstrumentedHandler ih = new InstrumentedHandler(METRIC_REGISTRY.getRegistry(), - servletContextHandler, "rest-handler"); + InstrumentedHandler ih = + new InstrumentedHandler( + METRIC_REGISTRY.getRegistry(), servletContextHandler, "rest-handler"); server.setHandler(ih); } /** * Starts the server. - * + * * @throws Exception */ public synchronized void start() throws Exception { @@ -181,12 +174,11 @@ public synchronized void start() throws Exception { LOG.info("StoRM RESTful services started."); } - } /** * Stops the server. - * + * * @throws Exception */ public synchronized void stop() throws Exception { @@ -205,19 +197,14 @@ public synchronized void stop() throws Exception { return; } - } running = false; LOG.info("StoRM RESTful services is not running"); - } - /** - * Returns if server is running. - * - */ + /** Returns if server is running. */ public boolean isRunning() { return running; diff --git a/src/main/java/it/grid/storm/rest/auth/RestTokenFilter.java b/src/main/java/it/grid/storm/rest/auth/RestTokenFilter.java index 675c0c57..ca9f6af5 100644 --- a/src/main/java/it/grid/storm/rest/auth/RestTokenFilter.java +++ b/src/main/java/it/grid/storm/rest/auth/RestTokenFilter.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.auth; import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED; import java.io.IOException; - import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; @@ -16,61 +14,57 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RestTokenFilter implements Filter { - private static final Logger log = LoggerFactory.getLogger(RestTokenFilter.class); - - public static final String TOKEN_HEADER_NAME = "token"; - public static final String TOKEN_INIT_PARAM_NAME = "token"; - - private String token; + private static final Logger log = LoggerFactory.getLogger(RestTokenFilter.class); - @Override - public void destroy() { + public static final String TOKEN_HEADER_NAME = "token"; + public static final String TOKEN_INIT_PARAM_NAME = "token"; - } + private String token; - @Override - public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) - throws IOException, ServletException { + @Override + public void destroy() {} - HttpServletRequest request = (HttpServletRequest) req; - HttpServletResponse response = (HttpServletResponse) resp; + @Override + public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) + throws IOException, ServletException { - if (isTokenValid(getToken(request))) { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) resp; - log.debug("Token verified!"); - chain.doFilter(request, response); + if (isTokenValid(getToken(request))) { - } else { + log.debug("Token verified!"); + chain.doFilter(request, response); - log.warn("Invalid token provided - request: {}", request); - response.setStatus(SC_UNAUTHORIZED); - response.getWriter().print("Invalid token provided"); - } - } + } else { - private String getToken(HttpServletRequest httpRequest) { + log.warn("Invalid token provided - request: {}", request); + response.setStatus(SC_UNAUTHORIZED); + response.getWriter().print("Invalid token provided"); + } + } - return httpRequest.getHeader(TOKEN_HEADER_NAME); - } + private String getToken(HttpServletRequest httpRequest) { - @Override - public void init(FilterConfig filterConfig) throws ServletException { + return httpRequest.getHeader(TOKEN_HEADER_NAME); + } - token = filterConfig.getInitParameter(TOKEN_INIT_PARAM_NAME); - if (token == null) { - throw new ServletException("Not found init parameter: " + TOKEN_INIT_PARAM_NAME); - } - } + @Override + public void init(FilterConfig filterConfig) throws ServletException { - private boolean isTokenValid(String token) throws ServletException { + token = filterConfig.getInitParameter(TOKEN_INIT_PARAM_NAME); + if (token == null) { + throw new ServletException("Not found init parameter: " + TOKEN_INIT_PARAM_NAME); + } + } - return this.token.equals(token); - } + private boolean isTokenValid(String token) throws ServletException { + return this.token.equals(token); + } } diff --git a/src/main/java/it/grid/storm/rest/metadata/Metadata.java b/src/main/java/it/grid/storm/rest/metadata/Metadata.java index 903ac27d..82491d83 100644 --- a/src/main/java/it/grid/storm/rest/metadata/Metadata.java +++ b/src/main/java/it/grid/storm/rest/metadata/Metadata.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata; @@ -16,17 +15,14 @@ import it.grid.storm.rest.metadata.service.ResourceNotFoundException; import it.grid.storm.rest.metadata.service.ResourceService; import it.grid.storm.rest.metadata.service.StoriMetadataService; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Path("/metadata") public class Metadata { @@ -38,8 +34,10 @@ public class Metadata { public Metadata() throws NamespaceException { NamespaceInterface namespace = NamespaceDirector.getNamespace(); - metadataService = new StoriMetadataService( - new ResourceService(namespace.getAllDefinedVFS(), namespace.getAllDefinedMappingRules())); + metadataService = + new StoriMetadataService( + new ResourceService( + namespace.getAllDefinedVFS(), namespace.getAllDefinedMappingRules())); } public Metadata(StoriMetadataService metadataService) { diff --git a/src/main/java/it/grid/storm/rest/metadata/model/FileAttributes.java b/src/main/java/it/grid/storm/rest/metadata/model/FileAttributes.java index f4b8583d..63700d4e 100644 --- a/src/main/java/it/grid/storm/rest/metadata/model/FileAttributes.java +++ b/src/main/java/it/grid/storm/rest/metadata/model/FileAttributes.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.model; @@ -22,7 +21,7 @@ public class FileAttributes { /** * Constructor with parameters. - * + * * @param pinned Is file pinned. * @param migrated Is file migrated to tape. * @param premigrated Is file only on disk and needs to be migrated to tape. @@ -32,10 +31,14 @@ public class FileAttributes { * @param tsmRecT The recall tasks queued */ @JsonCreator - public FileAttributes(@JsonProperty("pinned") Boolean pinned, - @JsonProperty("migrated") Boolean migrated, @JsonProperty("premigrated") Boolean premigrated, - @JsonProperty("checksum") String checksum, @JsonProperty("TSMRecD") Long tsmRecD, - @JsonProperty("TSMRecR") Integer tsmRecR, @JsonProperty("TSMRecT") String tsmRecT) { + public FileAttributes( + @JsonProperty("pinned") Boolean pinned, + @JsonProperty("migrated") Boolean migrated, + @JsonProperty("premigrated") Boolean premigrated, + @JsonProperty("checksum") String checksum, + @JsonProperty("TSMRecD") Long tsmRecD, + @JsonProperty("TSMRecR") Integer tsmRecR, + @JsonProperty("TSMRecT") String tsmRecT) { this.pinned = pinned; this.migrated = migrated; @@ -76,14 +79,26 @@ public String getTsmRecT() { @Override public String toString() { - return "FileAttributes [pinned=" + pinned + ", migrated=" + migrated + ", premigrated=" - + premigrated + ", checksum=" + checksum + ", tsmRecD=" + tsmRecD + ", tsmRecR=" + tsmRecR - + ", tsmRecT=" + tsmRecT + "]"; + return "FileAttributes [pinned=" + + pinned + + ", migrated=" + + migrated + + ", premigrated=" + + premigrated + + ", checksum=" + + checksum + + ", tsmRecD=" + + tsmRecD + + ", tsmRecR=" + + tsmRecR + + ", tsmRecT=" + + tsmRecT + + "]"; } - /** + /** * Constructor through the builder. 
- * + * * @param builder The @FileAttributes.Builder instance. */ public FileAttributes(Builder builder) { diff --git a/src/main/java/it/grid/storm/rest/metadata/model/StoriMetadata.java b/src/main/java/it/grid/storm/rest/metadata/model/StoriMetadata.java index fc1a2380..dfaea36f 100644 --- a/src/main/java/it/grid/storm/rest/metadata/model/StoriMetadata.java +++ b/src/main/java/it/grid/storm/rest/metadata/model/StoriMetadata.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.model; @@ -9,7 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.Date; import java.util.List; @@ -17,17 +15,20 @@ public class StoriMetadata { public enum ResourceType { - FILE, FOLDER + FILE, + FOLDER } public enum ResourceStatus { - ONLINE, NEARLINE + ONLINE, + NEARLINE } private String absolutePath; private VirtualFsMetadata filesystem; private ResourceType type; private ResourceStatus status; + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm a z") private Date lastModified; @@ -37,7 +38,7 @@ public enum ResourceStatus { /** * Constructor with params. - * + * * @param absolutePath The absolute path of the resource * @param type The resource type: FILE or FOLDER * @param status The status of the resource in order of latency: ONLINE or NEARLINE @@ -47,8 +48,10 @@ public enum ResourceStatus { * @param children In case of FOLDER, the list of children */ @JsonCreator - public StoriMetadata(@JsonProperty("absolutePath") String absolutePath, - @JsonProperty("type") ResourceType type, @JsonProperty("status") ResourceStatus status, + public StoriMetadata( + @JsonProperty("absolutePath") String absolutePath, + @JsonProperty("type") ResourceType type, + @JsonProperty("status") ResourceStatus status, @JsonProperty("filesystem") VirtualFsMetadata filesystem, @JsonProperty("attributes") FileAttributes attributes, @JsonProperty("lastModified") Date lastModified, @@ -64,7 +67,7 @@ public StoriMetadata(@JsonProperty("absolutePath") String absolutePath, /** * Constructor through the builder. - * + * * @param builder The @StoriMetadata.Builder instance. */ private StoriMetadata(Builder builder) { @@ -107,9 +110,21 @@ public List getChildren() { @Override public String toString() { - return "StoriMetadata [absolutePath=" + absolutePath + ", filesystem=" + filesystem + ", type=" - + type + ", status=" + status + ", lastModified=" + lastModified + ", children=" + children - + ", attributes=" + attributes + "]"; + return "StoriMetadata [absolutePath=" + + absolutePath + + ", filesystem=" + + filesystem + + ", type=" + + type + + ", status=" + + status + + ", lastModified=" + + lastModified + + ", children=" + + children + + ", attributes=" + + attributes + + "]"; } public static Builder builder() { diff --git a/src/main/java/it/grid/storm/rest/metadata/model/VirtualFsMetadata.java b/src/main/java/it/grid/storm/rest/metadata/model/VirtualFsMetadata.java index e243ea55..7c27a939 100644 --- a/src/main/java/it/grid/storm/rest/metadata/model/VirtualFsMetadata.java +++ b/src/main/java/it/grid/storm/rest/metadata/model/VirtualFsMetadata.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.model; diff --git a/src/main/java/it/grid/storm/rest/metadata/service/ResourceNotFoundException.java b/src/main/java/it/grid/storm/rest/metadata/service/ResourceNotFoundException.java index e79c3b97..9434f345 100644 --- a/src/main/java/it/grid/storm/rest/metadata/service/ResourceNotFoundException.java +++ b/src/main/java/it/grid/storm/rest/metadata/service/ResourceNotFoundException.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.service; public class ResourceNotFoundException extends Exception { - private static final long serialVersionUID = 1L; - - public ResourceNotFoundException(String message) { - super(message); - } + private static final long serialVersionUID = 1L; + public ResourceNotFoundException(String message) { + super(message); + } } diff --git a/src/main/java/it/grid/storm/rest/metadata/service/ResourceService.java b/src/main/java/it/grid/storm/rest/metadata/service/ResourceService.java index 1a0ccd1b..fe09fc35 100644 --- a/src/main/java/it/grid/storm/rest/metadata/service/ResourceService.java +++ b/src/main/java/it/grid/storm/rest/metadata/service/ResourceService.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.service; @@ -9,17 +8,15 @@ import static it.grid.storm.namespace.model.StoRIType.FOLDER; import static it.grid.storm.namespace.naming.NamespaceUtil.getWinnerRule; -import java.io.File; -import java.util.Collection; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.StoRI; import it.grid.storm.namespace.model.MappingRule; import it.grid.storm.namespace.model.StoRIType; import it.grid.storm.namespace.model.VirtualFS; +import java.io.File; +import java.util.Collection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ResourceService { @@ -28,8 +25,7 @@ public class ResourceService { protected Collection vfsList; protected Collection rulesList; - public ResourceService(Collection vfsList, - Collection rulesList) { + public ResourceService(Collection vfsList, Collection rulesList) { checkNotNull(vfsList, "Invalid null list of Virtual FS"); checkNotNull(rulesList, "Invalid null list of Mapping Rules"); diff --git a/src/main/java/it/grid/storm/rest/metadata/service/StoriMetadataService.java b/src/main/java/it/grid/storm/rest/metadata/service/StoriMetadataService.java index c0bd3ae1..ba9dd851 100644 --- a/src/main/java/it/grid/storm/rest/metadata/service/StoriMetadataService.java +++ b/src/main/java/it/grid/storm/rest/metadata/service/StoriMetadataService.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata.service; @@ -10,16 +9,7 @@ import static it.grid.storm.rest.metadata.model.StoriMetadata.ResourceType.FILE; import static it.grid.storm.rest.metadata.model.StoriMetadata.ResourceType.FOLDER; -import java.io.File; -import java.io.IOException; -import java.util.Date; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; - import it.grid.storm.ea.StormEA; import it.grid.storm.filesystem.FSException; import it.grid.storm.filesystem.LocalFile; @@ -31,6 +21,12 @@ import it.grid.storm.rest.metadata.model.StoriMetadata; import it.grid.storm.rest.metadata.model.VirtualFsMetadata; import it.grid.storm.srm.types.TDirOption; +import java.io.File; +import java.io.IOException; +import java.util.Date; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class StoriMetadataService { @@ -76,17 +72,25 @@ private StoriMetadata buildFileMetadata(StoRI stori) throws IOException, FSExcep log.debug("{} is an empty directory", stori.getLocalFile()); } } else { - attributes = FileAttributes.builder().pinned(StormEA.isPinned(canonicalPath)) - .migrated(StormEA.getMigrated(canonicalPath)) - .premigrated(StormEA.getPremigrated(canonicalPath)) - .checksum(StormEA.getChecksum(canonicalPath, ADLER32)) - .tsmRecD(StormEA.getTSMRecD(canonicalPath)).tsmRecR(StormEA.getTSMRecR(canonicalPath)) - .tsmRecT(StormEA.getTSMRecT(canonicalPath)).build(); + attributes = + FileAttributes.builder() + .pinned(StormEA.isPinned(canonicalPath)) + .migrated(StormEA.getMigrated(canonicalPath)) + .premigrated(StormEA.getPremigrated(canonicalPath)) + .checksum(StormEA.getChecksum(canonicalPath, ADLER32)) + .tsmRecD(StormEA.getTSMRecD(canonicalPath)) + .tsmRecR(StormEA.getTSMRecR(canonicalPath)) + .tsmRecT(StormEA.getTSMRecT(canonicalPath)) + .build(); } - return StoriMetadata.builder().absolutePath(stori.getAbsolutePath()) + return StoriMetadata.builder() + .absolutePath(stori.getAbsolutePath()) .lastModified(new Date((new File(canonicalPath)).lastModified())) .type(stori.getLocalFile().isDirectory() ? FOLDER : FILE) - .status(stori.getLocalFile().isOnDisk() ? ONLINE : NEARLINE).filesystem(vfsMeta) - .attributes(attributes).children(children).build(); + .status(stori.getLocalFile().isOnDisk() ? ONLINE : NEARLINE) + .filesystem(vfsMeta) + .attributes(attributes) + .children(children) + .build(); } } diff --git a/src/main/java/it/grid/storm/scheduler/Chooser.java b/src/main/java/it/grid/storm/scheduler/Chooser.java index 79ac09bc..6bb64060 100644 --- a/src/main/java/it/grid/storm/scheduler/Chooser.java +++ b/src/main/java/it/grid/storm/scheduler/Chooser.java @@ -1,32 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2005 - *

- * - *

- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2005 + * + *

Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 - * */ - public interface Chooser { - public void choose(Streets s); + public void choose(Streets s); } diff --git a/src/main/java/it/grid/storm/scheduler/ChunkScheduler.java b/src/main/java/it/grid/storm/scheduler/ChunkScheduler.java index 10ce38a0..3e04ed10 100644 --- a/src/main/java/it/grid/storm/scheduler/ChunkScheduler.java +++ b/src/main/java/it/grid/storm/scheduler/ChunkScheduler.java @@ -1,34 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; import it.grid.storm.config.Configuration; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2005 - *

- * - *

- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2005 + * + *

Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 - * */ public class ChunkScheduler implements Scheduler, Streets { @@ -75,7 +64,6 @@ private ChunkScheduler(Configuration configuration) { bolSchedulerStatus.setCorePoolSize(bolWorkerCorePoolSize); bolSchedulerStatus.setMaxPoolSize(bolWorkerMaxPoolSize); bolSchedulerStatus.setQueueSize(bolQueueSize); - } public static ChunkScheduler getInstance() { @@ -168,7 +156,6 @@ public void ptpStreet(Delegable chunk) { } catch (SchedulerException e) { log.error(e.getMessage(), e); } - } public void bolStreet(Delegable chunk) { @@ -190,5 +177,4 @@ public void abort(Delegable task) throws SchedulerException { public void suspend(Delegable task) throws SchedulerException { log.warn("suspend is not implemented"); } - } diff --git a/src/main/java/it/grid/storm/scheduler/ChunkTask.java b/src/main/java/it/grid/storm/scheduler/ChunkTask.java index ae6def5c..038fb68e 100644 --- a/src/main/java/it/grid/storm/scheduler/ChunkTask.java +++ b/src/main/java/it/grid/storm/scheduler/ChunkTask.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; @@ -13,241 +12,225 @@ import it.grid.storm.health.HealthDirector; import it.grid.storm.health.LogEvent; import it.grid.storm.health.OperationType; - import java.util.ArrayList; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2005 - *

- * - *

- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2005 + * + *

Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo - * * @author Michele Dibenedetto * @version 1.1 - * */ public class ChunkTask extends Task { - private static final Logger log = LoggerFactory - .getLogger(ChunkTask.class); - - private final Delegable todo; - private final String userDN; - private final String surl; - private final String requestToken; - - private boolean successResult = false; - - public ChunkTask(Delegable todo) { - - super(todo.getName()); - this.todo = todo; - if (todo instanceof Request) { - userDN = ((Request) todo).getUserDN(); - surl = ((Request) todo).getSURL(); - if (todo instanceof PersistentRequestChunk) { - requestToken = ((PersistentRequestChunk) todo).getRequestToken(); - } else { - requestToken = "Empty"; - } - } else { - userDN = "unknonw"; - surl = "unknonw"; - requestToken = "unknonw"; - } - } - - private boolean isAsynchTask() { - - return todo instanceof PersistentRequestChunk; - } - - private boolean isChunkTask() { - - return todo instanceof RequestChunk; - } - - public void setResult(boolean result) { - - this.successResult = result; - } - - /** - * Compares this object with the specified object for order. Note that this - * method is used by priority queue. - * - * @param o - * the Object to be compared. - * @return a negative integer, zero, or a positive integer as this object is - * less than, equal to, or greater than the specified object. - * @todo Implement this java.lang.Comparable method. In this implementation - * all chunk tasks are considered equals. - * - */ - @Override - public int compareTo(Object o) { - - return 0; - } - - /** - * When an object implementing interface Runnable is used to - * create a thread, starting the thread causes the object's run - * method to be called in that separately executing thread. - */ - @Override - public void run() { - - runEvent(); - todo.doIt(); - endEvent(); - logExecution(); - } - - protected void endEvent() { - - super.endEvent(); - if (todo instanceof Request) { - successResult = ((Request) todo).isResultSuccess(); - } - if (isAsynchTask()) { - ((PersistentRequestChunk) todo).persistStatus(); - } - if (isChunkTask()) { - ((RequestChunk) todo).updateGlobalStatus(); - } - } - - /** - * Method used to book the execution of this chunk - */ - public void logExecution() { - - if (!isAsynchTask()) { - log.debug("logExecution disabled for synch chuncks"); - return; - } - - ArrayList bks = HealthDirector.getHealthMonitor() - .getBookKeepers(); - if (bks.isEmpty()) { - return; - } - LogEvent event = new LogEvent(buildOperationType(), userDN, surl, - getStartExecutionTime(), howlongInExecution(), - requestToken, successResult); - log.debug("Booking Asynch event {}", event); - for (int i = 0; i < bks.size(); i++) { - bks.get(i).addLogEvent(event); - } - } - - /** - * @return - */ - private OperationType buildOperationType() { - - if (todo instanceof PtP) { - return OperationType.PTP; - } - if (todo instanceof PtG) { - return OperationType.PTG; - } - if (todo instanceof BoL) { - return OperationType.BOL; - } - return OperationType.UNDEF; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + (isAsynchTask() ? 1231 : 1237); - result = prime * result - + ((requestToken == null) ? 0 : requestToken.hashCode()); - result = prime * result + (successResult ? 1231 : 1237); - result = prime * result + ((surl == null) ? 
0 : surl.hashCode()); - result = prime * result + ((todo == null) ? 0 : todo.hashCode()); - result = prime * result + ((userDN == null) ? 0 : userDN.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - ChunkTask other = (ChunkTask) obj; - if (isAsynchTask() != other.isAsynchTask()) { - return false; - } - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - if (successResult != other.successResult) { - return false; - } - if (surl == null) { - if (other.surl != null) { - return false; - } - } else if (!surl.equals(other.surl)) { - return false; - } - if (todo == null) { - if (other.todo != null) { - return false; - } - } else if (!todo.equals(other.todo)) { - return false; - } - if (userDN == null) { - if (other.userDN != null) { - return false; - } - } else if (!userDN.equals(other.userDN)) { - return false; - } - return true; - } - + private static final Logger log = LoggerFactory.getLogger(ChunkTask.class); + + private final Delegable todo; + private final String userDN; + private final String surl; + private final String requestToken; + + private boolean successResult = false; + + public ChunkTask(Delegable todo) { + + super(todo.getName()); + this.todo = todo; + if (todo instanceof Request) { + userDN = ((Request) todo).getUserDN(); + surl = ((Request) todo).getSURL(); + if (todo instanceof PersistentRequestChunk) { + requestToken = ((PersistentRequestChunk) todo).getRequestToken(); + } else { + requestToken = "Empty"; + } + } else { + userDN = "unknonw"; + surl = "unknonw"; + requestToken = "unknonw"; + } + } + + private boolean isAsynchTask() { + + return todo instanceof PersistentRequestChunk; + } + + private boolean isChunkTask() { + + return todo instanceof RequestChunk; + } + + public void setResult(boolean result) { + + this.successResult = result; + } + + /** + * Compares this object with the specified object for order. Note that this method is used by + * priority queue. + * + * @param o the Object to be compared. + * @return a negative integer, zero, or a positive integer as this object is less than, equal to, + * or greater than the specified object. + * @todo Implement this java.lang.Comparable method. In this implementation all chunk tasks are + * considered equals. + */ + @Override + public int compareTo(Object o) { + + return 0; + } + + /** + * When an object implementing interface Runnable is used to create a thread, + * starting the thread causes the object's run method to be called in that separately + * executing thread. 
+ */ + @Override + public void run() { + + runEvent(); + todo.doIt(); + endEvent(); + logExecution(); + } + + protected void endEvent() { + + super.endEvent(); + if (todo instanceof Request) { + successResult = ((Request) todo).isResultSuccess(); + } + if (isAsynchTask()) { + ((PersistentRequestChunk) todo).persistStatus(); + } + if (isChunkTask()) { + ((RequestChunk) todo).updateGlobalStatus(); + } + } + + /** Method used to book the execution of this chunk */ + public void logExecution() { + + if (!isAsynchTask()) { + log.debug("logExecution disabled for synch chuncks"); + return; + } + + ArrayList bks = HealthDirector.getHealthMonitor().getBookKeepers(); + if (bks.isEmpty()) { + return; + } + LogEvent event = + new LogEvent( + buildOperationType(), + userDN, + surl, + getStartExecutionTime(), + howlongInExecution(), + requestToken, + successResult); + log.debug("Booking Asynch event {}", event); + for (int i = 0; i < bks.size(); i++) { + bks.get(i).addLogEvent(event); + } + } + + /** @return */ + private OperationType buildOperationType() { + + if (todo instanceof PtP) { + return OperationType.PTP; + } + if (todo instanceof PtG) { + return OperationType.PTG; + } + if (todo instanceof BoL) { + return OperationType.BOL; + } + return OperationType.UNDEF; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + (isAsynchTask() ? 1231 : 1237); + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + result = prime * result + (successResult ? 1231 : 1237); + result = prime * result + ((surl == null) ? 0 : surl.hashCode()); + result = prime * result + ((todo == null) ? 0 : todo.hashCode()); + result = prime * result + ((userDN == null) ? 0 : userDN.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ChunkTask other = (ChunkTask) obj; + if (isAsynchTask() != other.isAsynchTask()) { + return false; + } + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + if (successResult != other.successResult) { + return false; + } + if (surl == null) { + if (other.surl != null) { + return false; + } + } else if (!surl.equals(other.surl)) { + return false; + } + if (todo == null) { + if (other.todo != null) { + return false; + } + } else if (!todo.equals(other.todo)) { + return false; + } + if (userDN == null) { + if (other.userDN != null) { + return false; + } + } else if (!userDN.equals(other.userDN)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/scheduler/ChunkType.java b/src/main/java/it/grid/storm/scheduler/ChunkType.java index 0c02fcbf..8a5f2ca3 100644 --- a/src/main/java/it/grid/storm/scheduler/ChunkType.java +++ b/src/main/java/it/grid/storm/scheduler/ChunkType.java @@ -1,78 +1,63 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2005-2007 - *

- * - *

- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2005-2007 + * + *

Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.1 - * */ public class ChunkType { - private String chunkTypeDesc = "undef"; - private String chunkName = "undef"; - public int chunkType = -1; + private String chunkTypeDesc = "undef"; + private String chunkName = "undef"; + public int chunkType = -1; - public static final ChunkType GENERIC = new ChunkType(0, "generic", - "Generic Chunk Type"); - public static final ChunkType PREPARE_TO_GET = new ChunkType(1, "PTG", - "srmPrepareToGet Type"); - public static final ChunkType PREPARE_TO_PUT = new ChunkType(2, "PTP", - "srmPrepareToPut Type"); - public static final ChunkType COPY = new ChunkType(3, "COPY", "srmCopy Type"); - public static final ChunkType BOL = new ChunkType(4, "BOL", - "srmBringOnLine Type"); + public static final ChunkType GENERIC = new ChunkType(0, "generic", "Generic Chunk Type"); + public static final ChunkType PREPARE_TO_GET = new ChunkType(1, "PTG", "srmPrepareToGet Type"); + public static final ChunkType PREPARE_TO_PUT = new ChunkType(2, "PTP", "srmPrepareToPut Type"); + public static final ChunkType COPY = new ChunkType(3, "COPY", "srmCopy Type"); + public static final ChunkType BOL = new ChunkType(4, "BOL", "srmBringOnLine Type"); - private ChunkType(int type, String chunkName, String description) { + private ChunkType(int type, String chunkName, String description) { - this.chunkTypeDesc = description; - this.chunkName = chunkName; - this.chunkType = type; - } + this.chunkTypeDesc = description; + this.chunkName = chunkName; + this.chunkType = type; + } - public int getIndex() { + public int getIndex() { - return this.chunkType; - } + return this.chunkType; + } - public String toString() { + public String toString() { - return chunkTypeDesc; - } + return chunkTypeDesc; + } - public int hashCode() { + public int hashCode() { - return this.chunkType; - } + return this.chunkType; + } - public boolean equals(Object obj) { - - boolean result = false; - if (obj instanceof ChunkType) { - ChunkType other = (ChunkType) obj; - if (other.chunkType == this.chunkType) { - result = true; - } - } - return result; - } + public boolean equals(Object obj) { + boolean result = false; + if (obj instanceof ChunkType) { + ChunkType other = (ChunkType) obj; + if (other.chunkType == this.chunkType) { + result = true; + } + } + return result; + } } diff --git a/src/main/java/it/grid/storm/scheduler/CruncherTask.java b/src/main/java/it/grid/storm/scheduler/CruncherTask.java index 984f2ecb..7812c653 100644 --- a/src/main/java/it/grid/storm/scheduler/CruncherTask.java +++ b/src/main/java/it/grid/storm/scheduler/CruncherTask.java @@ -1,108 +1,83 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title: - *

- * - *

- * Description: - *

- * - *

- * Copyright: Copyright (c) 2005-2007 - *

- * - *

- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy - *

- * + * + *

Description: + * + *

Copyright: Copyright (c) 2005-2007 + * + *

Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 - * */ - public class CruncherTask extends Task { - private Delegable todo = null; + private Delegable todo = null; - public CruncherTask(Delegable todo) { + public CruncherTask(Delegable todo) { - super(); - this.todo = todo; - this.taskName = todo.getName(); - } + super(); + this.todo = todo; + this.taskName = todo.getName(); + } - /** - * Compares this object with the specified object for order. - * - * @param o - * the Object to be compared. - * @return a negative integer, zero, or a positive integer as this object is - * less than, equal to, or greater than the specified object. - * @todo Implement this java.lang.Comparable method - */ - public int compareTo(Object o) { + /** + * Compares this object with the specified object for order. + * + * @param o the Object to be compared. + * @return a negative integer, zero, or a positive integer as this object is less than, equal to, + * or greater than the specified object. + * @todo Implement this java.lang.Comparable method + */ + public int compareTo(Object o) { - /** - * @todo : make the implementation! - */ - return 0; - } + /** @todo : make the implementation! */ + return 0; + } - /** - * When an object implementing interface Runnable is used to - * create a thread, starting the thread causes the object's run - * method to be called in that separately executing thread. - * - * @todo Implement this java.lang.Runnable method - */ - public void run() { + /** + * When an object implementing interface Runnable is used to create a thread, + * starting the thread causes the object's run method to be called in that separately + * executing thread. + * + * @todo Implement this java.lang.Runnable method + */ + public void run() { - this.runEvent(); - todo.doIt(); - this.endEvent(); - } + this.runEvent(); + todo.doIt(); + this.endEvent(); + } - /** - * Two CruncherTask are equals if and only - * - * if the inner Delegable object are equals AND if the name of the Task are - * equals - * - * @param o - * Object - * @return boolean - */ - public boolean equals(Object obj) { + /** + * Two CruncherTask are equals if and only + * + *
if the inner Delegable object are equals AND if the name of the Task are equals + * + * @param o Object + * @return boolean + */ + public boolean equals(Object obj) { - if (obj == this) - return true; - if (!(obj instanceof CruncherTask)) - return false; - CruncherTask other = (CruncherTask) obj; - if (!(other.getName().equals(this.getName()))) - return false; - if (!(other.todo.equals(this.todo))) - return false; - else - return true; - } + if (obj == this) return true; + if (!(obj instanceof CruncherTask)) return false; + CruncherTask other = (CruncherTask) obj; + if (!(other.getName().equals(this.getName()))) return false; + if (!(other.todo.equals(this.todo))) return false; + else return true; + } - /** - * - * @return int - */ - public int hashCode() { + /** @return int */ + public int hashCode() { - int hash = 17; - if (this.taskName.length() != 0) - hash = 37 * hash + taskName.hashCode(); - hash = 37 * hash + this.todo.hashCode(); - return hash; - } + int hash = 17; + if (this.taskName.length() != 0) hash = 37 * hash + taskName.hashCode(); + hash = 37 * hash + this.todo.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/scheduler/CrusherScheduler.java b/src/main/java/it/grid/storm/scheduler/CrusherScheduler.java index b7faa52d..3b6972bf 100644 --- a/src/main/java/it/grid/storm/scheduler/CrusherScheduler.java +++ b/src/main/java/it/grid/storm/scheduler/CrusherScheduler.java @@ -1,30 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; import it.grid.storm.config.Configuration; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * - *

* Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ - public class CrusherScheduler implements Scheduler { private static final Logger log = LoggerFactory.getLogger(CrusherScheduler.class); @@ -90,9 +81,7 @@ public SchedulerStatus getStatus() { * @param task Delegable * @throws SchedulerException */ - public void abort(Delegable task) throws SchedulerException { - - } + public void abort(Delegable task) throws SchedulerException {} /** * @param task Delegable @@ -102,5 +91,4 @@ public void suspend(Delegable task) throws SchedulerException { throw new SchedulerException("CruscherScheduler", "Suspend request not implemented yet!"); } - } diff --git a/src/main/java/it/grid/storm/scheduler/Delegable.java b/src/main/java/it/grid/storm/scheduler/Delegable.java index ab9ef6b9..7be0a0e8 100644 --- a/src/main/java/it/grid/storm/scheduler/Delegable.java +++ b/src/main/java/it/grid/storm/scheduler/Delegable.java @@ -1,37 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ public interface Delegable { - public void doIt(); + public void doIt(); - public String getName(); - - public boolean equals(Object obj); + public String getName(); + public boolean equals(Object obj); } diff --git a/src/main/java/it/grid/storm/scheduler/PersistentRequestChunk.java b/src/main/java/it/grid/storm/scheduler/PersistentRequestChunk.java index 827ab398..5ea8007b 100644 --- a/src/main/java/it/grid/storm/scheduler/PersistentRequestChunk.java +++ b/src/main/java/it/grid/storm/scheduler/PersistentRequestChunk.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; @@ -8,8 +7,7 @@ public interface PersistentRequestChunk extends RequestChunk { - public String getRequestToken(); - - public void persistStatus(); + public String getRequestToken(); + public void persistStatus(); } diff --git a/src/main/java/it/grid/storm/scheduler/Scheduler.java b/src/main/java/it/grid/storm/scheduler/Scheduler.java index 2d7794d3..ebef318e 100644 --- a/src/main/java/it/grid/storm/scheduler/Scheduler.java +++ b/src/main/java/it/grid/storm/scheduler/Scheduler.java @@ -1,62 +1,43 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ - public interface Scheduler { - /** - * Method that accepts a Task for scheduling. - * - * @param t - * Delegable - * @throws SchedulerException - */ - public void schedule(Delegable t) throws SchedulerException; - - /** - * - * @param task - * Delegable - * @throws SchedulerException - */ - public void abort(Delegable task) throws SchedulerException; + /** + * Method that accepts a Task for scheduling. + * + * @param t Delegable + * @throws SchedulerException + */ + public void schedule(Delegable t) throws SchedulerException; - /** - * - * @param task - * Delegable - * @throws SchedulerException - */ - public void suspend(Delegable task) throws SchedulerException; + /** + * @param task Delegable + * @throws SchedulerException + */ + public void abort(Delegable task) throws SchedulerException; - /** - * @return SchedulerStatus - */ - public SchedulerStatus getStatus(); + /** + * @param task Delegable + * @throws SchedulerException + */ + public void suspend(Delegable task) throws SchedulerException; + /** @return SchedulerStatus */ + public SchedulerStatus getStatus(); } diff --git a/src/main/java/it/grid/storm/scheduler/SchedulerException.java b/src/main/java/it/grid/storm/scheduler/SchedulerException.java index 8d200fb3..d063004f 100644 --- a/src/main/java/it/grid/storm/scheduler/SchedulerException.java +++ b/src/main/java/it/grid/storm/scheduler/SchedulerException.java @@ -1,59 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ public class SchedulerException extends Exception { - private String whichScheduler; + private String whichScheduler; - public SchedulerException(String whichSched) { + public SchedulerException(String whichSched) { - super(); - whichScheduler = whichSched; - } + super(); + whichScheduler = whichSched; + } - public SchedulerException(String whichSched, String message) { + public SchedulerException(String whichSched, String message) { - super(message); - } + super(message); + } - public SchedulerException(Throwable cause) { + public SchedulerException(Throwable cause) { - super(cause); - } + super(cause); + } - public SchedulerException(String message, Throwable cause) { + public SchedulerException(String message, Throwable cause) { - super(message, cause); - } + super(message, cause); + } - public String toString() { - - return "Exception occurred within scheduler type = " + whichScheduler; - } + public String toString() { + return "Exception occurred within scheduler type = " + whichScheduler; + } } diff --git a/src/main/java/it/grid/storm/scheduler/SchedulerStatus.java b/src/main/java/it/grid/storm/scheduler/SchedulerStatus.java index efba43bf..85a53a99 100644 --- a/src/main/java/it/grid/storm/scheduler/SchedulerStatus.java +++ b/src/main/java/it/grid/storm/scheduler/SchedulerStatus.java @@ -1,172 +1,166 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ - public class SchedulerStatus { - // represent the name of scheduler - private String name = null; - - // nr. of threads that are actively executing tasks. (approximate) - private int activeCount = -1; + // represent the name of scheduler + private String name = null; - // Total number of tasks that have completed execution - private long completedTaskCount = -1; + // nr. of threads that are actively executing tasks. (approximate) + private int activeCount = -1; - // Core number of threads - private int corePoolSize = -1; + // Total number of tasks that have completed execution + private long completedTaskCount = -1; - // Largest number of threads - private int largestPoolSize = -1; + // Core number of threads + private int corePoolSize = -1; - // Maximum allowed number of threads - private int maxPoolSize = -1; + // Largest number of threads + private int largestPoolSize = -1; - // Actual pool size - private int poolSize = -1; + // Maximum allowed number of threads + private int maxPoolSize = -1; - // Actual number of Task scheduled - private long taskCount = -1; + // Actual pool size + private int poolSize = -1; - // Queue Size - private int queueSize = -1; + // Actual number of Task scheduled + private long taskCount = -1; - // Queue : remaining capacity - private int remainingCapacity = -1; + // Queue Size + private int queueSize = -1; - protected SchedulerStatus(String name) { + // Queue : remaining capacity + private int remainingCapacity = -1; - this.name = name; - } + protected SchedulerStatus(String name) { - public int getActiveCount() { + this.name = name; + } - return activeCount; - } + public int getActiveCount() { - public long getCompletedTaskCount() { + return activeCount; + } - return completedTaskCount; - } + public long getCompletedTaskCount() { - public int getCorePoolSize() { + return completedTaskCount; + } - return corePoolSize; - } + public int getCorePoolSize() { - public int getLargestPoolSize() { + return corePoolSize; + } - return largestPoolSize; - } + public int getLargestPoolSize() { - public int getMaxPoolSize() { + return largestPoolSize; + } - return maxPoolSize; - } + public int getMaxPoolSize() { - public int getPoolSize() { + return maxPoolSize; + } - return poolSize; - } + public int getPoolSize() { - public long getTaskCount() { + return poolSize; + } - return taskCount; - } + public long getTaskCount() { - public int getQueueSize() { + return taskCount; + } - return this.queueSize; - } + public int getQueueSize() { - public int getRemainingSize() { + return this.queueSize; + } - return this.remainingCapacity; - } + public int getRemainingSize() { - protected void setActiveCount(int activeCount) { + return this.remainingCapacity; + } - this.activeCount = activeCount; - } + protected void setActiveCount(int activeCount) { - protected void setCompletedTaskCount(long completedTaskCount) { + this.activeCount = activeCount; + } - this.completedTaskCount = completedTaskCount; - } + protected void setCompletedTaskCount(long completedTaskCount) { - protected void setCorePoolSize(int corePoolSize) { + this.completedTaskCount = completedTaskCount; + } - this.corePoolSize = corePoolSize; - } + protected void setCorePoolSize(int corePoolSize) { - protected void setLargestPoolSize(int largestPoolSize) { + this.corePoolSize = corePoolSize; + } - this.largestPoolSize = largestPoolSize; - } + protected void setLargestPoolSize(int largestPoolSize) { - 
protected void setMaxPoolSize(int maxPoolSize) { + this.largestPoolSize = largestPoolSize; + } - this.maxPoolSize = maxPoolSize; - } + protected void setMaxPoolSize(int maxPoolSize) { - protected void setPoolSize(int poolSize) { + this.maxPoolSize = maxPoolSize; + } - this.poolSize = poolSize; - } + protected void setPoolSize(int poolSize) { - protected void setTaskCount(long taskCount) { + this.poolSize = poolSize; + } - this.taskCount = taskCount; - } + protected void setTaskCount(long taskCount) { - protected void setQueueSize(int queueSize) { + this.taskCount = taskCount; + } - this.queueSize = queueSize; - } + protected void setQueueSize(int queueSize) { - protected void setRemainingCapacity(int remainingCapacity) { + this.queueSize = queueSize; + } - this.remainingCapacity = remainingCapacity; - } + protected void setRemainingCapacity(int remainingCapacity) { - @Override - public String toString() { + this.remainingCapacity = remainingCapacity; + } - StringBuilder sb = new StringBuilder(); - sb.append("WorkerPool (Sched-Name="); - sb.append(this.name); - sb.append(") = [core:" + corePoolSize + " ; largest:" + maxPoolSize - + " ; size:" + poolSize + "]" + "\n"); - sb.append("Queue (Sched-Name="); - sb.append(this.name); - sb.append(") = [size:" + queueSize + " ; remaining capacity:" - + remainingCapacity + "]"); - return sb.toString(); - } + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("WorkerPool (Sched-Name="); + sb.append(this.name); + sb.append( + ") = [core:" + + corePoolSize + + " ; largest:" + + maxPoolSize + + " ; size:" + + poolSize + + "]" + + "\n"); + sb.append("Queue (Sched-Name="); + sb.append(this.name); + sb.append(") = [size:" + queueSize + " ; remaining capacity:" + remainingCapacity + "]"); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/scheduler/Streets.java b/src/main/java/it/grid/storm/scheduler/Streets.java index 67eafee3..87de5a0f 100644 --- a/src/main/java/it/grid/storm/scheduler/Streets.java +++ b/src/main/java/it/grid/storm/scheduler/Streets.java @@ -1,37 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ - public interface Streets { - public void ptgStreet(Delegable d); + public void ptgStreet(Delegable d); - public void ptpStreet(Delegable d); + public void ptpStreet(Delegable d); - public void bolStreet(Delegable d); + public void bolStreet(Delegable d); } diff --git a/src/main/java/it/grid/storm/scheduler/Task.java b/src/main/java/it/grid/storm/scheduler/Task.java index 7092a3fb..3974578f 100644 --- a/src/main/java/it/grid/storm/scheduler/Task.java +++ b/src/main/java/it/grid/storm/scheduler/Task.java @@ -1,110 +1,99 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005
+ *
+ * &lt;p&gt;
Company: Project 'Grid.it' for INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.0 * @date - * */ - public abstract class Task implements Runnable, Comparable { - private static String UNDEF_TASKNAME = "undefined"; - private long creationTime = System.currentTimeMillis(); - private long enqueueTime = 0L; - private long startExecutionTime = 0L; - private long endExecutionTime = 0L; - private long abortingEventTime = 0L; - private long suspendingEventTime = 0L; - protected String taskName = null; + private static String UNDEF_TASKNAME = "undefined"; + private long creationTime = System.currentTimeMillis(); + private long enqueueTime = 0L; + private long startExecutionTime = 0L; + private long endExecutionTime = 0L; + private long abortingEventTime = 0L; + private long suspendingEventTime = 0L; + protected String taskName = null; - protected Task() { + protected Task() { - this(UNDEF_TASKNAME); - } + this(UNDEF_TASKNAME); + } - protected Task(String name) { + protected Task(String name) { - taskName = name; - if (taskName == null) { - taskName = UNDEF_TASKNAME; - } - creationTime = System.currentTimeMillis(); - } + taskName = name; + if (taskName == null) { + taskName = UNDEF_TASKNAME; + } + creationTime = System.currentTimeMillis(); + } - public long getStartExecutionTime() { + public long getStartExecutionTime() { - return this.startExecutionTime; - } + return this.startExecutionTime; + } - public long howlongBeforeUnqueue() { + public long howlongBeforeUnqueue() { - return enqueueTime - creationTime; - } + return enqueueTime - creationTime; + } - public long howlongInQueue() { + public long howlongInQueue() { - return startExecutionTime - enqueueTime; - } + return startExecutionTime - enqueueTime; + } - public long howlongInExecution() { + public long howlongInExecution() { - return endExecutionTime - startExecutionTime; - } + return endExecutionTime - startExecutionTime; + } - protected void enqueueEvent() { + protected void enqueueEvent() { - this.enqueueTime = System.currentTimeMillis(); - } + this.enqueueTime = System.currentTimeMillis(); + } - protected void abortEvent() { + protected void abortEvent() { - this.abortingEventTime = System.currentTimeMillis(); - } + this.abortingEventTime = System.currentTimeMillis(); + } - protected void suspendEvent() { + protected void suspendEvent() { - this.suspendingEventTime = System.currentTimeMillis(); - } + this.suspendingEventTime = System.currentTimeMillis(); + } - protected void runEvent() { + protected void runEvent() { - this.startExecutionTime = System.currentTimeMillis(); - } + this.startExecutionTime = System.currentTimeMillis(); + } - protected void endEvent() { + protected void endEvent() { - this.endExecutionTime = System.currentTimeMillis(); - } + this.endExecutionTime = System.currentTimeMillis(); + } - protected String getName() { + protected String getName() { - return taskName; - } + return taskName; + } - public abstract void run(); + public abstract void run(); - public abstract int compareTo(Object o); + public abstract int compareTo(Object o); - public abstract boolean equals(Object o); + public abstract boolean equals(Object o); } diff --git a/src/main/java/it/grid/storm/scheduler/WorkerPool.java b/src/main/java/it/grid/storm/scheduler/WorkerPool.java index d3ce991c..8468d635 100644 --- a/src/main/java/it/grid/storm/scheduler/WorkerPool.java +++ b/src/main/java/it/grid/storm/scheduler/WorkerPool.java @@ -1,41 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.scheduler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - *

* Title:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Description:
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Copyright: Copyright (c) 2005-2007
- * &lt;/p&gt;
- *
- * &lt;p&gt;
- * Company: INFN-CNAF, Bologna, Italy
- * &lt;/p&gt;
- *
+ *
+ * &lt;p&gt;Description:
+ *
+ * &lt;p&gt;Copyright: Copyright (c) 2005-2007
+ *
+ * &lt;p&gt;
Company: INFN-CNAF, Bologna, Italy + * * @author Zappi Riccardo * @version 1.2 * @date - * */ - public class WorkerPool { public static final int DEFAULT_CORE_POOL_SIZE = 10; @@ -48,7 +36,6 @@ public class WorkerPool { private BlockingQueue taskQueue; private int queueSize = 100; - private long keepAliveTime = 10000; // 10 seconds. private TimeUnit unit = TimeUnit.MILLISECONDS; @@ -65,7 +52,6 @@ public WorkerPool(int poolSize, int maxPoolSize, int queueSize) { } /** - * * @param task Task * @throws SchedulerException */ @@ -87,7 +73,6 @@ public void submit(Task task) throws SchedulerException { } /** - * * @param task Task * @throws SchedulerException */ @@ -98,7 +83,7 @@ public void remove(Task task) { log.debug("Aborting task {}", task.getName()); if (workers.remove(task)) { // Remove task named "Future task" from internal Queue. - workers.purge(); + workers.purge(); } } diff --git a/src/main/java/it/grid/storm/space/DUResult.java b/src/main/java/it/grid/storm/space/DUResult.java index 7510606a..277e3702 100644 --- a/src/main/java/it/grid/storm/space/DUResult.java +++ b/src/main/java/it/grid/storm/space/DUResult.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; @@ -15,7 +14,6 @@ public class DUResult { private final Instant end; private final ExitStatus status; private final String detail; - /** * @param size @@ -24,8 +22,13 @@ public class DUResult { * @param durationTime * @param cmdResult */ - private DUResult(long sizeInBytes, String absRootPath, Instant start, Instant end, - ExitStatus status, String detail) { + private DUResult( + long sizeInBytes, + String absRootPath, + Instant start, + Instant end, + ExitStatus status, + String detail) { this.sizeInBytes = sizeInBytes; this.absRootPath = absRootPath; @@ -35,41 +38,31 @@ private DUResult(long sizeInBytes, String absRootPath, Instant start, Instant en this.detail = detail; } - /** - * @return the size - */ + /** @return the size */ public final long getSizeInBytes() { return sizeInBytes; } - /** - * @return the absRootPath - */ + /** @return the absRootPath */ public final String getAbsRootPath() { return absRootPath; } - /** - * @return the start @Instant - */ + /** @return the start @Instant */ public final Instant getStart() { return start; } - /** - * @return the end @Instant - */ + /** @return the end @Instant */ public final Instant getEnd() { return end; } - /** - * @return the end @Instant - */ + /** @return the end @Instant */ public final long getDurationInMillis() { return Duration.between(start, end).toMillis(); @@ -80,17 +73,13 @@ public boolean isSuccess() { return ExitStatus.SUCCESS.equals(status); } - /** - * @return the exit status - */ + /** @return the exit status */ public final ExitStatus getStatus() { return status; } - /** - * @return the exit status detailed message - */ + /** @return the exit status detailed message */ public final String getDetail() { return detail; @@ -120,11 +109,18 @@ public static DUResult success(String absRootPath, Instant start, Instant end, l return get(absRootPath, start, end, sizeInBytes, ExitStatus.SUCCESS, ""); } - public static DUResult failure(String absRootPath, Instant start, Instant end, String errorMessage) { + public static DUResult failure( + String absRootPath, Instant start, Instant end, String errorMessage) { return get(absRootPath, start, end, -1, 
ExitStatus.FAILURE, errorMessage); } - public static DUResult get(String absRootPath, Instant start, Instant end, long sizeInBytes, ExitStatus status, String statusMessage) { + public static DUResult get( + String absRootPath, + Instant start, + Instant end, + long sizeInBytes, + ExitStatus status, + String statusMessage) { return new DUResult(sizeInBytes, absRootPath, start, end, status, statusMessage); } } diff --git a/src/main/java/it/grid/storm/space/ExitStatus.java b/src/main/java/it/grid/storm/space/ExitStatus.java index 6b4bad4a..59f4828f 100644 --- a/src/main/java/it/grid/storm/space/ExitStatus.java +++ b/src/main/java/it/grid/storm/space/ExitStatus.java @@ -1,9 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; public enum ExitStatus { - SUCCESS, IO_ERROR, INTERRUPTED, TIMEOUT, UNDEFINED, PARTIAL_SUCCESS, FAILURE; + SUCCESS, + IO_ERROR, + INTERRUPTED, + TIMEOUT, + UNDEFINED, + PARTIAL_SUCCESS, + FAILURE; } diff --git a/src/main/java/it/grid/storm/space/IllegalSRMSpaceParameter.java b/src/main/java/it/grid/storm/space/IllegalSRMSpaceParameter.java index baf53297..654c7e47 100644 --- a/src/main/java/it/grid/storm/space/IllegalSRMSpaceParameter.java +++ b/src/main/java/it/grid/storm/space/IllegalSRMSpaceParameter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; @@ -9,32 +8,34 @@ import it.grid.storm.srm.types.TSizeInBytes; /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class IllegalSRMSpaceParameter extends Exception { - private boolean nullAuth = true; - private boolean nullSpaceDes = true; - private boolean nullRetentionPolicyInfo = true; + private boolean nullAuth = true; + private boolean nullSpaceDes = true; + private boolean nullRetentionPolicyInfo = true; - public IllegalSRMSpaceParameter(GridUserInterface guser, - TSizeInBytes spaceDes, TRetentionPolicyInfo retentionPolicyInfo) { + public IllegalSRMSpaceParameter( + GridUserInterface guser, TSizeInBytes spaceDes, TRetentionPolicyInfo retentionPolicyInfo) { - nullAuth = (guser == null); - nullSpaceDes = (spaceDes == null); - nullRetentionPolicyInfo = (retentionPolicyInfo == null); - } + nullAuth = (guser == null); + nullSpaceDes = (spaceDes == null); + nullRetentionPolicyInfo = (retentionPolicyInfo == null); + } - public String toString() { + public String toString() { - return "The Problem is: null-Auth= " + nullAuth + ", nullSpaceDesired= " - + nullSpaceDes + ", nullRetentionPolicyInfo= " + nullRetentionPolicyInfo; - } + return "The Problem is: null-Auth= " + + nullAuth + + ", nullSpaceDesired= " + + nullSpaceDes + + ", nullRetentionPolicyInfo= " + + nullRetentionPolicyInfo; + } } diff --git a/src/main/java/it/grid/storm/space/NullSpaceUpdaterHelper.java b/src/main/java/it/grid/storm/space/NullSpaceUpdaterHelper.java index a41bc12e..c069d6ad 100644 --- a/src/main/java/it/grid/storm/space/NullSpaceUpdaterHelper.java +++ b/src/main/java/it/grid/storm/space/NullSpaceUpdaterHelper.java @@ -1,31 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; +import it.grid.storm.namespace.model.VirtualFS; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.namespace.model.VirtualFS; - public class NullSpaceUpdaterHelper implements SpaceUpdaterHelperInterface { - private static final Logger log = LoggerFactory - .getLogger(NullSpaceUpdaterHelper.class); - - @Override - public boolean increaseUsedSpace(VirtualFS vfs, long size) { + private static final Logger log = LoggerFactory.getLogger(NullSpaceUpdaterHelper.class); - log.debug("NullSpaceUpdaterHelper doesn't increase used size!"); - return true; - } + @Override + public boolean increaseUsedSpace(VirtualFS vfs, long size) { - @Override - public boolean decreaseUsedSpace(VirtualFS vfs, long size) { + log.debug("NullSpaceUpdaterHelper doesn't increase used size!"); + return true; + } - log.debug("NullSpaceUpdaterHelper doesn't decrease used size!"); - return true; - } + @Override + public boolean decreaseUsedSpace(VirtualFS vfs, long size) { + log.debug("NullSpaceUpdaterHelper doesn't decrease used size!"); + return true; + } } diff --git a/src/main/java/it/grid/storm/space/SimpleSpaceUpdaterHelper.java b/src/main/java/it/grid/storm/space/SimpleSpaceUpdaterHelper.java index 096be813..3b339841 100644 --- a/src/main/java/it/grid/storm/space/SimpleSpaceUpdaterHelper.java +++ b/src/main/java/it/grid/storm/space/SimpleSpaceUpdaterHelper.java @@ -1,83 +1,75 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.namespace.model.VirtualFS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SimpleSpaceUpdaterHelper implements SpaceUpdaterHelperInterface { - private static final Logger log = LoggerFactory - .getLogger(SimpleSpaceUpdaterHelper.class); - - private ReservedSpaceCatalog rsc; - - public SimpleSpaceUpdaterHelper() { - rsc = new ReservedSpaceCatalog(); - } - - private StorageSpaceData getStorageSpaceDataForVFS(VirtualFS vfs) { - - return rsc.getStorageSpaceByAlias(vfs.getSpaceTokenDescription()); - } - - @Override - public boolean increaseUsedSpace(VirtualFS vfs, long size) { - - log.debug("Increase {} used space: {} bytes ", vfs.getAliasName(), size); - - if (size < 0) { - log.error("Size to add is a negative value: {}", size); - return false; - } - if (size == 0) { - log.debug("Size is zero, vfs {} used space won't be increased!", - vfs.getAliasName()); - return true; - } - - log.debug("Get StorageSpaceData from vfs ..."); - StorageSpaceData ssd = getStorageSpaceDataForVFS(vfs); - - if (ssd == null) { - log.error("Unable to get StorageSpaceData from alias name {}", - vfs.getAliasName()); - return false; - } - - return rsc.increaseUsedSpace(ssd.getSpaceToken().getValue(), size); - } - - @Override - public boolean decreaseUsedSpace(VirtualFS vfs, long size) { - - log.debug("Decrease {} used space: {} bytes ", vfs.getAliasName(), size); - - if (size < 0) { - log.error("Size to remove is a negative value: {}", size); - return false; - } - if (size == 0) { - log.debug("Size is zero, vfs {} used space won't be decreased!", - vfs.getAliasName()); - return true; - } - - log.debug("Get StorageSpaceData from vfs ..."); - StorageSpaceData ssd = getStorageSpaceDataForVFS(vfs); - - if (ssd == null) { - log.error("Unable to get StorageSpaceData from alias name {}", - vfs.getAliasName()); - return false; - } - - return rsc.decreaseUsedSpace(ssd.getSpaceToken().getValue(), size); - } + private static final Logger log = LoggerFactory.getLogger(SimpleSpaceUpdaterHelper.class); + + private ReservedSpaceCatalog rsc; + + public SimpleSpaceUpdaterHelper() { + rsc = new ReservedSpaceCatalog(); + } + + private StorageSpaceData getStorageSpaceDataForVFS(VirtualFS vfs) { + + return rsc.getStorageSpaceByAlias(vfs.getSpaceTokenDescription()); + } + + @Override + public boolean increaseUsedSpace(VirtualFS vfs, long size) { + + log.debug("Increase {} used space: {} bytes ", vfs.getAliasName(), size); + + if (size < 0) { + log.error("Size to add is a negative value: {}", size); + return false; + } + if (size == 0) { + log.debug("Size is zero, vfs {} used space won't be increased!", vfs.getAliasName()); + return true; + } + + log.debug("Get StorageSpaceData from vfs ..."); + StorageSpaceData ssd = getStorageSpaceDataForVFS(vfs); + + if (ssd == null) { + log.error("Unable to get StorageSpaceData from alias name {}", vfs.getAliasName()); + return false; + } + + return rsc.increaseUsedSpace(ssd.getSpaceToken().getValue(), size); + } + + @Override + public boolean decreaseUsedSpace(VirtualFS vfs, long size) { + + log.debug("Decrease {} used space: {} bytes ", vfs.getAliasName(), size); + + if (size < 0) { + log.error("Size to remove is a negative value: {}", size); + return false; + } + if (size == 0) { + log.debug("Size is zero, vfs {} used space won't be decreased!", vfs.getAliasName()); + return true; 
+ } + + log.debug("Get StorageSpaceData from vfs ..."); + StorageSpaceData ssd = getStorageSpaceDataForVFS(vfs); + + if (ssd == null) { + log.error("Unable to get StorageSpaceData from alias name {}", vfs.getAliasName()); + return false; + } + return rsc.decreaseUsedSpace(ssd.getSpaceToken().getValue(), size); + } } diff --git a/src/main/java/it/grid/storm/space/SpaceHelper.java b/src/main/java/it/grid/storm/space/SpaceHelper.java index ddbc40d6..9b80b8d1 100644 --- a/src/main/java/it/grid/storm/space/SpaceHelper.java +++ b/src/main/java/it/grid/storm/space/SpaceHelper.java @@ -1,15 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; -import java.util.Iterator; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.InvalidRetrievedDataException; import it.grid.storm.catalogs.InvalidSpaceDataAttributesException; import it.grid.storm.catalogs.MultipleDataEntriesException; @@ -31,368 +24,365 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.srm.types.TSpaceType; +import java.util.Iterator; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SpaceHelper { - private static final int ADD_FREE_SPACE = 0; - private static final int REMOVE_FREE_SPACE = 1; - private Configuration config; - private static final Logger log = LoggerFactory.getLogger(SpaceHelper.class); - public static GridUserInterface storageAreaOwner = GridUserManager - .makeSAGridUser(); - - public SpaceHelper() { - - config = Configuration.getInstance(); - } - - public boolean isSAFull(Logger log, StoRI stori) { - - log.debug("Checking if the Storage Area is full"); - - VirtualFS fs = stori.getVirtualFileSystem(); - ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); - - // Get StorageSpaceData from the database - String ssDesc = fs.getSpaceTokenDescription(); - StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); - - if ((spaceData != null) && (spaceData.getAvailableSpaceSize().value() == 0)) { - log.debug("AvailableSize={}" , spaceData.getAvailableSpaceSize().value()); - return true; - } else { - return false; - } - - } - - public long getSAFreeSpace(Logger log, StoRI stori) { - - log.debug("Checking if the Storage Area is full"); - - VirtualFS fs = stori.getVirtualFileSystem(); - ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); - - // Get StorageSpaceData from the database - String ssDesc = fs.getSpaceTokenDescription(); - StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); - - if (spaceData != null) { - return spaceData.getAvailableSpaceSize().value(); - } else { - return -1; - } - - } - - /** - * Verifies if the storage area to which the provided stori belongs has been - * initialized The verification is made on used space field - * - * @param log - * @param stori - * @return - */ - public boolean isSAInitialized(Logger log, StoRI stori) { - - log.debug("Checking if the Storage Area is initialized"); - if (stori == null) { - throw new IllegalArgumentException( - "Unable to perform the SA initialization check, provided null parameters: log : " - + log + " , stori : " + stori); - } - boolean response = false; - VirtualFS fs = stori.getVirtualFileSystem(); - ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); - // 
Get StorageSpaceData from the database - String ssDesc = fs.getSpaceTokenDescription(); - - StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); - - if (spaceData != null && spaceData.getUsedSpaceSize() != null - && !spaceData.getUsedSpaceSize().isEmpty() - && spaceData.getUsedSpaceSize().value() >= 0) { - - response = true; - } - log.debug("The storage area is initialized with token alias {} is {} initialized" - , spaceData.getSpaceTokenAlias() , (response ? "" : "not")); - return response; - } - - /** - * - * @param log - * @param stori - * @return - */ - public TSpaceToken getTokenFromStoRI(Logger log, StoRI stori) { - - log.debug("SpaceHelper: getting space token from StoRI"); - VirtualFS fs = stori.getVirtualFileSystem(); - return fs.getSpaceToken(); - - } - - /** - * Returns the spaceTokens associated to the 'user' AND 'spaceAlias'. If - * 'spaceAlias' is NULL or an empty string then this method returns all the - * space tokens this 'user' owns. - * - * @param user - * VomsGridUser user. - * @param spaceAlias - * User space token description. - */ - private Boolean isDefaultSpaceToken(TSpaceToken token) { - - Boolean found = false; - - config = Configuration.getInstance(); - List tokens = config.getListOfDefaultSpaceToken(); - for (int i = 0; i < tokens.size(); i++) { - if ((tokens.get(i)).toLowerCase().equals(token.getValue().toLowerCase())) { - found = true; - } - } - - return found; - } - - /** - * This method is used by the namespace parser component to insert a new Space - * Token Description data into the space catalog. In this way a standard Space - * Token is created, making it work for the GetSpaceMetaData request an - * SrmPreparateToPut with SpaceToken. - * - * The following code check if a SA_token with the same space description is - * already present into the catalog, if no data are found the new data are - * inserted, if yes the new data and the data already present are compared, - * and if needed an update operation is performed. - * - * The mandatory parameters are: - * - * @param spaceTokenAlias - * the space token description the user have to specify into the - * namespace.xml file - * @param totalOnLineSize - * the size the user have to specify into the namespace.xml file - * @param date - * @param spaceFileName - * the space file name will be used to get the free size. It is the - * StFNRoot. - */ - - public TSpaceToken createVOSA_Token(String spaceTokenAlias, - TSizeInBytes totalOnLineSize, String spaceFileName) { - - // TODO errors are not managed in this function - TSpaceToken spaceToken = null; - ArrayOfTSpaceToken tokenArray; - ReservedSpaceCatalog spaceCatalog = new ReservedSpaceCatalog(); - - // Try with fake user, if it does not work remove it and use different - // method - - // First, check if the same VOSpaceArea already exists - tokenArray = spaceCatalog.getSpaceTokensByAlias(spaceTokenAlias); - - if (tokenArray == null || tokenArray.size() == 0) { - // the VOSpaceArea does not exist yet - SpaceHelper.log.debug("VoSpaceArea {} still does not exists. Start creation process." 
, spaceTokenAlias); - - PFN sfname = null; - try { - sfname = PFN.make(spaceFileName); - } catch (InvalidPFNAttributeException e1) { - log.error("Error building PFN with {} : " , spaceFileName , e1); - } - - StorageSpaceData ssd = null; - - try { - ssd = new StorageSpaceData(storageAreaOwner, TSpaceType.VOSPACE, - spaceTokenAlias, totalOnLineSize, totalOnLineSize, - TLifeTimeInSeconds.makeInfinite(), null, null, sfname); - // ssd.setReservedSpaceSize(totalOnLineSize); - try { - ssd.setUnavailableSpaceSize(TSizeInBytes.make(0, SizeUnit.BYTES)); - ssd.setReservedSpaceSize(TSizeInBytes.make(0, SizeUnit.BYTES)); - - } catch (InvalidTSizeAttributesException e) { - // never thrown - log.error("Unexpected InvalidTSizeAttributesException: {}" - , e.getMessage(),e); - } - spaceToken = ssd.getSpaceToken(); - } catch (InvalidSpaceDataAttributesException e) { - log.error("Error building StorageSpaceData: " , e); - } - - try { - spaceCatalog.addStorageSpace(ssd); - } catch (DataAccessException e) { - log.error("Error storing StorageSpaceData on the DB: " , e); - } - // Track into global set to remove obsolete SA_token - ReservedSpaceCatalog.addSpaceToken(spaceToken); - - } else { - /* - * the VOspaceArea already exists. Compare new data and data already - * present to check if the parameter has changed or not, and then perform - * update operation into catalog if it is needed. Only static information - * changes determine an update of the exeisting row - */ - SpaceHelper.log.debug("VOSpaceArea for space token description " - + spaceTokenAlias + " already present into DB."); - - boolean equal = false; - spaceToken = tokenArray.getTSpaceToken(0); - StorageSpaceData catalog_ssd = null; - try { - catalog_ssd = spaceCatalog.getStorageSpace(spaceToken); - } catch (TransferObjectDecodingException e) { - log - .error("Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: {}" - , e.getMessage(),e); - } catch (DataAccessException e) { - log.error("Unable to build get StorageSpaceTO. DataAccessException: {}" - , e.getMessage(),e); - } - - if (catalog_ssd != null) { - - if (catalog_ssd.getOwner().getDn().equals(storageAreaOwner.getDn()) - && (catalog_ssd.getSpaceTokenAlias().equals(spaceTokenAlias)) - && (catalog_ssd.getTotalSpaceSize().value() == totalOnLineSize - .value()) - && (catalog_ssd.getSpaceFileName().toString().equals(spaceFileName))) { - equal = true; - } - - } - - // false otherwise - if (equal) { - // Do nothing if equals, everything are already present into - // the DB - SpaceHelper.log.debug("VOSpaceArea for space token description {} is already up to date." - , spaceTokenAlias); - ReservedSpaceCatalog.addSpaceToken(spaceToken); - - } else { - // If the new data has been modified, update the data into the - // catalog - SpaceHelper.log.debug("VOSpaceArea for space token description {} is different in some parameters. Updating the catalog." 
- , spaceTokenAlias); - try { - catalog_ssd.setOwner(storageAreaOwner); - catalog_ssd.setTotalSpaceSize(totalOnLineSize); - catalog_ssd.setTotalGuaranteedSize(totalOnLineSize); - - PFN sfn = null; - try { - sfn = PFN.make(spaceFileName); - } catch (InvalidPFNAttributeException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - catalog_ssd.setSpaceFileName(sfn); - - spaceCatalog.updateAllStorageSpace(catalog_ssd); - ReservedSpaceCatalog.addSpaceToken(spaceToken); - - } catch (NoDataFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InvalidRetrievedDataException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MultipleDataEntriesException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - // Warning. CHeck if there are multiple token with same alisa, this - // is not allowed - if (tokenArray.size() > 1) { - SpaceHelper.log - .error("Error: multiple Space Token found for the same space Alias: {}. Only one has been evaluated!" - , spaceTokenAlias); - } - - } - return spaceToken; - - } - - /** - * This method should be use at the end of the namespace insert process - * (through the createVO_SA_token(...)) to remmove from the database the old - * VO_SA_token inserted from the previous namsespace.xml configuration - * - */ - public void purgeOldVOSA_token() { - - purgeOldVOSA_token(SpaceHelper.log); - } - - public void purgeOldVOSA_token(Logger log) { - - ReservedSpaceCatalog spacec = new ReservedSpaceCatalog(); - log.debug("VO SA: garbage collecting obsolete VOSA_token"); - - Iterator iter = ReservedSpaceCatalog.getTokenSet().iterator(); - while (iter.hasNext()) { - log.debug("VO SA token REGISTRED: {}" , iter.next().getValue()); - } - - GridUserInterface stormServiceUser = GridUserManager.makeSAGridUser(); - - // Remove obsolete space - ArrayOfTSpaceToken token_a = spacec.getSpaceTokens(stormServiceUser, null); - for (int i = 0; i < token_a.size(); i++) { - log.debug("VO SA token IN CATALOG: {}" , token_a.getTSpaceToken(i).getValue()); - } - - if ((token_a != null) && (token_a.size() > 0)) { - for (int i = 0; i < token_a.size(); i++) { - - if (!ReservedSpaceCatalog.getTokenSet().contains( - token_a.getTSpaceToken(i))) { - // This VOSA_token is no more used, removing it from persistence - TSpaceToken tokenToRemove = token_a.getTSpaceToken(i); - log.debug("VO SA token {} is no more used, removing it from persistence." , tokenToRemove); - spacec.release(stormServiceUser, tokenToRemove); - } - } - } else { - log - .warn("Space Catalog garbage SA_Token: no SA TOKENs specified. 
Please check your namespace.xml file."); - } - - ReservedSpaceCatalog.clearTokenSet(); - - } - - /** - * @param spaceData - * @return - */ - public static boolean isStorageArea(StorageSpaceData spaceData) - throws IllegalArgumentException { - - if (spaceData == null) { - log.error("Received null spaceData parameter"); - throw new IllegalArgumentException("Received null spaceData parameter"); - } - boolean result = false; - if (spaceData.getOwner() != null) { - result = spaceData.getOwner().equals(SpaceHelper.storageAreaOwner); - } - return result; - } + private static final int ADD_FREE_SPACE = 0; + private static final int REMOVE_FREE_SPACE = 1; + private Configuration config; + private static final Logger log = LoggerFactory.getLogger(SpaceHelper.class); + public static GridUserInterface storageAreaOwner = GridUserManager.makeSAGridUser(); + + public SpaceHelper() { + + config = Configuration.getInstance(); + } + + public boolean isSAFull(Logger log, StoRI stori) { + + log.debug("Checking if the Storage Area is full"); + + VirtualFS fs = stori.getVirtualFileSystem(); + ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); + + // Get StorageSpaceData from the database + String ssDesc = fs.getSpaceTokenDescription(); + StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); + + if ((spaceData != null) && (spaceData.getAvailableSpaceSize().value() == 0)) { + log.debug("AvailableSize={}", spaceData.getAvailableSpaceSize().value()); + return true; + } else { + return false; + } + } + + public long getSAFreeSpace(Logger log, StoRI stori) { + + log.debug("Checking if the Storage Area is full"); + + VirtualFS fs = stori.getVirtualFileSystem(); + ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); + + // Get StorageSpaceData from the database + String ssDesc = fs.getSpaceTokenDescription(); + StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); + + if (spaceData != null) { + return spaceData.getAvailableSpaceSize().value(); + } else { + return -1; + } + } + + /** + * Verifies if the storage area to which the provided stori belongs has been initialized The + * verification is made on used space field + * + * @param log + * @param stori + * @return + */ + public boolean isSAInitialized(Logger log, StoRI stori) { + + log.debug("Checking if the Storage Area is initialized"); + if (stori == null) { + throw new IllegalArgumentException( + "Unable to perform the SA initialization check, provided null parameters: log : " + + log + + " , stori : " + + stori); + } + boolean response = false; + VirtualFS fs = stori.getVirtualFileSystem(); + ReservedSpaceCatalog catalog = new ReservedSpaceCatalog(); + // Get StorageSpaceData from the database + String ssDesc = fs.getSpaceTokenDescription(); + + StorageSpaceData spaceData = catalog.getStorageSpaceByAlias(ssDesc); + + if (spaceData != null + && spaceData.getUsedSpaceSize() != null + && !spaceData.getUsedSpaceSize().isEmpty() + && spaceData.getUsedSpaceSize().value() >= 0) { + + response = true; + } + log.debug( + "The storage area is initialized with token alias {} is {} initialized", + spaceData.getSpaceTokenAlias(), + (response ? "" : "not")); + return response; + } + + /** + * @param log + * @param stori + * @return + */ + public TSpaceToken getTokenFromStoRI(Logger log, StoRI stori) { + + log.debug("SpaceHelper: getting space token from StoRI"); + VirtualFS fs = stori.getVirtualFileSystem(); + return fs.getSpaceToken(); + } + + /** + * Returns the spaceTokens associated to the 'user' AND 'spaceAlias'. 
If 'spaceAlias' is NULL or + * an empty string then this method returns all the space tokens this 'user' owns. + * + * @param user VomsGridUser user. + * @param spaceAlias User space token description. + */ + private Boolean isDefaultSpaceToken(TSpaceToken token) { + + Boolean found = false; + + config = Configuration.getInstance(); + List tokens = config.getListOfDefaultSpaceToken(); + for (int i = 0; i < tokens.size(); i++) { + if ((tokens.get(i)).toLowerCase().equals(token.getValue().toLowerCase())) { + found = true; + } + } + + return found; + } + + /** + * This method is used by the namespace parser component to insert a new Space Token Description + * data into the space catalog. In this way a standard Space Token is created, making it work for + * the GetSpaceMetaData request an SrmPreparateToPut with SpaceToken. + * + *

The following code check if a SA_token with the same space description is already present + * into the catalog, if no data are found the new data are inserted, if yes the new data and the + * data already present are compared, and if needed an update operation is performed. + * + *

The mandatory parameters are: + * + * @param spaceTokenAlias the space token description the user have to specify into the + * namespace.xml file + * @param totalOnLineSize the size the user have to specify into the namespace.xml file + * @param date + * @param spaceFileName the space file name will be used to get the free size. It is the StFNRoot. + */ + public TSpaceToken createVOSA_Token( + String spaceTokenAlias, TSizeInBytes totalOnLineSize, String spaceFileName) { + + // TODO errors are not managed in this function + TSpaceToken spaceToken = null; + ArrayOfTSpaceToken tokenArray; + ReservedSpaceCatalog spaceCatalog = new ReservedSpaceCatalog(); + + // Try with fake user, if it does not work remove it and use different + // method + + // First, check if the same VOSpaceArea already exists + tokenArray = spaceCatalog.getSpaceTokensByAlias(spaceTokenAlias); + + if (tokenArray == null || tokenArray.size() == 0) { + // the VOSpaceArea does not exist yet + SpaceHelper.log.debug( + "VoSpaceArea {} still does not exists. Start creation process.", spaceTokenAlias); + + PFN sfname = null; + try { + sfname = PFN.make(spaceFileName); + } catch (InvalidPFNAttributeException e1) { + log.error("Error building PFN with {} : ", spaceFileName, e1); + } + + StorageSpaceData ssd = null; + + try { + ssd = + new StorageSpaceData( + storageAreaOwner, + TSpaceType.VOSPACE, + spaceTokenAlias, + totalOnLineSize, + totalOnLineSize, + TLifeTimeInSeconds.makeInfinite(), + null, + null, + sfname); + // ssd.setReservedSpaceSize(totalOnLineSize); + try { + ssd.setUnavailableSpaceSize(TSizeInBytes.make(0, SizeUnit.BYTES)); + ssd.setReservedSpaceSize(TSizeInBytes.make(0, SizeUnit.BYTES)); + + } catch (InvalidTSizeAttributesException e) { + // never thrown + log.error("Unexpected InvalidTSizeAttributesException: {}", e.getMessage(), e); + } + spaceToken = ssd.getSpaceToken(); + } catch (InvalidSpaceDataAttributesException e) { + log.error("Error building StorageSpaceData: ", e); + } + + try { + spaceCatalog.addStorageSpace(ssd); + } catch (DataAccessException e) { + log.error("Error storing StorageSpaceData on the DB: ", e); + } + // Track into global set to remove obsolete SA_token + ReservedSpaceCatalog.addSpaceToken(spaceToken); + + } else { + /* + * the VOspaceArea already exists. Compare new data and data already + * present to check if the parameter has changed or not, and then perform + * update operation into catalog if it is needed. Only static information + * changes determine an update of the exeisting row + */ + SpaceHelper.log.debug( + "VOSpaceArea for space token description " + + spaceTokenAlias + + " already present into DB."); + + boolean equal = false; + spaceToken = tokenArray.getTSpaceToken(0); + StorageSpaceData catalog_ssd = null; + try { + catalog_ssd = spaceCatalog.getStorageSpace(spaceToken); + } catch (TransferObjectDecodingException e) { + log.error( + "Unable to build StorageSpaceData from StorageSpaceTO. TransferObjectDecodingException: {}", + e.getMessage(), + e); + } catch (DataAccessException e) { + log.error("Unable to build get StorageSpaceTO. 
DataAccessException: {}", e.getMessage(), e); + } + + if (catalog_ssd != null) { + + if (catalog_ssd.getOwner().getDn().equals(storageAreaOwner.getDn()) + && (catalog_ssd.getSpaceTokenAlias().equals(spaceTokenAlias)) + && (catalog_ssd.getTotalSpaceSize().value() == totalOnLineSize.value()) + && (catalog_ssd.getSpaceFileName().toString().equals(spaceFileName))) { + equal = true; + } + } + + // false otherwise + if (equal) { + // Do nothing if equals, everything are already present into + // the DB + SpaceHelper.log.debug( + "VOSpaceArea for space token description {} is already up to date.", spaceTokenAlias); + ReservedSpaceCatalog.addSpaceToken(spaceToken); + + } else { + // If the new data has been modified, update the data into the + // catalog + SpaceHelper.log.debug( + "VOSpaceArea for space token description {} is different in some parameters. Updating the catalog.", + spaceTokenAlias); + try { + catalog_ssd.setOwner(storageAreaOwner); + catalog_ssd.setTotalSpaceSize(totalOnLineSize); + catalog_ssd.setTotalGuaranteedSize(totalOnLineSize); + + PFN sfn = null; + try { + sfn = PFN.make(spaceFileName); + } catch (InvalidPFNAttributeException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + catalog_ssd.setSpaceFileName(sfn); + + spaceCatalog.updateAllStorageSpace(catalog_ssd); + ReservedSpaceCatalog.addSpaceToken(spaceToken); + + } catch (NoDataFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (InvalidRetrievedDataException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (MultipleDataEntriesException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + // Warning. CHeck if there are multiple token with same alisa, this + // is not allowed + if (tokenArray.size() > 1) { + SpaceHelper.log.error( + "Error: multiple Space Token found for the same space Alias: {}. Only one has been evaluated!", + spaceTokenAlias); + } + } + return spaceToken; + } + + /** + * This method should be use at the end of the namespace insert process (through the + * createVO_SA_token(...)) to remmove from the database the old VO_SA_token inserted from the + * previous namsespace.xml configuration + */ + public void purgeOldVOSA_token() { + + purgeOldVOSA_token(SpaceHelper.log); + } + + public void purgeOldVOSA_token(Logger log) { + + ReservedSpaceCatalog spacec = new ReservedSpaceCatalog(); + log.debug("VO SA: garbage collecting obsolete VOSA_token"); + + Iterator iter = ReservedSpaceCatalog.getTokenSet().iterator(); + while (iter.hasNext()) { + log.debug("VO SA token REGISTRED: {}", iter.next().getValue()); + } + + GridUserInterface stormServiceUser = GridUserManager.makeSAGridUser(); + + // Remove obsolete space + ArrayOfTSpaceToken token_a = spacec.getSpaceTokens(stormServiceUser, null); + for (int i = 0; i < token_a.size(); i++) { + log.debug("VO SA token IN CATALOG: {}", token_a.getTSpaceToken(i).getValue()); + } + + if ((token_a != null) && (token_a.size() > 0)) { + for (int i = 0; i < token_a.size(); i++) { + + if (!ReservedSpaceCatalog.getTokenSet().contains(token_a.getTSpaceToken(i))) { + // This VOSA_token is no more used, removing it from persistence + TSpaceToken tokenToRemove = token_a.getTSpaceToken(i); + log.debug( + "VO SA token {} is no more used, removing it from persistence.", tokenToRemove); + spacec.release(stormServiceUser, tokenToRemove); + } + } + } else { + log.warn( + "Space Catalog garbage SA_Token: no SA TOKENs specified. 
Please check your namespace.xml file."); + } + + ReservedSpaceCatalog.clearTokenSet(); + } + + /** + * @param spaceData + * @return + */ + public static boolean isStorageArea(StorageSpaceData spaceData) throws IllegalArgumentException { + + if (spaceData == null) { + log.error("Received null spaceData parameter"); + throw new IllegalArgumentException("Received null spaceData parameter"); + } + boolean result = false; + if (spaceData.getOwner() != null) { + result = spaceData.getOwner().equals(SpaceHelper.storageAreaOwner); + } + return result; + } } diff --git a/src/main/java/it/grid/storm/space/SpaceUpdaterHelperFactory.java b/src/main/java/it/grid/storm/space/SpaceUpdaterHelperFactory.java index 6a6233ad..4844f945 100644 --- a/src/main/java/it/grid/storm/space/SpaceUpdaterHelperFactory.java +++ b/src/main/java/it/grid/storm/space/SpaceUpdaterHelperFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; @@ -8,34 +7,32 @@ import it.grid.storm.namespace.model.Quota; import it.grid.storm.namespace.model.VirtualFS; - public class SpaceUpdaterHelperFactory { - public static SpaceUpdaterHelperInterface getSpaceUpdaterHelper(VirtualFS vfs) { - - if (vfs == null) { - throw new IllegalArgumentException("VirtualFS null!"); - } - - String fsType = vfs.getFSType(); - Capability cap = null; - Quota quota = null; - - if (fsType != null) { - if (fsType.trim().toLowerCase().equals("gpfs")) { - cap = vfs.getCapabilities(); - if (cap != null) { - quota = cap.getQuota(); - } - if (quota != null) { - if ((quota.getDefined()) && (quota.getEnabled())) { - return new NullSpaceUpdaterHelper(); - } - } - } - } - - return new SimpleSpaceUpdaterHelper(); - } - + public static SpaceUpdaterHelperInterface getSpaceUpdaterHelper(VirtualFS vfs) { + + if (vfs == null) { + throw new IllegalArgumentException("VirtualFS null!"); + } + + String fsType = vfs.getFSType(); + Capability cap = null; + Quota quota = null; + + if (fsType != null) { + if (fsType.trim().toLowerCase().equals("gpfs")) { + cap = vfs.getCapabilities(); + if (cap != null) { + quota = cap.getQuota(); + } + if (quota != null) { + if ((quota.getDefined()) && (quota.getEnabled())) { + return new NullSpaceUpdaterHelper(); + } + } + } + } + + return new SimpleSpaceUpdaterHelper(); + } } diff --git a/src/main/java/it/grid/storm/space/SpaceUpdaterHelperInterface.java b/src/main/java/it/grid/storm/space/SpaceUpdaterHelperInterface.java index 0fd3c3bd..789866ba 100644 --- a/src/main/java/it/grid/storm/space/SpaceUpdaterHelperInterface.java +++ b/src/main/java/it/grid/storm/space/SpaceUpdaterHelperInterface.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; @@ -8,8 +7,7 @@ public interface SpaceUpdaterHelperInterface { - public boolean increaseUsedSpace(VirtualFS vfs, long size); - - public boolean decreaseUsedSpace(VirtualFS vfs, long size); + public boolean increaseUsedSpace(VirtualFS vfs, long size); + public boolean decreaseUsedSpace(VirtualFS vfs, long size); } diff --git a/src/main/java/it/grid/storm/space/StorageSpaceData.java b/src/main/java/it/grid/storm/space/StorageSpaceData.java index aba06f6b..e5c144a4 100644 --- a/src/main/java/it/grid/storm/space/StorageSpaceData.java +++ b/src/main/java/it/grid/storm/space/StorageSpaceData.java @@ -1,25 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. and must be storef into persistence. - * + * This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. and must be storef into + * persistence. + * * @author Magnoni Luca / Riccardo Zappi * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.space; -import java.util.Date; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.InvalidSpaceDataAttributesException; import it.grid.storm.common.types.InvalidPFNAttributeException; import it.grid.storm.common.types.PFN; @@ -38,6 +31,9 @@ import it.grid.storm.srm.types.TSpaceType; import it.grid.storm.srm.types.TStorageSystemInfo; import it.grid.storm.srm.types.TUserID; +import java.util.Date; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class StorageSpaceData { @@ -55,7 +51,7 @@ public class StorageSpaceData { private TSizeInBytes availableSpaceSize = null; // available size private boolean availableSpaceSizeForced = false; // available size = total - - // busy + // busy private TSizeInBytes usedSpaceSize = null; // used size // For now do not consider the reserved space, a better management is needed // private TSizeInBytes freeSpaceSize = null; // free size = total - used - @@ -65,19 +61,17 @@ public class StorageSpaceData { private TSizeInBytes unavailableSpaceSize = null; private TSizeInBytes reservedSpaceSize = null; // reserved size private TSizeInBytes busySpaceSize = null; // busy size = used + reserved + - // unavailable + // unavailable private boolean busySpaceSizeForced = false; private static final Logger log = LoggerFactory.getLogger(StorageSpaceData.class); - public StorageSpaceData() { - - } + public StorageSpaceData() {} /** * Used to create a new Storage Space entity. 
It could be a Storage Area Space (static * reservation) or a Space Reservation (dynamic space reservation) - * + * * @param guOwner * @param spaceType * @param spaceTokenAlias @@ -89,13 +83,22 @@ public StorageSpaceData() { * @param spaceFileName * @throws InvalidSpaceDataAttributesException */ - public StorageSpaceData(GridUserInterface guOwner, TSpaceType spaceType, String spaceTokenAlias, - TSizeInBytes totalDesiredSize, TSizeInBytes guaranteedSize, TLifeTimeInSeconds spaceLifetime, - TStorageSystemInfo storageInfo, Date date, PFN spaceFileName) + public StorageSpaceData( + GridUserInterface guOwner, + TSpaceType spaceType, + String spaceTokenAlias, + TSizeInBytes totalDesiredSize, + TSizeInBytes guaranteedSize, + TLifeTimeInSeconds spaceLifetime, + TStorageSystemInfo storageInfo, + Date date, + PFN spaceFileName) throws InvalidSpaceDataAttributesException { - boolean ok = (spaceType != null && ((guOwner != null) || (spaceType == TSpaceType.VOSPACE)) - && spaceTokenAlias != null); + boolean ok = + (spaceType != null + && ((guOwner != null) || (spaceType == TSpaceType.VOSPACE)) + && spaceTokenAlias != null); log.debug("Storage Space Data - User identity : {}", guOwner); log.debug("Storage Space Data - Space Type : {}", spaceType); log.debug("Storage Space Data - Space Token Alias : {}", spaceTokenAlias); @@ -126,7 +129,7 @@ public StorageSpaceData(GridUserInterface guOwner, TSpaceType spaceType, String /** * Constructor from Persistence Object Model - * + * * @param spaceData SpaceData */ public StorageSpaceData(StorageSpaceTO ssTO) { @@ -137,14 +140,16 @@ public StorageSpaceData(StorageSpaceTO ssTO) { throw new IllegalArgumentException("Received null argument"); } else { // Ownership of Storage Space - if (!(ssTO.getOwnerName() == null || ssTO.getVoName() == null + if (!(ssTO.getOwnerName() == null + || ssTO.getVoName() == null || ssTO.getVoName().equals(VO.NO_VO.getValue()))) { try { this.owner = GridUserManager.makeVOMSGridUser(ssTO.getOwnerName(), ssTO.getVoName()); } catch (IllegalArgumentException e) { log.error( "Unexpected error on voms grid user creation. 
Contact StoRM Support : IllegalArgumentException {}", - e.getMessage(), e); + e.getMessage(), + e); throw e; } } else { @@ -257,20 +262,22 @@ public StorageSpaceData(StorageSpaceTO ssTO) { } // FREE space - log.trace("StorageSpaceData - FREE (= available + unavailable) size : {}", - ssTO.getFreeSize()); + log.trace( + "StorageSpaceData - FREE (= available + unavailable) size : {}", ssTO.getFreeSize()); this.forceFreeSpaceSize(TSizeInBytes.makeEmpty()); if (ssTO.getFreeSize() >= 0) { try { this.forceFreeSpaceSize(TSizeInBytes.make(ssTO.getFreeSize(), SizeUnit.BYTES)); - log.trace("StorageSpaceData - FREE (= available + unavailable) size : {}", + log.trace( + "StorageSpaceData - FREE (= available + unavailable) size : {}", this.getFreeSpaceSize()); } catch (InvalidTSizeAttributesException ex3) { log.error("Error while constructing FreeSpaceSize", ex3); } } else { - log.trace("StorageSpaceData - FREE (= available + unavailable) size : EMPTY {}", + log.trace( + "StorageSpaceData - FREE (= available + unavailable) size : EMPTY {}", this.getFreeSpaceSize()); } @@ -289,19 +296,21 @@ public StorageSpaceData(StorageSpaceTO ssTO) { } // BUSY space - log.debug("StorageSpaceData - BUSY (= used + reserved + unavailable) size: {}", - ssTO.getBusySize()); + log.debug( + "StorageSpaceData - BUSY (= used + reserved + unavailable) size: {}", ssTO.getBusySize()); this.forceBusySpaceSize(TSizeInBytes.makeEmpty()); if (ssTO.getBusySize() >= 0) { try { this.forceBusySpaceSize(TSizeInBytes.make(ssTO.getBusySize(), SizeUnit.BYTES)); - log.trace("StorageSpaceData - BUSY (= used + reserved + unavailable) size: {}", + log.trace( + "StorageSpaceData - BUSY (= used + reserved + unavailable) size: {}", this.getBusySpaceSize()); } catch (InvalidTSizeAttributesException ex3) { log.error("Error while constructing BusySpaceSize", ex3); } } else { - log.trace("StorageSpaceData - BUSY (= used + reserved + unavailable) size: EMPTY {}", + log.trace( + "StorageSpaceData - BUSY (= used + reserved + unavailable) size: EMPTY {}", this.getBusySpaceSize()); } @@ -334,21 +343,15 @@ public StorageSpaceData(StorageSpaceTO ssTO) { log.trace("StorageSpaceData - Reserved : EMPTY {}", this.reservedSpaceSize); } } - } - /** - * @return - */ + /** @return */ public boolean isInitialized() { return !(usedSpaceSize.isEmpty() || unavailableSpaceSize.isEmpty()); } - /** - * Method that returns type of space specified in SRM request. - */ - + /** Method that returns type of space specified in SRM request. */ public TSpaceType getSpaceType() { return spaceType; @@ -359,10 +362,7 @@ public void setSpaceType(TSpaceType spaceType) { this.spaceType = spaceType; } - /** - * Method that returns the number of files in the SRM request that are currently in progress. - */ - + /** Method that returns the number of files in the SRM request that are currently in progress. 
*/ public String getSpaceTokenAlias() { return spaceTokenAlias; @@ -429,65 +429,49 @@ public Date getCreationDate() { return this.creationDate; } - /** - * @return the owner - */ + /** @return the owner */ public final GridUserInterface getOwner() { return owner; } - /** - * @param owner the owner to set - */ + /** @param owner the owner to set */ public final void setOwner(GridUserInterface owner) { this.owner = owner; } - /** - * @return the spaceLifetime - */ + /** @return the spaceLifetime */ public final TLifeTimeInSeconds getSpaceLifetime() { return spaceLifetime; } - /** - * @param spaceLifetime the spaceLifetime to set - */ + /** @param spaceLifetime the spaceLifetime to set */ public final void setSpaceLifetime(TLifeTimeInSeconds spaceLifetime) { this.spaceLifetime = spaceLifetime; } - /** - * @return the storageInfo - */ + /** @return the storageInfo */ public final TStorageSystemInfo getStorageInfo() { return storageInfo; } - /** - * @param storageInfo the storageInfo to set - */ + /** @param storageInfo the storageInfo to set */ public final void setStorageInfo(TStorageSystemInfo storageInfo) { this.storageInfo = storageInfo; } - /** - * @return the spaceDesired - */ + /** @return the spaceDesired */ public final TSizeInBytes getTotalSpaceSize() { return totalSpaceSize; } - /** - * @param spaceDesired the spaceDesired to set - */ + /** @param spaceDesired the spaceDesired to set */ public final void setTotalSpaceSize(TSizeInBytes spaceDesired) { this.totalSpaceSize = spaceDesired; @@ -496,17 +480,13 @@ public final void setTotalSpaceSize(TSizeInBytes spaceDesired) { this.unforceAvailableSpaceSize(); } - /** - * @return the spaceGuaranteed - */ + /** @return the spaceGuaranteed */ public final TSizeInBytes getReservedSpaceSize() { return reservedSpaceSize; } - /** - * @param spaceGuaranteed the spaceGuaranteed to set - */ + /** @param spaceGuaranteed the spaceGuaranteed to set */ public final void setReservedSpaceSize(TSizeInBytes spaceGuaranteed) { this.reservedSpaceSize = spaceGuaranteed; @@ -514,14 +494,14 @@ public final void setReservedSpaceSize(TSizeInBytes spaceGuaranteed) { this.unforceBusySpaceSize(); } - /** - * @return - */ + /** @return */ public final TSizeInBytes getFreeSpaceSize() { if (!freeSpaceSizeForced) { // For now do not consider the reserved space, a better management is needed - if (this.totalSpaceSize == null || this.totalSpaceSize.isEmpty() || this.usedSpaceSize == null + if (this.totalSpaceSize == null + || this.totalSpaceSize.isEmpty() + || this.usedSpaceSize == null || this.usedSpaceSize.isEmpty()) { this.freeSpaceSize = TSizeInBytes.makeEmpty(); } else { @@ -543,34 +523,26 @@ public final TSizeInBytes getFreeSpaceSize() { return this.freeSpaceSize; } - /** - * @param freeSpaceSize - */ + /** @param freeSpaceSize */ private final void forceFreeSpaceSize(TSizeInBytes freeSpaceSize) { this.freeSpaceSizeForced = true; this.freeSpaceSize = freeSpaceSize; } - /** - * - */ + /** */ private final void unforceFreeSpaceSize() { this.freeSpaceSizeForced = false; } - /** - * @return the usedSpaceSize - */ + /** @return the usedSpaceSize */ public final TSizeInBytes getUsedSpaceSize() { return usedSpaceSize; } - /** - * @param usedSpaceSize the usedSpaceSize to set - */ + /** @param usedSpaceSize the usedSpaceSize to set */ public final void setUsedSpaceSize(TSizeInBytes usedSpaceSize) { this.usedSpaceSize = usedSpaceSize; @@ -621,20 +593,15 @@ private final void updateAvailableSize() { log.error(e.getMessage(), e); } } - } - /** - * @return the 
unavailableSpaceSize - */ + /** @return the unavailableSpaceSize */ public final TSizeInBytes getUnavailableSpaceSize() { return unavailableSpaceSize; } - /** - * @param unavailableSpaceSize the unavailableSpaceSize to set - */ + /** @param unavailableSpaceSize the unavailableSpaceSize to set */ public final void setUnavailableSpaceSize(TSizeInBytes unavailableSpaceSize) { this.unavailableSpaceSize = unavailableSpaceSize; @@ -642,20 +609,25 @@ public final void setUnavailableSpaceSize(TSizeInBytes unavailableSpaceSize) { this.unforceBusySpaceSize(); } - /** - * @return - */ + /** @return */ public final TSizeInBytes getBusySpaceSize() { if (!this.busySpaceSizeForced) { - if (this.usedSpaceSize == null || this.usedSpaceSize.isEmpty() - || this.unavailableSpaceSize == null || this.unavailableSpaceSize.isEmpty() - || this.reservedSpaceSize == null || this.reservedSpaceSize.isEmpty()) { + if (this.usedSpaceSize == null + || this.usedSpaceSize.isEmpty() + || this.unavailableSpaceSize == null + || this.unavailableSpaceSize.isEmpty() + || this.reservedSpaceSize == null + || this.reservedSpaceSize.isEmpty()) { this.busySpaceSize = TSizeInBytes.makeEmpty(); } else { try { - this.busySpaceSize = TSizeInBytes.make(this.usedSpaceSize.value() - + this.unavailableSpaceSize.value() + this.reservedSpaceSize.value(), SizeUnit.BYTES); + this.busySpaceSize = + TSizeInBytes.make( + this.usedSpaceSize.value() + + this.unavailableSpaceSize.value() + + this.reservedSpaceSize.value(), + SizeUnit.BYTES); } catch (InvalidTSizeAttributesException e) { log.warn("Unable to create a valid Busy Size, used empty one"); this.busySpaceSize = TSizeInBytes.makeEmpty(); @@ -665,22 +637,24 @@ public final TSizeInBytes getBusySpaceSize() { return this.busySpaceSize; } - /** - * @return the availableSpaceSize - */ + /** @return the availableSpaceSize */ public final TSizeInBytes getAvailableSpaceSize() { if (!this.availableSpaceSizeForced) { - if (this.totalSpaceSize == null || this.totalSpaceSize.isEmpty() + if (this.totalSpaceSize == null + || this.totalSpaceSize.isEmpty() || this.getBusySpaceSize().isEmpty()) { this.availableSpaceSize = TSizeInBytes.makeEmpty(); } else { try { - this.availableSpaceSize = TSizeInBytes - .make(this.totalSpaceSize.value() - this.getBusySpaceSize().value(), SizeUnit.BYTES); + this.availableSpaceSize = + TSizeInBytes.make( + this.totalSpaceSize.value() - this.getBusySpaceSize().value(), SizeUnit.BYTES); } catch (InvalidTSizeAttributesException e) { - log.warn("Unable to produce the TSizeInBytes object from '{}' and '{}'", - (this.totalSpaceSize.value() - this.getBusySpaceSize().value()), SizeUnit.BYTES); + log.warn( + "Unable to produce the TSizeInBytes object from '{}' and '{}'", + (this.totalSpaceSize.value() - this.getBusySpaceSize().value()), + SizeUnit.BYTES); this.availableSpaceSize = TSizeInBytes.makeEmpty(); } } @@ -688,34 +662,26 @@ public final TSizeInBytes getAvailableSpaceSize() { return this.availableSpaceSize; } - /** - * @param availableSpaceSize - */ + /** @param availableSpaceSize */ public final void forceAvailableSpaceSize(TSizeInBytes availableSpaceSize) { this.availableSpaceSizeForced = true; this.availableSpaceSize = availableSpaceSize; } - /** - * - */ + /** */ private final void unforceAvailableSpaceSize() { this.availableSpaceSizeForced = false; } - /** - * @param totalGuaranteedSize the totalGuaranteedSize to set - */ + /** @param totalGuaranteedSize the totalGuaranteedSize to set */ public void setTotalGuaranteedSize(TSizeInBytes totalGuaranteedSize) { 
this.totalGuaranteedSize = totalGuaranteedSize; } - /** - * @return the totalGuaranteedSize - */ + /** @return the totalGuaranteedSize */ public TSizeInBytes getTotalGuaranteedSize() { return totalGuaranteedSize; @@ -727,9 +693,7 @@ private final void forceBusySpaceSize(TSizeInBytes busySpaceSize) { this.busySpaceSize = busySpaceSize; } - /** - * - */ + /** */ private final void unforceBusySpaceSize() { this.busySpaceSizeForced = false; @@ -737,9 +701,7 @@ private final void unforceBusySpaceSize() { this.unforceAvailableSpaceSize(); } - /** - * @param creationDate the creationDate to set - */ + /** @param creationDate the creationDate to set */ public final void setCreationDate(Date creationDate) { this.creationDate = creationDate; @@ -748,7 +710,7 @@ public final void setCreationDate(Date creationDate) { /** * This method is used to verify if the Space Reservation is expired, so the lifetime is no more * valid. - * + * * @return true if expired, false otherwise. */ public boolean isExpired() { @@ -766,7 +728,7 @@ public boolean isExpired() { /* * (non-Javadoc) - * + * * @see java.lang.Object#toString() */ @Override @@ -847,19 +809,13 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; StorageSpaceData other = (StorageSpaceData) obj; if (spaceToken == null) { - if (other.spaceToken != null) - return false; - } else if (!spaceToken.equals(other.spaceToken)) - return false; + if (other.spaceToken != null) return false; + } else if (!spaceToken.equals(other.spaceToken)) return false; return true; } - } diff --git a/src/main/java/it/grid/storm/space/StorageSpaceNotInitializedException.java b/src/main/java/it/grid/storm/space/StorageSpaceNotInitializedException.java index 992bb0e9..d999e099 100644 --- a/src/main/java/it/grid/storm/space/StorageSpaceNotInitializedException.java +++ b/src/main/java/it/grid/storm/space/StorageSpaceNotInitializedException.java @@ -1,22 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class StorageSpaceNotInitializedException extends Exception { - /** - * - */ - private static final long serialVersionUID = 6322292500826011705L; + /** */ + private static final long serialVersionUID = 6322292500826011705L; - public StorageSpaceNotInitializedException(String string) { + public StorageSpaceNotInitializedException(String string) { - super(string); - } + super(string); + } } diff --git a/src/main/java/it/grid/storm/space/gpfsquota/GPFSFilesetQuotaInfo.java b/src/main/java/it/grid/storm/space/gpfsquota/GPFSFilesetQuotaInfo.java index 5bce7fc6..f48b190b 100644 --- a/src/main/java/it/grid/storm/space/gpfsquota/GPFSFilesetQuotaInfo.java +++ b/src/main/java/it/grid/storm/space/gpfsquota/GPFSFilesetQuotaInfo.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space.gpfsquota; @@ -10,23 +9,23 @@ public interface GPFSFilesetQuotaInfo { - public String getFilesetName(); + public String getFilesetName(); - public long getBlockUsage(); + public long getBlockUsage(); - public TSizeInBytes getBlockUsageAsTSize(); + public TSizeInBytes getBlockUsageAsTSize(); - public long getBlockHardLimit(); + public long getBlockHardLimit(); - public TSizeInBytes getBlockHardLimitAsTSize(); + public TSizeInBytes getBlockHardLimitAsTSize(); - public long getBlockSoftLimit(); + public long getBlockSoftLimit(); - public TSizeInBytes getBlockSoftLimitAsTSize(); + public TSizeInBytes getBlockSoftLimitAsTSize(); - public boolean isQuotaEnabled(); + public boolean isQuotaEnabled(); - public VirtualFS getVFS(); + public VirtualFS getVFS(); - public SizeUnit getSizeUnit(); + public SizeUnit getSizeUnit(); } diff --git a/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaInfo.java b/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaInfo.java index 8c7adaa5..6c5cafc1 100644 --- a/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaInfo.java +++ b/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaInfo.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space.gpfsquota; @@ -10,124 +9,124 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.util.GPFSSizeHelper; -/** - * Describes information about quota block limits on a GPFS fileset. - * - * - */ +/** Describes information about quota block limits on a GPFS fileset. */ public class GPFSQuotaInfo implements GPFSFilesetQuotaInfo { - public static GPFSQuotaInfo fromNativeQuotaInfo(VirtualFS fs, quota_info qi) { - - return new GPFSQuotaInfo(fs, qi); - } + public static GPFSQuotaInfo fromNativeQuotaInfo(VirtualFS fs, quota_info qi) { - private long blockHardLimit; - private long blockSoftLimit; - private long blockUsage; + return new GPFSQuotaInfo(fs, qi); + } - private String filesetName; - private boolean quotaEnabled = false; - private VirtualFS VFS; + private long blockHardLimit; + private long blockSoftLimit; + private long blockUsage; - private GPFSQuotaInfo(VirtualFS fs, quota_info qi) { + private String filesetName; + private boolean quotaEnabled = false; + private VirtualFS VFS; - this.VFS = fs; - this.filesetName = qi.getFileset_name(); - this.blockUsage = qi.getBlock_usage(); - this.blockHardLimit = qi.getBlock_hard_limit(); - this.blockSoftLimit = qi.getBlock_soft_limit(); - this.quotaEnabled = true; - } + private GPFSQuotaInfo(VirtualFS fs, quota_info qi) { - public long getBlockHardLimit() { + this.VFS = fs; + this.filesetName = qi.getFileset_name(); + this.blockUsage = qi.getBlock_usage(); + this.blockHardLimit = qi.getBlock_hard_limit(); + this.blockSoftLimit = qi.getBlock_soft_limit(); + this.quotaEnabled = true; + } - return blockHardLimit; - } + public long getBlockHardLimit() { - public long getBlockSoftLimit() { + return blockHardLimit; + } - return blockSoftLimit; - } + public long getBlockSoftLimit() { - public long getBlockUsage() { + return blockSoftLimit; + } - return blockUsage; - } + public long getBlockUsage() { - public String getFilesetName() { + return blockUsage; + } - return filesetName; - } + public String getFilesetName() { - @Override - public SizeUnit getSizeUnit() { + return filesetName; + } - return 
SizeUnit.BYTES; - } + @Override + public SizeUnit getSizeUnit() { - public VirtualFS getVFS() { + return SizeUnit.BYTES; + } - return VFS; - } + public VirtualFS getVFS() { - @Override - public boolean isQuotaEnabled() { + return VFS; + } - return quotaEnabled; - } + @Override + public boolean isQuotaEnabled() { - public void setBlockHardLimit(long blockHardLimit) { + return quotaEnabled; + } - this.blockHardLimit = blockHardLimit; - } + public void setBlockHardLimit(long blockHardLimit) { - public void setBlockSoftLimit(long blockSoftLimit) { + this.blockHardLimit = blockHardLimit; + } - this.blockSoftLimit = blockSoftLimit; - } + public void setBlockSoftLimit(long blockSoftLimit) { - public void setBlockUsage(long blockUsage) { + this.blockSoftLimit = blockSoftLimit; + } - this.blockUsage = blockUsage; - } + public void setBlockUsage(long blockUsage) { - public void setFilesetName(String filesetName) { + this.blockUsage = blockUsage; + } - this.filesetName = filesetName; - } + public void setFilesetName(String filesetName) { - public void setVFS(VirtualFS vFS) { + this.filesetName = filesetName; + } - VFS = vFS; - } + public void setVFS(VirtualFS vFS) { - @Override - public String toString() { - return "GPFSQuotaInfo [filesetName=" + filesetName + ", blockUsage=" - + getBlockUsageAsTSize() + ", blockHardLimit=" + getBlockHardLimitAsTSize() + ", blockSoftLimit=" - + getBlockSoftLimitAsTSize() + ", quotaEnabled=" + quotaEnabled + "]"; - } + VFS = vFS; + } - @Override - public TSizeInBytes getBlockUsageAsTSize() { + @Override + public String toString() { + return "GPFSQuotaInfo [filesetName=" + + filesetName + + ", blockUsage=" + + getBlockUsageAsTSize() + + ", blockHardLimit=" + + getBlockHardLimitAsTSize() + + ", blockSoftLimit=" + + getBlockSoftLimitAsTSize() + + ", quotaEnabled=" + + quotaEnabled + + "]"; + } - return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockUsage()), - getSizeUnit()); - } + @Override + public TSizeInBytes getBlockUsageAsTSize() { - @Override - public TSizeInBytes getBlockHardLimitAsTSize() { + return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockUsage()), getSizeUnit()); + } - return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockHardLimit()), - getSizeUnit()); - } + @Override + public TSizeInBytes getBlockHardLimitAsTSize() { - @Override - public TSizeInBytes getBlockSoftLimitAsTSize() { + return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockHardLimit()), getSizeUnit()); + } - return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockSoftLimit()), - getSizeUnit()); - } + @Override + public TSizeInBytes getBlockSoftLimitAsTSize() { + return TSizeInBytes.make(GPFSSizeHelper.getBytesFromKIB(getBlockSoftLimit()), getSizeUnit()); + } } diff --git a/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaManager.java b/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaManager.java index 82310d1b..19205c6d 100644 --- a/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaManager.java +++ b/src/main/java/it/grid/storm/space/gpfsquota/GPFSQuotaManager.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space.gpfsquota; @@ -15,7 +14,6 @@ import it.grid.storm.space.StorageSpaceData; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.util.VirtualFSHelper; - import java.util.List; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutionException; @@ -24,7 +22,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,19 +30,17 @@ * that fetch quota information from gpfs fs and update the space area data on the Storm database. * If no quota limits are enforced for a given fileset, the total size is computed starting from the * free space available on the filesystem. - * - * The manager must be started with the {@link #start()} method, and shutdown with the - * {@link #shutdown()} method. - * - * Quota calculation can also be triggered with the {@link #triggerComputeQuotas()} method. - * - * - * This is a singleton. - * + * + *
<p>
The manager must be started with the {@link #start()} method, and shutdown with the {@link + * #shutdown()} method. + * + *
<p>
Quota calculation can also be triggered with the {@link #triggerComputeQuotas()} method. + * + *
<p>
This is a singleton. + * * @author Andrea Ceccanti */ public enum GPFSQuotaManager { - INSTANCE; private static final Logger log = LoggerFactory.getLogger(GPFSQuotaManager.class); @@ -56,62 +51,44 @@ public enum GPFSQuotaManager { */ private static final long DEFAULT_RELAX_PERIOD = 15000; - /** - * The submitter execution service - */ + /** The submitter execution service */ private ScheduledExecutorService submitterExecutionService; - /** - * The quota workers execution service - */ + /** The quota workers execution service */ private ExecutorService quotaWorkersExecutionService; - /** - * The completion service used to block and wait for the submitted tasks. - */ + /** The completion service used to block and wait for the submitted tasks. */ private CompletionService quotaService; - - - /** - * The list of GPFS filesystems which have quota enabled. - */ + /** The list of GPFS filesystems which have quota enabled. */ private List quotaEnabledFilesystems; - /** - * The last exception thrown by a GPFS quota calculation job. - */ + /** The last exception thrown by a GPFS quota calculation job. */ private Throwable lastFailure = null; - /** - * The time when the last quota calculation job was submitted. - */ + /** The time when the last quota calculation job was submitted. */ private long lastSubmissionTime = 0L; - /** - * A lock to sync access to {@link #lastSubmissionTime} - */ + /** A lock to sync access to {@link #lastSubmissionTime} */ private Object submissionTimeLock = new Object(); - - private void configureExecutionService() { submitterExecutionService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("GPFSQuotaSubmitter")); - quotaWorkersExecutionService = Executors.newFixedThreadPool(quotaEnabledFilesystems.size(), - new NamedThreadFactory("GPFSQuotaWorker")); + quotaWorkersExecutionService = + Executors.newFixedThreadPool( + quotaEnabledFilesystems.size(), new NamedThreadFactory("GPFSQuotaWorker")); - quotaService = - new ExecutorCompletionService<>(quotaWorkersExecutionService); + quotaService = new ExecutorCompletionService<>(quotaWorkersExecutionService); long refreshPeriod = Configuration.getInstance().getGPFSQuotaRefreshPeriod(); log.info("GPFSQuotaManager refresh period (in seconds): {}", refreshPeriod); - submitterExecutionService.scheduleWithFixedDelay(new QuotaJobSubmitter(), 0, refreshPeriod, - TimeUnit.SECONDS); + submitterExecutionService.scheduleWithFixedDelay( + new QuotaJobSubmitter(), 0, refreshPeriod, TimeUnit.SECONDS); } public synchronized void start() { @@ -125,7 +102,6 @@ public synchronized void start() { } configureExecutionService(); - } class QuotaJobSubmitter implements Runnable { @@ -184,15 +160,14 @@ private void handleQuotaInfo(GPFSFilesetQuotaInfo info) { persistStorageSpaceData(ssd); log.debug("Persisted storage space data for quota info: {}", info); } catch (DataAccessException e) { - log.error("Storage space data for quota info {} not persisted: {}", info, e.getMessage(), - e); + log.error( + "Storage space data for quota info {} not persisted: {}", info, e.getMessage(), e); } - } private void handleNoLimitsQuota(GPFSFilesetQuotaInfo info, StorageSpaceData ssd) { - log.debug("Quota enabled on fs rooted at {} but no limits enforced.", - info.getVFS().getRootPath()); + log.debug( + "Quota enabled on fs rooted at {} but no limits enforced.", info.getVFS().getRootPath()); try { @@ -203,19 +178,24 @@ private void handleNoLimitsQuota(GPFSFilesetQuotaInfo info, StorageSpaceData ssd ssd.setTotalSpaceSize(freeSizeInBytes); } catch 
(FilesystemError e) { - log.error("Error computing free space on fs rooted at {}. {}", info.getVFS().getRootPath(), - e.getMessage(), e); + log.error( + "Error computing free space on fs rooted at {}. {}", + info.getVFS().getRootPath(), + e.getMessage(), + e); ssd.setTotalGuaranteedSize(null); ssd.setTotalSpaceSize(null); } catch (NamespaceException e) { - log.error("Error accessing fs driver for fs rooted at {}. {}", info.getVFS().getRootPath(), - e.getMessage(), e); + log.error( + "Error accessing fs driver for fs rooted at {}. {}", + info.getVFS().getRootPath(), + e.getMessage(), + e); ssd.setTotalGuaranteedSize(null); ssd.setTotalSpaceSize(null); - } } @@ -257,7 +237,6 @@ public synchronized void triggerComputeQuotas() { } } - public synchronized Throwable getLastFailure() { return lastFailure; @@ -271,11 +250,8 @@ public synchronized void resetFailure() { public synchronized void shutdown() { log.info("GPFSQuotaManager shutting down..."); - if (submitterExecutionService != null) - submitterExecutionService.shutdownNow(); + if (submitterExecutionService != null) submitterExecutionService.shutdownNow(); - if (quotaWorkersExecutionService != null) - quotaWorkersExecutionService.shutdownNow(); + if (quotaWorkersExecutionService != null) quotaWorkersExecutionService.shutdownNow(); } - } diff --git a/src/main/java/it/grid/storm/space/gpfsquota/GetGPFSFilesetQuotaInfoCommand.java b/src/main/java/it/grid/storm/space/gpfsquota/GetGPFSFilesetQuotaInfoCommand.java index f9e9df91..c541ae3e 100644 --- a/src/main/java/it/grid/storm/space/gpfsquota/GetGPFSFilesetQuotaInfoCommand.java +++ b/src/main/java/it/grid/storm/space/gpfsquota/GetGPFSFilesetQuotaInfoCommand.java @@ -1,58 +1,46 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space.gpfsquota; +import it.grid.storm.filesystem.swig.gpfs; +import it.grid.storm.namespace.model.VirtualFS; import java.util.concurrent.Callable; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import it.grid.storm.filesystem.swig.gpfs; -import it.grid.storm.namespace.model.VirtualFS; -/** - * Computes GPFS fileset quota by leveraging {@link gpfs#get_fileset_quota_info(String)}. - * - * - */ -public class GetGPFSFilesetQuotaInfoCommand implements - Callable { +/** Computes GPFS fileset quota by leveraging {@link gpfs#get_fileset_quota_info(String)}. 
*/ +public class GetGPFSFilesetQuotaInfoCommand implements Callable { + + private static final Logger log = LoggerFactory.getLogger(GetGPFSFilesetQuotaInfoCommand.class); - private static final Logger log = LoggerFactory - .getLogger(GetGPFSFilesetQuotaInfoCommand.class); + private VirtualFS vfs; - private VirtualFS vfs; + public GetGPFSFilesetQuotaInfoCommand(VirtualFS vfs) { - public GetGPFSFilesetQuotaInfoCommand(VirtualFS vfs) { + this.vfs = vfs; + } - this.vfs = vfs; - } + @Override + public GPFSFilesetQuotaInfo call() throws Exception { - @Override - public GPFSFilesetQuotaInfo call() throws Exception { + String fsRoot = vfs.getRootPath(); + log.debug("GPFS quota info command running for fs rooted at {}", fsRoot); - String fsRoot = vfs.getRootPath(); - log.debug("GPFS quota info command running for fs rooted at {}", fsRoot); + if (!(vfs.getFSDriverInstance() instanceof gpfs)) + throw new IllegalArgumentException("VFS driver is not GPFS for fs rooted at " + fsRoot); - if (!(vfs.getFSDriverInstance() instanceof gpfs)) - throw new IllegalArgumentException( - "VFS driver is not GPFS for fs rooted at " + fsRoot); + gpfs fs = (gpfs) vfs.getFSDriverInstance(); - gpfs fs = (gpfs) vfs.getFSDriverInstance(); + if (!fs.is_quota_enabled(fsRoot)) { + log.error("GPFS Quota not enabled on fileset rooted at {}", fsRoot); + return null; + } - if (!fs.is_quota_enabled(fsRoot)) { - log.error("GPFS Quota not enabled on fileset rooted at {}", fsRoot); - return null; - } + GPFSQuotaInfo info = GPFSQuotaInfo.fromNativeQuotaInfo(vfs, fs.get_fileset_quota_info(fsRoot)); - GPFSQuotaInfo info = GPFSQuotaInfo.fromNativeQuotaInfo(vfs, - fs.get_fileset_quota_info(fsRoot)); - - log.debug("Computed GPFS fileset quota info for fs rooted at {}: {}", - fsRoot, info); - - return info; - } + log.debug("Computed GPFS fileset quota info for fs rooted at {}: {}", fsRoot, info); + return info; + } } diff --git a/src/main/java/it/grid/storm/space/init/UsedSpaceFile.java b/src/main/java/it/grid/storm/space/init/UsedSpaceFile.java index 604a722d..5a7a8a1c 100644 --- a/src/main/java/it/grid/storm/space/init/UsedSpaceFile.java +++ b/src/main/java/it/grid/storm/space/init/UsedSpaceFile.java @@ -1,9 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.space.init; +import com.google.common.base.Preconditions; import java.io.File; import java.io.IOException; import java.text.ParseException; @@ -11,162 +11,162 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; - import org.ini4j.Ini; import org.ini4j.InvalidFileFormatException; import org.ini4j.Profile.Section; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Preconditions; - public class UsedSpaceFile { - private static Logger log = LoggerFactory.getLogger(UsedSpaceFile.class); - - private static final String PATTERN_RFC2822 = "EEE, dd MMM yyyy HH:mm:ss Z"; - private static final String PATTERN_DEFAULT = "EEE MMM dd HH:mm:ss z yyyy"; - - private enum Key { - usedsize, checktime - } - - private Ini iniFile; - - public UsedSpaceFile(String iniFilePath) throws InvalidFileFormatException, - IOException { - - Preconditions.checkNotNull(iniFilePath, "Received null iniFilePath"); - iniFile = new Ini(new File(iniFilePath)); - } - - public File getIniFile() { - - return iniFile.getFile(); - } - - public List getDefinedSA() { - - return new ArrayList(iniFile.keySet()); - } - - public SaUsedSize getSAUsedSize(String saName) { - - Preconditions.checkNotNull(saName,"Received null saName parameter"); - Preconditions.checkArgument(hasSA(saName), - saName + " section not found into used-space ini file"); - - Section section = iniFile.get(saName); - Long usedSpace = null; - Date updateTime = null; - for (String optionKey : section.keySet()) { - switch (Key.valueOf(optionKey)) { - case usedsize: - try { - usedSpace = Long.parseLong(section.get(optionKey)); - } catch (NumberFormatException e) { - log.error( - "{}.{} is not a valid Long value. NumberFormatException: {}", - saName, optionKey, e.getMessage()); - } - break; - case checktime: - try { - updateTime = parseDate(section.get(optionKey)); - } catch (ParseException e) { - log.error("{}.{} is not a valid Date value. ParseException: {}", - saName, optionKey, e.getMessage()); - } - break; - default: - log.error("{}.{} is not recognized as a valid key.", saName, optionKey); - break; - } - } - return new SaUsedSize(saName, usedSpace, updateTime); - } - - public boolean hasSA(String saName) { - - return getDefinedSA().contains(saName); - } - - /** - * @param dateStr - * @return - * @throws ParseException - */ - private Date parseDate(String dateStr) throws ParseException { - - Preconditions.checkNotNull(dateStr,"Received null dateStr parameter"); - - SimpleDateFormat formatRFC2822 = new SimpleDateFormat(PATTERN_RFC2822); - SimpleDateFormat formatDefault = new SimpleDateFormat(PATTERN_DEFAULT); - - try { - return formatRFC2822.parse(dateStr); - } catch (ParseException e) { - log.debug("Unable to parse date {} using RFC2822 " - + "formatter.ParseException: {} Attempting with default formatter", - dateStr, e.getMessage(), e); - try { - return formatDefault.parse(dateStr); - } catch (ParseException e2) { - log.warn("Unable to parse the date {} with default formatter. 
" - + "ParseException: {}", dateStr, e.getMessage()); - throw e; - } - } - } - - public class SaUsedSize { - - private final String saName; - private final Long usedSize; - private final Date updateTime; - - /** - * @param saName - * @param usedSize - * @param updateTime - */ - public SaUsedSize(String saName, Long usedSize, Date updateTime) { - - Preconditions.checkNotNull(saName, "Received null saName"); - Preconditions.checkNotNull(usedSize, "Received null usedSize"); - this.saName = saName; - this.usedSize = usedSize; - this.updateTime = updateTime; //optional - } - - /** - * @return the saName - */ - public String getSaName() { - - return saName; - } - - /** - * @return the usedSize - */ - public Long getUsedSize() { - - return usedSize; - } - - /** - * @return the updateTime - */ - public Date getUpdateTime() { - - return updateTime; - } - - public boolean hasUpdateTime() { - - return updateTime != null; - } - } - + private static Logger log = LoggerFactory.getLogger(UsedSpaceFile.class); + + private static final String PATTERN_RFC2822 = "EEE, dd MMM yyyy HH:mm:ss Z"; + private static final String PATTERN_DEFAULT = "EEE MMM dd HH:mm:ss z yyyy"; + + private enum Key { + usedsize, + checktime + } + + private Ini iniFile; + + public UsedSpaceFile(String iniFilePath) throws InvalidFileFormatException, IOException { + + Preconditions.checkNotNull(iniFilePath, "Received null iniFilePath"); + iniFile = new Ini(new File(iniFilePath)); + } + + public File getIniFile() { + + return iniFile.getFile(); + } + + public List getDefinedSA() { + + return new ArrayList(iniFile.keySet()); + } + + public SaUsedSize getSAUsedSize(String saName) { + + Preconditions.checkNotNull(saName, "Received null saName parameter"); + Preconditions.checkArgument( + hasSA(saName), saName + " section not found into used-space ini file"); + + Section section = iniFile.get(saName); + Long usedSpace = null; + Date updateTime = null; + for (String optionKey : section.keySet()) { + switch (Key.valueOf(optionKey)) { + case usedsize: + try { + usedSpace = Long.parseLong(section.get(optionKey)); + } catch (NumberFormatException e) { + log.error( + "{}.{} is not a valid Long value. NumberFormatException: {}", + saName, + optionKey, + e.getMessage()); + } + break; + case checktime: + try { + updateTime = parseDate(section.get(optionKey)); + } catch (ParseException e) { + log.error( + "{}.{} is not a valid Date value. ParseException: {}", + saName, + optionKey, + e.getMessage()); + } + break; + default: + log.error("{}.{} is not recognized as a valid key.", saName, optionKey); + break; + } + } + return new SaUsedSize(saName, usedSpace, updateTime); + } + + public boolean hasSA(String saName) { + + return getDefinedSA().contains(saName); + } + + /** + * @param dateStr + * @return + * @throws ParseException + */ + private Date parseDate(String dateStr) throws ParseException { + + Preconditions.checkNotNull(dateStr, "Received null dateStr parameter"); + + SimpleDateFormat formatRFC2822 = new SimpleDateFormat(PATTERN_RFC2822); + SimpleDateFormat formatDefault = new SimpleDateFormat(PATTERN_DEFAULT); + + try { + return formatRFC2822.parse(dateStr); + } catch (ParseException e) { + log.debug( + "Unable to parse date {} using RFC2822 " + + "formatter.ParseException: {} Attempting with default formatter", + dateStr, + e.getMessage(), + e); + try { + return formatDefault.parse(dateStr); + } catch (ParseException e2) { + log.warn( + "Unable to parse the date {} with default formatter. 
" + "ParseException: {}", + dateStr, + e.getMessage()); + throw e; + } + } + } + + public class SaUsedSize { + + private final String saName; + private final Long usedSize; + private final Date updateTime; + + /** + * @param saName + * @param usedSize + * @param updateTime + */ + public SaUsedSize(String saName, Long usedSize, Date updateTime) { + + Preconditions.checkNotNull(saName, "Received null saName"); + Preconditions.checkNotNull(usedSize, "Received null usedSize"); + this.saName = saName; + this.usedSize = usedSize; + this.updateTime = updateTime; // optional + } + + /** @return the saName */ + public String getSaName() { + + return saName; + } + + /** @return the usedSize */ + public Long getUsedSize() { + + return usedSize; + } + + /** @return the updateTime */ + public Date getUpdateTime() { + + return updateTime; + } + + public boolean hasUpdateTime() { + + return updateTime != null; + } + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfSURLs.java b/src/main/java/it/grid/storm/srm/types/ArrayOfSURLs.java index 41654023..10ffea88 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfSURLs.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfSURLs.java @@ -1,24 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TExtraInfoArray - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ - package it.grid.storm.srm.types; +import com.google.common.collect.Lists; import java.io.Serializable; import java.util.Arrays; import java.util.List; import java.util.Map; -import com.google.common.collect.Lists; - public class ArrayOfSURLs implements Serializable { private static final long serialVersionUID = -6162739978949956886L; @@ -77,8 +74,7 @@ public String toString() { if (surls != null) { for (int i = 0; i < surls.size(); i++) { buf.append("'" + surls.get(i) + "'"); - if (i + 1 < surls.size()) - buf.append(","); + if (i + 1 < surls.size()) buf.append(","); } } else { @@ -86,7 +82,6 @@ public String toString() { } return buf.toString(); - } public List asStringList() { diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTExtraInfo.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTExtraInfo.java index f3d334ad..5a744952 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTExtraInfo.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTExtraInfo.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TExtraInfoArray - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date July, 2006 * @version 2.0 */ - package it.grid.storm.srm.types; import java.io.*; @@ -17,124 +15,118 @@ public class ArrayOfTExtraInfo implements Serializable { - /** - * - */ + /** */ private static final long serialVersionUID = 1L; - + public static String PNAME_STORAGESYSTEMINFO = "storageSystemInfo"; - private ArrayList extraInfoList; + private ArrayList extraInfoList; + + /** + * Constructor that requires a String. If it is null, then an + * InvalidArrayOfTExtraInfoAttributeException is thrown. 
+ */ + public ArrayOfTExtraInfo(TExtraInfo[] infoArray) + throws InvalidArrayOfTExtraInfoAttributeException { + + if (infoArray == null) { + throw new InvalidArrayOfTExtraInfoAttributeException(infoArray); + } + } + + public ArrayOfTExtraInfo() { + + extraInfoList = new ArrayList(); + } + + public Object[] getArray() { - /** - * Constructor that requires a String. If it is null, then an - * InvalidArrayOfTExtraInfoAttributeException is thrown. - */ - public ArrayOfTExtraInfo(TExtraInfo[] infoArray) - throws InvalidArrayOfTExtraInfoAttributeException { - - if (infoArray == null) { - throw new InvalidArrayOfTExtraInfoAttributeException(infoArray); - } - } - - public ArrayOfTExtraInfo() { - - extraInfoList = new ArrayList(); - } - - public Object[] getArray() { - - return extraInfoList.toArray(); - } - - public TExtraInfo getTSpaceToken(int i) { - - return extraInfoList.get(i); - } - - public void setTExtraInfo(int index, TExtraInfo info) { - - extraInfoList.set(index, info); - } - - public void addTExtraInfo(TExtraInfo info) { - - extraInfoList.add(info); - } - - public int size() { - - return extraInfoList.size(); - } - - /** - * Fills this class using the values found in a structure inside a Hashtable. - * The Hashtable may contain different structures inside, all are identifiend - * by a name. Used for communication with the FE. - * - * @param inputParam - * Hashtable to read. - * @param fieldName - * Name that identifies the ArrayOfTExtraInfo structure in the - * Hashtable. - * @return A new ArrayOfTExtraInfo instance. - */ - public static ArrayOfTExtraInfo decode(Map inputParam, String fieldName) - throws InvalidArrayOfTExtraInfoAttributeException { - - List list = null; - try { - list = Arrays.asList((Object[]) inputParam.get(fieldName)); - } catch (NullPointerException e) { - // log.warn("Empty SURL array found!"); - } - - if (list == null) { - throw new InvalidArrayOfTExtraInfoAttributeException(null); - } - - ArrayOfTExtraInfo extraInfoArray = new ArrayOfTExtraInfo(); - - for (int i = 0; i < list.size(); i++) { - Hashtable extraInfo; - - extraInfo = (Hashtable) list.get(i); - try { - extraInfoArray.addTExtraInfo(TExtraInfo.decode(extraInfo)); - } catch (InvalidTExtraInfoAttributeException e) { - throw new InvalidArrayOfTExtraInfoAttributeException(null); - } - } - return extraInfoArray; - } - - public void encode(Map outputParam, String name) { - - Vector> vector = new Vector>(); - - for (TExtraInfo extraInfo : extraInfoList) { - Hashtable extraInfoMap = new Hashtable(); - extraInfo.encode(extraInfoMap); - vector.add(extraInfoMap); - } - outputParam.put(name, vector); - } - - public String toString() { - - StringBuilder sb = new StringBuilder(); - if (extraInfoList != null) { - sb.append("["); - for (Iterator it = extraInfoList.iterator(); it.hasNext();) { - TExtraInfo element = (TExtraInfo) it.next(); - sb.append(element.toString()); - } - sb.append("]"); - } else { - sb.append("EMPTY LIST"); - } - return sb.toString(); - } + return extraInfoList.toArray(); + } + public TExtraInfo getTSpaceToken(int i) { + + return extraInfoList.get(i); + } + + public void setTExtraInfo(int index, TExtraInfo info) { + + extraInfoList.set(index, info); + } + + public void addTExtraInfo(TExtraInfo info) { + + extraInfoList.add(info); + } + + public int size() { + + return extraInfoList.size(); + } + + /** + * Fills this class using the values found in a structure inside a Hashtable. The Hashtable may + * contain different structures inside, all are identifiend by a name. 
Used for communication with + * the FE. + * + * @param inputParam Hashtable to read. + * @param fieldName Name that identifies the ArrayOfTExtraInfo structure in the Hashtable. + * @return A new ArrayOfTExtraInfo instance. + */ + public static ArrayOfTExtraInfo decode(Map inputParam, String fieldName) + throws InvalidArrayOfTExtraInfoAttributeException { + + List list = null; + try { + list = Arrays.asList((Object[]) inputParam.get(fieldName)); + } catch (NullPointerException e) { + // log.warn("Empty SURL array found!"); + } + + if (list == null) { + throw new InvalidArrayOfTExtraInfoAttributeException(null); + } + + ArrayOfTExtraInfo extraInfoArray = new ArrayOfTExtraInfo(); + + for (int i = 0; i < list.size(); i++) { + Hashtable extraInfo; + + extraInfo = (Hashtable) list.get(i); + try { + extraInfoArray.addTExtraInfo(TExtraInfo.decode(extraInfo)); + } catch (InvalidTExtraInfoAttributeException e) { + throw new InvalidArrayOfTExtraInfoAttributeException(null); + } + } + return extraInfoArray; + } + + public void encode(Map outputParam, String name) { + + Vector> vector = new Vector>(); + + for (TExtraInfo extraInfo : extraInfoList) { + Hashtable extraInfoMap = new Hashtable(); + extraInfo.encode(extraInfoMap); + vector.add(extraInfoMap); + } + outputParam.put(name, vector); + } + + public String toString() { + + StringBuilder sb = new StringBuilder(); + if (extraInfoList != null) { + sb.append("["); + for (Iterator it = extraInfoList.iterator(); it.hasNext(); ) { + TExtraInfo element = (TExtraInfo) it.next(); + sb.append(element.toString()); + } + sb.append("]"); + } else { + sb.append("EMPTY LIST"); + } + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataPathDetail.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataPathDetail.java index 86824e6e..a5d3ec9b 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataPathDetail.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataPathDetail.java @@ -1,89 +1,80 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TExtraInfoArray - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ - package it.grid.storm.srm.types; +import java.io.Serializable; import java.util.ArrayList; -import java.util.Hashtable; import java.util.List; import java.util.Map; -import java.util.Vector; - -import it.grid.storm.srm.types.TSpaceToken; -import java.io.Serializable; public class ArrayOfTMetaDataPathDetail implements Serializable { - public static String PNAME_DETAILS = "details"; - public static String PNAME_ARRAYOFSUBPATHS = "arrayOfSubPaths"; - ArrayList metaDataList; - - public ArrayOfTMetaDataPathDetail() { + public static String PNAME_DETAILS = "details"; + public static String PNAME_ARRAYOFSUBPATHS = "arrayOfSubPaths"; + ArrayList metaDataList; - metaDataList = new ArrayList(); - } + public ArrayOfTMetaDataPathDetail() { - public Object[] getArray() { + metaDataList = new ArrayList(); + } - return metaDataList.toArray(); - } + public Object[] getArray() { - public TMetaDataPathDetail getTMetaDataPathDetail(int i) { + return metaDataList.toArray(); + } - return (TMetaDataPathDetail) metaDataList.get(i); - } + public TMetaDataPathDetail getTMetaDataPathDetail(int i) { - public void setTMetaDataPathDetail(int index, TMetaDataPathDetail elem) { + return (TMetaDataPathDetail) metaDataList.get(i); + } - metaDataList.set(index, elem); - } + public void setTMetaDataPathDetail(int index, TMetaDataPathDetail elem) { - public void addTMetaDataPathDetail(TMetaDataPathDetail elem) { + metaDataList.set(index, elem); + } - metaDataList.add(elem); - } + public void addTMetaDataPathDetail(TMetaDataPathDetail elem) { - public int size() { + metaDataList.add(elem); + } - return metaDataList.size(); - } + public int size() { - /** - * Encode method, used to create a structured paramter representing this - * object, for FE communication. - * - * @param outputParam - * structured Parameter that must be filled whit ArrayOfTMetaDataPath - * information. - * @param name - * name of the paramter - */ - public void encode(Map outputParam, String name) { + return metaDataList.size(); + } - List list = new ArrayList(); - for (int i = 0; i < metaDataList.size(); i++) { - ((TMetaDataPathDetail) metaDataList.get(i)).encode(list); - } - outputParam.put(name, list); - } + /** + * Encode method, used to create a structured paramter representing this object, for FE + * communication. + * + * @param outputParam structured Parameter that must be filled whit ArrayOfTMetaDataPath + * information. 
+ * @param name name of the paramter + */ + public void encode(Map outputParam, String name) { - public String toString() { + List list = new ArrayList(); + for (int i = 0; i < metaDataList.size(); i++) { + ((TMetaDataPathDetail) metaDataList.get(i)).encode(list); + } + outputParam.put(name, list); + } - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < metaDataList.size(); i++) { - sb.append("MetaData[" + i + "]:\n"); - sb.append(((TMetaDataPathDetail) metaDataList.get(i)).toString()); - } - return sb.toString(); - } + public String toString() { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < metaDataList.size(); i++) { + sb.append("MetaData[" + i + "]:\n"); + sb.append(((TMetaDataPathDetail) metaDataList.get(i)).toString()); + } + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataSpace.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataSpace.java index 9e714891..8a61ee25 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataSpace.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTMetaDataSpace.java @@ -1,73 +1,69 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TTSpace Token - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ - package it.grid.storm.srm.types; +import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; -import it.grid.storm.srm.types.TMetaDataSpace; -import java.io.Serializable; public class ArrayOfTMetaDataSpace implements Serializable { - public static String PNAME_ARRAYOFSPACEDETAILS = "arrayOfSpaceDetails"; + public static String PNAME_ARRAYOFSPACEDETAILS = "arrayOfSpaceDetails"; - ArrayList metaDataList; + ArrayList metaDataList; - public ArrayOfTMetaDataSpace() { + public ArrayOfTMetaDataSpace() { - metaDataList = new ArrayList(); - } + metaDataList = new ArrayList(); + } - public TMetaDataSpace[] getArray() { + public TMetaDataSpace[] getArray() { - return (TMetaDataSpace[]) metaDataList - .toArray(new TMetaDataSpace[metaDataList.size()]); - } + return (TMetaDataSpace[]) metaDataList.toArray(new TMetaDataSpace[metaDataList.size()]); + } - public TMetaDataSpace getTMetaDataSpace(int i) { + public TMetaDataSpace getTMetaDataSpace(int i) { - return (TMetaDataSpace) metaDataList.get(i); - } + return (TMetaDataSpace) metaDataList.get(i); + } - public void setTMetaDataSpace(int index, TMetaDataSpace data) { + public void setTMetaDataSpace(int index, TMetaDataSpace data) { - metaDataList.set(index, data); - } + metaDataList.set(index, data); + } - public void addTMetaDataSpace(TMetaDataSpace data) { + public void addTMetaDataSpace(TMetaDataSpace data) { - metaDataList.add(data); - } + metaDataList.add(data); + } - public int size() { + public int size() { - return metaDataList.size(); - } + return metaDataList.size(); + } - public void encode(Map outputParam, String fieldName) { + public void encode(Map outputParam, String fieldName) { - ArrayList metaDataSpaceList = new ArrayList(); - int arraySize = this.size(); + ArrayList metaDataSpaceList = new ArrayList(); + int arraySize = this.size(); - for (int i = 0; i < arraySize; i++) { - Map metaDataSpace = new HashMap(); - TMetaDataSpace metaDataElement = this.getTMetaDataSpace(i); - metaDataElement.encode(metaDataSpace); + for (int i = 0; i < 
arraySize; i++) { + Map metaDataSpace = new HashMap(); + TMetaDataSpace metaDataElement = this.getTMetaDataSpace(i); + metaDataElement.encode(metaDataSpace); - metaDataSpaceList.add(metaDataSpace); - } + metaDataSpaceList.add(metaDataSpace); + } - outputParam.put(fieldName, metaDataSpaceList); - } + outputParam.put(fieldName, metaDataSpaceList); + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLLifetimeReturnStatus.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLLifetimeReturnStatus.java index 7fe5b43e..39c4aa1d 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLLifetimeReturnStatus.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLLifetimeReturnStatus.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an ArrayOfTSURLLifetimeReturnStatus. - * + * * @author Alberto Forti * @author CNAF Bologna * @date Dec 2006 @@ -13,117 +12,106 @@ package it.grid.storm.srm.types; import java.util.ArrayList; -import java.util.Hashtable; import java.util.Map; -import java.util.Vector; public class ArrayOfTSURLLifetimeReturnStatus { - public static String PNAME_ARRAYOFFILESTATUSES = "arrayOfFileStatuses"; - - ArrayList array; - - /** - * Constructs an ArrayOfTSURLLifetimeReturnStatus of 'numItems' empty - * elements. - * - * @param numItems - */ - public ArrayOfTSURLLifetimeReturnStatus(int numItems) { - - array = new ArrayList(numItems); - } - - // /** - // * Constructor that requires a String. If it is null, then an - // * InvalidArrayOfTExtraInfoAttributeException is thrown. - // */ - // public ArrayOfTSURLLifetimeReturnStatus(TSURLReturnStatus[] surlArray) - // throws InvalidArrayOfTSURLReturnStatusAttributeException { - // - // if (surlArray == null) throw new - // InvalidArrayOfTSURLReturnStatusAttributeException(surlArray); - // //FIXME this.tokenArray = tokenArray; - // } - - /** - * Constructs an empty ArrayOfTSURLLifetimeReturnStatus. - */ - public ArrayOfTSURLLifetimeReturnStatus() { - - array = new ArrayList(); - } - - /** - * Get the array list. - * - * @return ArrayList - */ - public ArrayList getArray() { - - return array; - } - - /** - * Get the i-th element of the array. - * - * @param i - * int - * @return TSURLLifetimeReturnStatus - */ - public TSURLLifetimeReturnStatus getTSURLLifetimeReturnStatus(int i) { - - return (TSURLLifetimeReturnStatus) array.get(i); - } - - /** - * Set the i-th element of the array. - * - * @param index - * int - * @param item - * TSURLLifetimeReturnStatus - */ - public void setTSURLReturnStatus(int index, TSURLLifetimeReturnStatus item) { - - array.set(index, item); - } - - /** - * Add an element to the array. - * - * @param item - * TSURLLifetimeReturnStatus - */ - public void addTSurlReturnStatus(TSURLLifetimeReturnStatus item) { - - array.add(item); - } - - /** - * Returns the size of the array. - * - * @return int - */ - public int size() { - - return array.size(); - } - - /** - * Encodes the array to a Hashtable structure. 
- * - * @param outputParam - * Hashtable - * @param name - * String - */ - public void encode(Map outputParam, String name) { - - ArrayList list = new ArrayList(); - for (int i = 0; i < array.size(); i++) { - ((TSURLLifetimeReturnStatus) array.get(i)).encode(list); - } - outputParam.put(name, list); - } + public static String PNAME_ARRAYOFFILESTATUSES = "arrayOfFileStatuses"; + + ArrayList array; + + /** + * Constructs an ArrayOfTSURLLifetimeReturnStatus of 'numItems' empty elements. + * + * @param numItems + */ + public ArrayOfTSURLLifetimeReturnStatus(int numItems) { + + array = new ArrayList(numItems); + } + + // /** + // * Constructor that requires a String. If it is null, then an + // * InvalidArrayOfTExtraInfoAttributeException is thrown. + // */ + // public ArrayOfTSURLLifetimeReturnStatus(TSURLReturnStatus[] surlArray) + // throws InvalidArrayOfTSURLReturnStatusAttributeException { + // + // if (surlArray == null) throw new + // InvalidArrayOfTSURLReturnStatusAttributeException(surlArray); + // //FIXME this.tokenArray = tokenArray; + // } + + /** Constructs an empty ArrayOfTSURLLifetimeReturnStatus. */ + public ArrayOfTSURLLifetimeReturnStatus() { + + array = new ArrayList(); + } + + /** + * Get the array list. + * + * @return ArrayList + */ + public ArrayList getArray() { + + return array; + } + + /** + * Get the i-th element of the array. + * + * @param i int + * @return TSURLLifetimeReturnStatus + */ + public TSURLLifetimeReturnStatus getTSURLLifetimeReturnStatus(int i) { + + return (TSURLLifetimeReturnStatus) array.get(i); + } + + /** + * Set the i-th element of the array. + * + * @param index int + * @param item TSURLLifetimeReturnStatus + */ + public void setTSURLReturnStatus(int index, TSURLLifetimeReturnStatus item) { + + array.set(index, item); + } + + /** + * Add an element to the array. + * + * @param item TSURLLifetimeReturnStatus + */ + public void addTSurlReturnStatus(TSURLLifetimeReturnStatus item) { + + array.add(item); + } + + /** + * Returns the size of the array. + * + * @return int + */ + public int size() { + + return array.size(); + } + + /** + * Encodes the array to a Hashtable structure. + * + * @param outputParam Hashtable + * @param name String + */ + public void encode(Map outputParam, String name) { + + ArrayList list = new ArrayList(); + for (int i = 0; i < array.size(); i++) { + ((TSURLLifetimeReturnStatus) array.get(i)).encode(list); + } + outputParam.put(name, list); + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLReturnStatus.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLReturnStatus.java index 3cdf6a38..38dd9e98 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLReturnStatus.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTSURLReturnStatus.java @@ -1,15 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TExtraInfoArray - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ - package it.grid.storm.srm.types; import java.util.ArrayList; @@ -19,149 +17,145 @@ public class ArrayOfTSURLReturnStatus { - public static String PNAME_ARRAYOFFILESTATUSES = "arrayOfFileStatuses"; + public static String PNAME_ARRAYOFFILESTATUSES = "arrayOfFileStatuses"; - ArrayList surlRetList; + ArrayList surlRetList; - /** - * Construct an ArrayOfTSURLReturnStatus of numItems empty elements. - */ - public ArrayOfTSURLReturnStatus(int numItems) { - - surlRetList = new ArrayList(numItems); - } - - /** - * Constructor that requires a String. If it is null, then an - * InvalidArrayOfTExtraInfoAttributeException is thrown. - */ - public ArrayOfTSURLReturnStatus(TSURLReturnStatus[] surlArray) - throws InvalidArrayOfTSURLReturnStatusAttributeException { - - if (surlArray == null) { - throw new InvalidArrayOfTSURLReturnStatusAttributeException(surlArray); - } - this.surlRetList = new ArrayList( - Arrays.asList(surlArray)); - } - - public ArrayOfTSURLReturnStatus() { - - surlRetList = new ArrayList(); - } - - public ArrayList getArray() { - - return surlRetList; - } - - public TSURLReturnStatus getTSURLReturnStatus(int i) { - - return (TSURLReturnStatus) surlRetList.get(i); - } - - public void setTSURLReturnStatus(int index, TSURLReturnStatus surl) { - - surlRetList.set(index, surl); - } - - public void addTSurlReturnStatus(TSURLReturnStatus surl) { - - surlRetList.add(surl); - } - - public int size() { - - return surlRetList.size(); - } - - /** - * @param surl - * @throws IllegalArgumentException - * if null argument or not contained surl - */ - public void updateStatus(TSURLReturnStatus surlStatus, TReturnStatus newStatus) - throws IllegalArgumentException { - - if (surlStatus == null || newStatus == null) { - throw new IllegalArgumentException( - "Unable to update the status,null arguments: surlStatus=" + surlStatus - + " newStatus=" + newStatus); - } - int index = surlRetList.indexOf(surlStatus); - if (index < 0) { - throw new IllegalArgumentException( - "Unable to update the status,unknown TSURLReturnStatus" + surlStatus); - } - surlRetList.get(index).setStatus(newStatus); - } - - public void encode(Map outputParam, String name) { - - List list = new ArrayList(); - for (int i = 0; i < surlRetList.size(); i++) { - ((TSURLReturnStatus) surlRetList.get(i)).encode(list); - } - - outputParam.put(name, list); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("ArrayOfTSURLReturnStatus [surlRetList="); - builder.append(surlRetList); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result - + ((surlRetList == null) ? 
0 : surlRetList.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - ArrayOfTSURLReturnStatus other = (ArrayOfTSURLReturnStatus) obj; - if (surlRetList == null) { - if (other.surlRetList != null) { - return false; - } - } else if (!surlRetList.equals(other.surlRetList)) { - return false; - } - return true; - } + /** Construct an ArrayOfTSURLReturnStatus of numItems empty elements. */ + public ArrayOfTSURLReturnStatus(int numItems) { + + surlRetList = new ArrayList(numItems); + } + /** + * Constructor that requires a String. If it is null, then an + * InvalidArrayOfTExtraInfoAttributeException is thrown. + */ + public ArrayOfTSURLReturnStatus(TSURLReturnStatus[] surlArray) + throws InvalidArrayOfTSURLReturnStatusAttributeException { + + if (surlArray == null) { + throw new InvalidArrayOfTSURLReturnStatusAttributeException(surlArray); + } + this.surlRetList = new ArrayList(Arrays.asList(surlArray)); + } + + public ArrayOfTSURLReturnStatus() { + + surlRetList = new ArrayList(); + } + + public ArrayList getArray() { + + return surlRetList; + } + + public TSURLReturnStatus getTSURLReturnStatus(int i) { + + return (TSURLReturnStatus) surlRetList.get(i); + } + + public void setTSURLReturnStatus(int index, TSURLReturnStatus surl) { + + surlRetList.set(index, surl); + } + + public void addTSurlReturnStatus(TSURLReturnStatus surl) { + + surlRetList.add(surl); + } + + public int size() { + + return surlRetList.size(); + } + + /** + * @param surl + * @throws IllegalArgumentException if null argument or not contained surl + */ + public void updateStatus(TSURLReturnStatus surlStatus, TReturnStatus newStatus) + throws IllegalArgumentException { + + if (surlStatus == null || newStatus == null) { + throw new IllegalArgumentException( + "Unable to update the status,null arguments: surlStatus=" + + surlStatus + + " newStatus=" + + newStatus); + } + int index = surlRetList.indexOf(surlStatus); + if (index < 0) { + throw new IllegalArgumentException( + "Unable to update the status,unknown TSURLReturnStatus" + surlStatus); + } + surlRetList.get(index).setStatus(newStatus); + } + + public void encode(Map outputParam, String name) { + + List list = new ArrayList(); + for (int i = 0; i < surlRetList.size(); i++) { + ((TSURLReturnStatus) surlRetList.get(i)).encode(list); + } + + outputParam.put(name, list); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("ArrayOfTSURLReturnStatus [surlRetList="); + builder.append(surlRetList); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((surlRetList == null) ? 
0 : surlRetList.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ArrayOfTSURLReturnStatus other = (ArrayOfTSURLReturnStatus) obj; + if (surlRetList == null) { + if (other.surlRetList != null) { + return false; + } + } else if (!surlRetList.equals(other.surlRetList)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTSizeInBytes.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTSizeInBytes.java index fa940caf..53133f2f 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTSizeInBytes.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTSizeInBytes.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the ArrayOfTSizeInBytes SRM type. - * + * * @author Alberto Forti * @author CNAF - INFN Bologna * @date Luglio, 2006 @@ -13,7 +12,6 @@ package it.grid.storm.srm.types; import it.grid.storm.common.types.SizeUnit; - import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; @@ -22,65 +20,64 @@ public class ArrayOfTSizeInBytes implements Serializable { - private static final long serialVersionUID = -1987674620390240434L; + private static final long serialVersionUID = -1987674620390240434L; - public static final String PNAME_arrayOfExpectedFileSizes = "arrayOfExpectedFileSizes"; + public static final String PNAME_arrayOfExpectedFileSizes = "arrayOfExpectedFileSizes"; - private ArrayList sizeInBytesList; + private ArrayList sizeInBytesList; - public ArrayOfTSizeInBytes() { + public ArrayOfTSizeInBytes() { - sizeInBytesList = new ArrayList(); - } + sizeInBytesList = new ArrayList(); + } - public static ArrayOfTSizeInBytes decode(Map inputParam, String fieldName) { + public static ArrayOfTSizeInBytes decode(Map inputParam, String fieldName) { - List inputList = null; - try { - inputList = Arrays.asList((Object[]) inputParam.get(fieldName)); - } catch (NullPointerException e) { - // log.warn("Empty SURL array found!"); - } + List inputList = null; + try { + inputList = Arrays.asList((Object[]) inputParam.get(fieldName)); + } catch (NullPointerException e) { + // log.warn("Empty SURL array found!"); + } - if (inputList == null) - return null; + if (inputList == null) return null; - ArrayOfTSizeInBytes list = new ArrayOfTSizeInBytes(); - for (int i = 0; i < inputList.size(); i++) { - TSizeInBytes size = null; - String strLong = (String) inputList.get(i); - try { - size = TSizeInBytes.make(Long.parseLong(strLong), SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException e) { - return null; - } - list.addTSizeInBytes(size); - } - return list; - } + ArrayOfTSizeInBytes list = new ArrayOfTSizeInBytes(); + for (int i = 0; i < inputList.size(); i++) { + TSizeInBytes size = null; + String strLong = (String) inputList.get(i); + try { + size = TSizeInBytes.make(Long.parseLong(strLong), SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException e) { + return null; + } + list.addTSizeInBytes(size); + } + return list; + } - public Object[] getArray() { + public Object[] getArray() { - return sizeInBytesList.toArray(); - } + return sizeInBytesList.toArray(); + } - 
public TSizeInBytes getTSizeInBytes(int i) { + public TSizeInBytes getTSizeInBytes(int i) { - return (TSizeInBytes) sizeInBytesList.get(i); - } + return (TSizeInBytes) sizeInBytesList.get(i); + } - public void setTSizeInBytes(int index, TSizeInBytes size) { + public void setTSizeInBytes(int index, TSizeInBytes size) { - sizeInBytesList.set(index, size); - } + sizeInBytesList.set(index, size); + } - public void addTSizeInBytes(TSizeInBytes size) { + public void addTSizeInBytes(TSizeInBytes size) { - sizeInBytesList.add(size); - } + sizeInBytesList.add(size); + } - public int size() { + public int size() { - return sizeInBytesList.size(); - } + return sizeInBytesList.size(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/ArrayOfTSpaceToken.java b/src/main/java/it/grid/storm/srm/types/ArrayOfTSpaceToken.java index fe598aa6..335f9453 100644 --- a/src/main/java/it/grid/storm/srm/types/ArrayOfTSpaceToken.java +++ b/src/main/java/it/grid/storm/srm/types/ArrayOfTSpaceToken.java @@ -1,117 +1,109 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a TTSpace Token - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 */ - package it.grid.storm.srm.types; +import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; -import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Vector; - -import it.grid.storm.srm.types.TSpaceToken; -import java.io.Serializable; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ArrayOfTSpaceToken implements Serializable { - private static Logger log = LoggerFactory.getLogger(ArrayOfTSpaceToken.class); + private static Logger log = LoggerFactory.getLogger(ArrayOfTSpaceToken.class); - public static final String PNAME_ARRAYOFSPACETOKENS = "arrayOfSpaceTokens"; + public static final String PNAME_ARRAYOFSPACETOKENS = "arrayOfSpaceTokens"; - ArrayList tokenList; + ArrayList tokenList; - /** - * Constructor that requires a String. If it is null, then an - * InvalidArrayOfTTSpaceTokenAttributeException is thrown. - */ - public ArrayOfTSpaceToken(TSpaceToken[] tokenArray) - throws InvalidArrayOfTSpaceTokenAttributeException { + /** + * Constructor that requires a String. If it is null, then an + * InvalidArrayOfTTSpaceTokenAttributeException is thrown. 
+ */ + public ArrayOfTSpaceToken(TSpaceToken[] tokenArray) + throws InvalidArrayOfTSpaceTokenAttributeException { - if (tokenArray == null) - throw new InvalidArrayOfTSpaceTokenAttributeException(tokenArray); - // FIXME this.tokenArray = tokenArray; - } + if (tokenArray == null) throw new InvalidArrayOfTSpaceTokenAttributeException(tokenArray); + // FIXME this.tokenArray = tokenArray; + } - public ArrayOfTSpaceToken() { + public ArrayOfTSpaceToken() { - tokenList = new ArrayList(); - } + tokenList = new ArrayList(); + } - public static ArrayOfTSpaceToken decode(Map inputParam, String fieldName) - throws InvalidArrayOfTSpaceTokenAttributeException { + public static ArrayOfTSpaceToken decode(Map inputParam, String fieldName) + throws InvalidArrayOfTSpaceTokenAttributeException { - List tokensList = null; - try { - tokensList = Arrays.asList((Object[]) inputParam.get(fieldName)); - } catch (NullPointerException e) { - log.warn(""); - } - if (tokensList == null) - throw new InvalidArrayOfTSpaceTokenAttributeException(null); + List tokensList = null; + try { + tokensList = Arrays.asList((Object[]) inputParam.get(fieldName)); + } catch (NullPointerException e) { + log.warn(""); + } + if (tokensList == null) throw new InvalidArrayOfTSpaceTokenAttributeException(null); - ArrayOfTSpaceToken arrayOfTSpaceTokens = new ArrayOfTSpaceToken(); + ArrayOfTSpaceToken arrayOfTSpaceTokens = new ArrayOfTSpaceToken(); - for (int i = 0; i < tokensList.size(); i++) { - TSpaceToken token = null; - try { - token = TSpaceToken.make((String) tokensList.get(i)); - } catch (InvalidTSpaceTokenAttributesException e) { - token = TSpaceToken.makeEmpty(); - } - arrayOfTSpaceTokens.addTSpaceToken(token); - } + for (int i = 0; i < tokensList.size(); i++) { + TSpaceToken token = null; + try { + token = TSpaceToken.make((String) tokensList.get(i)); + } catch (InvalidTSpaceTokenAttributesException e) { + token = TSpaceToken.makeEmpty(); + } + arrayOfTSpaceTokens.addTSpaceToken(token); + } - return arrayOfTSpaceTokens; - } + return arrayOfTSpaceTokens; + } - public TSpaceToken getTSpaceToken(int i) { + public TSpaceToken getTSpaceToken(int i) { - return (TSpaceToken) tokenList.get(i); - } + return (TSpaceToken) tokenList.get(i); + } - public TSpaceToken[] getTSpaceTokenArray() { + public TSpaceToken[] getTSpaceTokenArray() { - TSpaceToken[] array = new TSpaceToken[0]; - return tokenList.toArray(array); - } + TSpaceToken[] array = new TSpaceToken[0]; + return tokenList.toArray(array); + } - public void addTSpaceToken(TSpaceToken token) { + public void addTSpaceToken(TSpaceToken token) { - tokenList.add(token); - } + tokenList.add(token); + } - public int size() { + public int size() { - return tokenList.size(); - } + return tokenList.size(); + } - /** - * Encode method, used to create a structured paramter representing this - * object, for FE communication. - * - * @param outputParam - * @param name - */ - public void encode(Map outputParam, String name) { + /** + * Encode method, used to create a structured paramter representing this object, for FE + * communication. 
+ * + * @param outputParam + * @param name + */ + public void encode(Map outputParam, String name) { - Vector vector = new Vector(); - for (int i = 0; i < tokenList.size(); i++) { - ((TSpaceToken) tokenList.get(i)).encode(vector); - } + Vector vector = new Vector(); + for (int i = 0; i < tokenList.size(); i++) { + ((TSpaceToken) tokenList.get(i)).encode(vector); + } - outputParam.put(name, vector); - } + outputParam.put(name, vector); + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfSURLsAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfSURLsAttributeException.java index 6ec612f0..f9515afe 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfSURLsAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfSURLsAttributeException.java @@ -1,31 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an Exception throws if TExtraInfo is not well formed. * - * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; import java.util.List; public class InvalidArrayOfSURLsAttributeException extends Exception { - private boolean nullArray = true; + private boolean nullArray = true; - public InvalidArrayOfSURLsAttributeException(List array) { + public InvalidArrayOfSURLsAttributeException(List array) { - nullArray = (array == null); - } + nullArray = (array == null); + } - public String toString() { + public String toString() { - return "surlList = " + nullArray; - } + return "surlList = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTExtraInfoAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTExtraInfoAttributeException.java index a5b72c9b..25df64bd 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTExtraInfoAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTExtraInfoAttributeException.java @@ -1,28 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for SpaceToken - * is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for SpaceToken is invoked with a + * null String. 
*/ - -import it.grid.storm.srm.types.TExtraInfo; - public class InvalidArrayOfTExtraInfoAttributeException extends Exception { - private boolean nullArray; + private boolean nullArray; - public InvalidArrayOfTExtraInfoAttributeException(Object[] infoArray) { + public InvalidArrayOfTExtraInfoAttributeException(Object[] infoArray) { - nullArray = infoArray == null; - } + nullArray = infoArray == null; + } - public String toString() { + public String toString() { - return "Invalid TExtraInfo[]: nullArray = " + nullArray; - } + return "Invalid TExtraInfo[]: nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataPathDetailAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataPathDetailAttributeException.java index 1d8b0c09..74130b66 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataPathDetailAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataPathDetailAttributeException.java @@ -1,30 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for SpaceToken - * is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for SpaceToken is invoked with a + * null String. */ +public class InvalidArrayOfTMetaDataPathDetailAttributeException extends Exception { -import it.grid.storm.srm.types.TMetaDataPathDetail; + private boolean nullArray; -public class InvalidArrayOfTMetaDataPathDetailAttributeException extends - Exception { + public InvalidArrayOfTMetaDataPathDetailAttributeException(TMetaDataPathDetail[] metaDataArray) { - private boolean nullArray; + nullArray = metaDataArray == null; + } - public InvalidArrayOfTMetaDataPathDetailAttributeException( - TMetaDataPathDetail[] metaDataArray) { + public String toString() { - nullArray = metaDataArray == null; - } - - public String toString() { - - return "Invalid TMetaDataPathDetail[]: nullArray = " + nullArray; - } + return "Invalid TMetaDataPathDetail[]: nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataSpaceAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataSpaceAttributeException.java index 81423123..59e2ea56 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataSpaceAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTMetaDataSpaceAttributeException.java @@ -1,29 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for SpaceToken - * is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for SpaceToken is invoked with a + * null String. 
*/ - -import it.grid.storm.srm.types.TMetaDataSpace; - public class InvalidArrayOfTMetaDataSpaceAttributeException extends Exception { - private boolean nullArray; + private boolean nullArray; - public InvalidArrayOfTMetaDataSpaceAttributeException( - TMetaDataSpace[] metaDataArray) { + public InvalidArrayOfTMetaDataSpaceAttributeException(TMetaDataSpace[] metaDataArray) { - nullArray = metaDataArray == null; - } + nullArray = metaDataArray == null; + } - public String toString() { + public String toString() { - return "Invalid TMetaDataArray[]: nullArray = " + nullArray; - } + return "Invalid TMetaDataArray[]: nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSURLReturnStatusAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSURLReturnStatusAttributeException.java index e28104c7..75dd611b 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSURLReturnStatusAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSURLReturnStatusAttributeException.java @@ -1,30 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an Exception throws if TExtraInfo is not well formed. * - * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; -public class InvalidArrayOfTSURLReturnStatusAttributeException extends - Exception { +public class InvalidArrayOfTSURLReturnStatusAttributeException extends Exception { - private boolean nullArray = true; + private boolean nullArray = true; - public InvalidArrayOfTSURLReturnStatusAttributeException(Object[] array) { + public InvalidArrayOfTSURLReturnStatusAttributeException(Object[] array) { - nullArray = (array == null); - } + nullArray = (array == null); + } - public String toString() { + public String toString() { - return "nullArray = " + nullArray; - } + return "nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSpaceTokenAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSpaceTokenAttributeException.java index 37c1f539..f2ea64ec 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSpaceTokenAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidArrayOfTSpaceTokenAttributeException.java @@ -1,28 +1,23 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for SpaceToken - * is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for SpaceToken is invoked with a + * null String. 
*/ - -import it.grid.storm.srm.types.TSpaceToken; - public class InvalidArrayOfTSpaceTokenAttributeException extends Exception { - private boolean nullArray; + private boolean nullArray; - public InvalidArrayOfTSpaceTokenAttributeException(TSpaceToken[] tokenArray) { + public InvalidArrayOfTSpaceTokenAttributeException(TSpaceToken[] tokenArray) { - nullArray = tokenArray == null; - } + nullArray = tokenArray == null; + } - public String toString() { + public String toString() { - return "Invalid SpaceToken[]: nullArray = " + nullArray; - } + return "Invalid SpaceToken[]: nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTDirOptionAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTDirOptionAttributesException.java index d80b1616..48958324 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTDirOptionAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTDirOptionAttributesException.java @@ -1,35 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception throws if TDirOptionData is not well - * formed. * - * + * This class represents an Exception throws if TDirOptionData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidTDirOptionAttributesException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - private boolean allLevel = true; - private int numLevel = -1; + private boolean allLevel = true; + private int numLevel = -1; - public InvalidTDirOptionAttributesException(boolean allLevel, int numLevel) { + public InvalidTDirOptionAttributesException(boolean allLevel, int numLevel) { - this.allLevel = allLevel; - this.numLevel = numLevel; - } + this.allLevel = allLevel; + this.numLevel = numLevel; + } - public String toString() { + public String toString() { - return "Invalid TDirOption: recursion as specified by allLevel is " - + allLevel + ", but numLevel is set to " + numLevel; - } + return "Invalid TDirOption: recursion as specified by allLevel is " + + allLevel + + ", but numLevel is set to " + + numLevel; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTExtraInfoAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTExtraInfoAttributeException.java index 75d71aed..7453c7d8 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTExtraInfoAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTExtraInfoAttributeException.java @@ -1,29 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an Exception throws if TExtraInfo is not well formed. 
* - * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; public class InvalidTExtraInfoAttributeException extends Exception { - private boolean nullKey = true; + private boolean nullKey = true; - public InvalidTExtraInfoAttributeException(String key) { + public InvalidTExtraInfoAttributeException(String key) { - nullKey = (key == null); - } + nullKey = (key == null); + } - public String toString() { + public String toString() { - return "nullKey = " + nullKey; - } + return "nullKey = " + nullKey; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTMetaDataSpaceAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTMetaDataSpaceAttributeException.java index 5a7db074..0de24307 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTMetaDataSpaceAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTMetaDataSpaceAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when TMetaDataSpace retrince an - * invalid constructor attributes. - * + * This class represents an Exception thrown when TMetaDataSpace receives invalid constructor + * attributes. + * * @author Magnoni Luca * @author CNAF INFN Bologna * @date Avril 2005 * @version 1.0 */ - public class InvalidTMetaDataSpaceAttributeException extends Exception { - private boolean nullToken; + private boolean nullToken; - /** - * Constructor that requires the SizeUnit that aused the exception to be - * thrown. - */ - public InvalidTMetaDataSpaceAttributeException(TSpaceToken token) { + /** Constructor that requires the SizeUnit that caused the exception to be thrown. */ + public InvalidTMetaDataSpaceAttributeException(TSpaceToken token) { + nullToken = token == null; - } + } - public String toString() { + public String toString() { - return "Ivalid TMetaDataSpace Attributes: nullSizeUnit=" + nullToken; - } + return "Invalid TMetaDataSpace Attributes: nullSizeUnit=" + nullToken; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTRequestTokenAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTRequestTokenAttributesException.java index a3cbe3d2..ee0dafea 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTRequestTokenAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTRequestTokenAttributesException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for - * TRequestToken is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for TRequestToken is invoked with + * a null String. 
+ * * @author Magnoni Luca * @author CNAF INFN Bologna * @date Avril, 2005 @@ -15,15 +14,15 @@ */ public class InvalidTRequestTokenAttributesException extends Exception { - private boolean nullString; + private boolean nullString; - public InvalidTRequestTokenAttributesException(String s) { + public InvalidTRequestTokenAttributesException(String s) { - nullString = s == null; - } + nullString = s == null; + } - public String toString() { + public String toString() { - return "Invalid RequestToken Attributes: nullString=" + nullString; - } + return "Invalid RequestToken Attributes: nullString=" + nullString; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSURLAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSURLAttributesException.java index 88a063be..26213b06 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSURLAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSURLAttributesException.java @@ -1,49 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when a TSURL constructor is invoked - * with null or empty SiteProtocol or SFN. - * + * This class represents an Exception thrown when a TSURL constructor is invoked with null or empty + * SiteProtocol or SFN. + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 26th, 2005 * @version 1.0 */ - -import it.grid.storm.common.types.SiteProtocol; import it.grid.storm.common.types.SFN; +import it.grid.storm.common.types.SiteProtocol; public class InvalidTSURLAttributesException extends Exception { - private boolean nullProtocol; // boolean true if Protocol is null - private boolean nullSFN; // boolean true if SFN is null - private boolean emptyProtocol = false; // boolean true if the supplied - // SiteProtocol is empty - private boolean emptySFN = false; // boolean true if the supplied SFN is empty - // private boolean relativePath = false; - - /** - * Constructor that requires the Protocol and SFN that caused the exception to - * be thrown. - */ - public InvalidTSURLAttributesException(SiteProtocol prt, SFN sfn) { - - nullProtocol = (prt == null); - if (!nullProtocol) - emptyProtocol = (prt == SiteProtocol.EMPTY); - nullSFN = (sfn == null); - if (!nullSFN) - emptySFN = sfn.isEmpty(); - } - - public String toString() { - - return "Invalid TSURL Attibutes: nullProtocol=" + nullProtocol - + "; emptyProtocol=" + emptyProtocol + "; nullSFN=" + nullSFN - + "; emptySFN=" + emptySFN + "."; - } + private boolean nullProtocol; // boolean true if Protocol is null + private boolean nullSFN; // boolean true if SFN is null + private boolean emptyProtocol = false; // boolean true if the supplied + // SiteProtocol is empty + private boolean emptySFN = false; // boolean true if the supplied SFN is empty + // private boolean relativePath = false; + + /** Constructor that requires the Protocol and SFN that caused the exception to be thrown. 
*/ + public InvalidTSURLAttributesException(SiteProtocol prt, SFN sfn) { + + nullProtocol = (prt == null); + if (!nullProtocol) emptyProtocol = (prt == SiteProtocol.EMPTY); + nullSFN = (sfn == null); + if (!nullSFN) emptySFN = sfn.isEmpty(); + } + + public String toString() { + + return "Invalid TSURL Attributes: nullProtocol=" + + nullProtocol + + "; emptyProtocol=" + + emptyProtocol + + "; nullSFN=" + + nullSFN + + "; emptySFN=" + + emptySFN + + "."; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSURLInfoAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSURLInfoAttributeException.java index f1d2d563..3d629268 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSURLInfoAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSURLInfoAttributeException.java @@ -1,31 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an Exception throws if TSURLINFO is not well formed. * - * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; -import it.grid.storm.srm.types.TSURL; - public class InvalidTSURLInfoAttributeException extends Exception { - private boolean nullSurl = true; + private boolean nullSurl = true; - public InvalidTSURLInfoAttributeException(TSURL surl) { + public InvalidTSURLInfoAttributeException(TSURL surl) { - nullSurl = (surl == null); - } + nullSurl = (surl == null); + } - public String toString() { + public String toString() { - return "nullSurl = " + nullSurl; - } + return "nullSurl = " + nullSurl; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSURLLifetimeReturnStatusAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSURLLifetimeReturnStatusAttributeException.java index e8c5e1b6..c075c5c1 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSURLLifetimeReturnStatusAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSURLLifetimeReturnStatusAttributeException.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throwed if TSURLLifetimeReturnStatus is - * not well formed. * - * + * This class represents an Exception thrown if TSURLLifetimeReturnStatus is not well formed. 
* + * * @author Alberto Forti * @author CNAF-INFN Bologna * @date Dec 2006 @@ -13,20 +11,17 @@ */ package it.grid.storm.srm.types; -import it.grid.storm.srm.types.TSURL; +public class InvalidTSURLLifetimeReturnStatusAttributeException extends Exception { -public class InvalidTSURLLifetimeReturnStatusAttributeException extends - Exception { + private boolean nullSurl = true; - private boolean nullSurl = true; + public InvalidTSURLLifetimeReturnStatusAttributeException(TSURL surl) { - public InvalidTSURLLifetimeReturnStatusAttributeException(TSURL surl) { + nullSurl = (surl == null); + } - nullSurl = (surl == null); - } + public String toString() { - public String toString() { - - return "nullSurl = " + nullSurl; - } + return "nullSurl = " + nullSurl; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSURLReturnStatusAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSURLReturnStatusAttributeException.java index 57302b0a..8686afd6 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSURLReturnStatusAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSURLReturnStatusAttributeException.java @@ -1,31 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents an Exception throws if TSURLINFO is not well formed. * - * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; -import it.grid.storm.srm.types.TSURL; - public class InvalidTSURLReturnStatusAttributeException extends Exception { - private boolean nullSurl = true; + private boolean nullSurl = true; - public InvalidTSURLReturnStatusAttributeException(TSURL surl) { + public InvalidTSURLReturnStatusAttributeException(TSURL surl) { - nullSurl = (surl == null); - } + nullSurl = (surl == null); + } - public String toString() { + public String toString() { - return "nullSurl = " + nullSurl; - } + return "nullSurl = " + nullSurl; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSizeAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSizeAttributesException.java index 8b77c083..5e74e3fb 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSizeAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSizeAttributesException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when FileSize receives null as - * constructor attributes; or a negative size. - * + * This class represents an Exception thrown when FileSize receives null as constructor attributes; + * or a negative size. + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 23rd, 2005 @@ -17,27 +16,24 @@ public class InvalidTSizeAttributesException extends RuntimeException { - /** - * - */ - private static final long serialVersionUID = 1L; - - private boolean negativeSize; - private boolean nullUnit; - - /** - * Constructor that requires the long and the SizeUnit that caused the - * exception to be thrown. 
- */ - public InvalidTSizeAttributesException(long size, SizeUnit unit) { - - nullUnit = unit == null; - negativeSize = size < 0; - } - - public String toString() { - - return "Ivalid TFileSize Attributes: nullSizeUnit=" + nullUnit - + "; negativeSize=" + negativeSize; - } + /** */ + private static final long serialVersionUID = 1L; + + private boolean negativeSize; + private boolean nullUnit; + + /** Constructor that requires the long and the SizeUnit that caused the exception to be thrown. */ + public InvalidTSizeAttributesException(long size, SizeUnit unit) { + + nullUnit = unit == null; + negativeSize = size < 0; + } + + public String toString() { + + return "Invalid TFileSize Attributes: nullSizeUnit=" + + nullUnit + + "; negativeSize=" + + negativeSize; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTSpaceTokenAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTSpaceTokenAttributesException.java index 900ebb62..a392b5a5 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTSpaceTokenAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTSpaceTokenAttributesException.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when the constructor for SpaceToken - * is invoked with a null String. - * + * This class represents an Exception thrown when the constructor for SpaceToken is invoked with a + * null String. + * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 23rd, 2005 @@ -15,12 +14,10 @@ */ public class InvalidTSpaceTokenAttributesException extends Exception { - public InvalidTSpaceTokenAttributesException() { + public InvalidTSpaceTokenAttributesException() {} - } + public String toString() { - public String toString() { - - return "Invalid TSpaceToken Attribute: null String!"; - } + return "Invalid TSpaceToken Attribute: null String!"; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTTURLAttributesException.java b/src/main/java/it/grid/storm/srm/types/InvalidTTURLAttributesException.java index eac7f92d..12900ad1 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTTURLAttributesException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTTURLAttributesException.java @@ -1,57 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an Exception thrown when a TURL constructor is invoked - * with null TransferProtocol or with null TFN. - * + * This class represents an Exception thrown when a TURL constructor is invoked with null + * TransferProtocol or with null TFN. 
+ * * @author Ezio Corso * @author EGRID - ICTP Trieste * @date March 26th, 2005 * @version 2.0 */ - -import it.grid.storm.common.types.TransferProtocol; import it.grid.storm.common.types.TFN; +import it.grid.storm.common.types.TransferProtocol; public class InvalidTTURLAttributesException extends IllegalArgumentException { - private boolean nullProtocol; // boolean true if TransferProtocol is null - private boolean nullTFN; // boolean true if TFN is null - private boolean emptyProtocol = false; // boolean true if the TransferProtocol - // specified is EMPTY - private boolean emptyTFN = false; // boolean true if the suppliedTFN is empty + private boolean nullProtocol; // boolean true if TransferProtocol is null + private boolean nullTFN; // boolean true if TFN is null + private boolean emptyProtocol = false; // boolean true if the TransferProtocol + // specified is EMPTY + private boolean emptyTFN = false; // boolean true if the suppliedTFN is empty - public InvalidTTURLAttributesException() { + public InvalidTTURLAttributesException() { - super(); - } + super(); + } - public InvalidTTURLAttributesException(Throwable cause) { + public InvalidTTURLAttributesException(Throwable cause) { - super(cause); - } + super(cause); + } - /** - * Constructor that requires the Protocol and SFN that caused the exception to - * be thrown. - */ - public InvalidTTURLAttributesException(TransferProtocol prt, TFN tfn) { + /** Constructor that requires the Protocol and SFN that caused the exception to be thrown. */ + public InvalidTTURLAttributesException(TransferProtocol prt, TFN tfn) { - nullProtocol = (prt == null); - nullTFN = (tfn == null); - if (!nullProtocol) - emptyProtocol = (prt == TransferProtocol.EMPTY); - if (!nullTFN) - emptyTFN = tfn.isEmpty(); - } + nullProtocol = (prt == null); + nullTFN = (tfn == null); + if (!nullProtocol) emptyProtocol = (prt == TransferProtocol.EMPTY); + if (!nullTFN) emptyTFN = tfn.isEmpty(); + } - public String toString() { + public String toString() { - return "nullProtocol=" + nullProtocol + "; nullTFN=" + nullTFN - + "; emptyProtocol=" + emptyProtocol + "; emptyTFN=" + emptyTFN + "."; - } + return "nullProtocol=" + + nullProtocol + + "; nullTFN=" + + nullTFN + + "; emptyProtocol=" + + emptyProtocol + + "; emptyTFN=" + + emptyTFN + + "."; + } } diff --git a/src/main/java/it/grid/storm/srm/types/InvalidTUserIDAttributeException.java b/src/main/java/it/grid/storm/srm/types/InvalidTUserIDAttributeException.java index ba8476fc..9d229e3a 100644 --- a/src/main/java/it/grid/storm/srm/types/InvalidTUserIDAttributeException.java +++ b/src/main/java/it/grid/storm/srm/types/InvalidTUserIDAttributeException.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if TUserIDData is not well formed. - * * - * + * This class represents an Exception throws if TUserIDData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date @@ -15,19 +13,19 @@ public class InvalidTUserIDAttributeException extends Exception { - private boolean nullString = true; - private boolean emptyString = true; + private boolean nullString = true; + private boolean emptyString = true; - public InvalidTUserIDAttributeException(String id) { + public InvalidTUserIDAttributeException(String id) { - nullString = (id == null); - if (id != null) { - emptyString = (id.equals("")); - } - } + nullString = (id == null); + if (id != null) { + emptyString = (id.equals("")); + } + } - public String toString() { + public String toString() { - return "nullString = " + nullString + " EmptyString = " + emptyString; - } + return "nullString = " + nullString + " EmptyString = " + emptyString; + } } diff --git a/src/main/java/it/grid/storm/srm/types/SRMCommandException.java b/src/main/java/it/grid/storm/srm/types/SRMCommandException.java index 3fbcee81..6fb88954 100644 --- a/src/main/java/it/grid/storm/srm/types/SRMCommandException.java +++ b/src/main/java/it/grid/storm/srm/types/SRMCommandException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -20,5 +19,4 @@ public TReturnStatus getReturnStatus() { return returnStatus; } - -} \ No newline at end of file +} diff --git a/src/main/java/it/grid/storm/srm/types/TAccessLatency.java b/src/main/java/it/grid/storm/srm/types/TAccessLatency.java index e7a7b724..ce245d5a 100644 --- a/src/main/java/it/grid/storm/srm/types/TAccessLatency.java +++ b/src/main/java/it/grid/storm/srm/types/TAccessLatency.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TAccessLatency SRM type. - * + * * @author Alberto Forti * @author CNAF - INFN Bologna * @date Luglio, 2006 @@ -16,80 +15,72 @@ public class TAccessLatency { - public static String PNAME_accessLatency = "accessLatency"; + public static String PNAME_accessLatency = "accessLatency"; - private String accessLatency = null; + private String accessLatency = null; - public static final TAccessLatency ONLINE = new TAccessLatency("ONLINE"); - public static final TAccessLatency NEARLINE = new TAccessLatency("NEARLINE"); - public static final TAccessLatency EMPTY = new TAccessLatency("EMPTY"); + public static final TAccessLatency ONLINE = new TAccessLatency("ONLINE"); + public static final TAccessLatency NEARLINE = new TAccessLatency("NEARLINE"); + public static final TAccessLatency EMPTY = new TAccessLatency("EMPTY"); - private TAccessLatency(String accessLatency) { + private TAccessLatency(String accessLatency) { - this.accessLatency = accessLatency; - } + this.accessLatency = accessLatency; + } - public final static TAccessLatency getTAccessLatency(int idx) { + public static final TAccessLatency getTAccessLatency(int idx) { - switch (idx) { - case 0: - return ONLINE; - case 1: - return NEARLINE; - default: - return EMPTY; - } - } + switch (idx) { + case 0: + return ONLINE; + case 1: + return NEARLINE; + default: + return EMPTY; + } + } - /** - * decode() method creates a TAccessLatency object from the information - * contained into the structured parameter received from the FE. 
- * - * @param inputParam - * map structure - * @param fieldName - * field name - * @return - */ - public final static TAccessLatency decode(Map inputParam, String fieldName) { + /** + * decode() method creates a TAccessLatency object from the information contained in the + * structured parameter received from the FE. + * + * @param inputParam map structure + * @param fieldName field name + * @return + */ + public static final TAccessLatency decode(Map inputParam, String fieldName) { - Integer val; + Integer val; - val = (Integer) inputParam.get(fieldName); - if (val == null) - return EMPTY; + val = (Integer) inputParam.get(fieldName); + if (val == null) return EMPTY; - return TAccessLatency.getTAccessLatency(val.intValue()); - } + return TAccessLatency.getTAccessLatency(val.intValue()); + } - /** - * encode() method creates structured parameter representing this ogbject. It - * is passed to the FE. - * - * @param outputParam - * hashtable structure - * @param fieldName - * field name - */ - public void encode(Map outputParam, String fieldName) { + /** + * encode() method creates a structured parameter representing this object. It is passed to the FE. + * + * @param outputParam hashtable structure + * @param fieldName field name + */ + public void encode(Map outputParam, String fieldName) { - Integer value = null; + Integer value = null; - if (this.equals(ONLINE)) - value = Integer.valueOf(0); - if (this.equals(NEARLINE)) - value = Integer.valueOf(1); + if (this.equals(ONLINE)) value = Integer.valueOf(0); + if (this.equals(NEARLINE)) value = Integer.valueOf(1); - outputParam.put(fieldName, value); - } + outputParam.put(fieldName, value); + } - public String toString() { + public String toString() { - return accessLatency; - } + return accessLatency; + } - public String getValue() { + public String getValue() { - return accessLatency; - } + return accessLatency; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TAccessPattern.java b/src/main/java/it/grid/storm/srm/types/TAccessPattern.java index 134a5906..bfb174b2 100644 --- a/src/main/java/it/grid/storm/srm/types/TAccessPattern.java +++ b/src/main/java/it/grid/storm/srm/types/TAccessPattern.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TTransferParameters SRM type. 
- * + * * @author Alberto Forti * @author Cnaf -INFN Bologna * @date July, 2006 @@ -16,80 +15,72 @@ public class TAccessPattern { - public static String PNAME_accessPattern = "accessPattern"; + public static String PNAME_accessPattern = "accessPattern"; - private String accessPattern = null; + private String accessPattern = null; - public static final TAccessPattern TRANSFER_MODE = new TAccessPattern( - "TRANSFER_MODE"), PROCESSING_MODE = new TAccessPattern("PROCESSING_MODE"), - EMPTY = new TAccessPattern("EMPTY"); + public static final TAccessPattern TRANSFER_MODE = new TAccessPattern("TRANSFER_MODE"), + PROCESSING_MODE = new TAccessPattern("PROCESSING_MODE"), + EMPTY = new TAccessPattern("EMPTY"); - private TAccessPattern(String accessPattern) { + private TAccessPattern(String accessPattern) { - this.accessPattern = accessPattern; - } + this.accessPattern = accessPattern; + } - public final static TAccessPattern getTAccessPattern(int idx) { + public static final TAccessPattern getTAccessPattern(int idx) { - switch (idx) { - case 0: - return TRANSFER_MODE; - case 1: - return PROCESSING_MODE; - default: - return EMPTY; - } - } + switch (idx) { + case 0: + return TRANSFER_MODE; + case 1: + return PROCESSING_MODE; + default: + return EMPTY; + } + } - /** - * decode() method creates a TAccessPattern object from the inforation - * contained into the structured parameter received from the FE. - * - * @param inputParam - * map structure - * @param fieldName - * field name - * @return - */ - public final static TAccessPattern decode(Map inputParam, String fieldName) { + /** + * decode() method creates a TAccessPattern object from the information contained in the + * structured parameter received from the FE. + * + * @param inputParam map structure + * @param fieldName field name + * @return + */ + public static final TAccessPattern decode(Map inputParam, String fieldName) { - Integer val; + Integer val; - val = (Integer) inputParam.get(fieldName); - if (val == null) - return EMPTY; + val = (Integer) inputParam.get(fieldName); + if (val == null) return EMPTY; - return TAccessPattern.getTAccessPattern(val.intValue()); - } + return TAccessPattern.getTAccessPattern(val.intValue()); + } - /** - * encode() method creates structured parameter representing this ogbject. It - * is passed to the FE. - * - * @param outputParam - * map structure - * @param fieldName - * field name - */ - public void encode(Map outputParam, String fieldName) { + /** + * encode() method creates a structured parameter representing this object. It is passed to the FE. 
+ * + * @param outputParam map structure + * @param fieldName field name + */ + public void encode(Map outputParam, String fieldName) { - Integer value = null; + Integer value = null; - if (this.equals(TAccessPattern.TRANSFER_MODE)) - value = Integer.valueOf(0); - if (this.equals(TAccessPattern.PROCESSING_MODE)) - value = Integer.valueOf(1); + if (this.equals(TAccessPattern.TRANSFER_MODE)) value = Integer.valueOf(0); + if (this.equals(TAccessPattern.PROCESSING_MODE)) value = Integer.valueOf(1); - outputParam.put(fieldName, value); - } + outputParam.put(fieldName, value); + } - public String toString() { + public String toString() { - return accessPattern; - } + return accessPattern; + } - public String getValue() { + public String getValue() { - return accessPattern; - } + return accessPattern; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TCheckSumType.java b/src/main/java/it/grid/storm/srm/types/TCheckSumType.java index c5c64615..010adec5 100644 --- a/src/main/java/it/grid/storm/srm/types/TCheckSumType.java +++ b/src/main/java/it/grid/storm/srm/types/TCheckSumType.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -8,40 +7,38 @@ /** * This class represents the TCheckSumType of a Permission Area managed by Srm. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ -/** - * Class that represent CheckSum for file. - */ +/** Class that represent CheckSum for file. */ public class TCheckSumType { - public static String PNAME_CHECKSUMTYPE = "checkSumType"; + public static String PNAME_CHECKSUMTYPE = "checkSumType"; - private String chkType = null; + private String chkType = null; - public TCheckSumType(String chkType) { + public TCheckSumType(String chkType) { - this.chkType = chkType; - } + this.chkType = chkType; + } - @Override - public String toString() { + @Override + public String toString() { - return chkType.toString(); - } + return chkType.toString(); + } - public String getValue() { + public String getValue() { - return chkType.toString(); - } + return chkType.toString(); + } - public void encode(Map param, String name) { + public void encode(Map param, String name) { - param.put(name, this.toString()); - } + param.put(name, this.toString()); + } }; diff --git a/src/main/java/it/grid/storm/srm/types/TCheckSumValue.java b/src/main/java/it/grid/storm/srm/types/TCheckSumValue.java index 0205571d..421d6ce2 100644 --- a/src/main/java/it/grid/storm/srm/types/TCheckSumValue.java +++ b/src/main/java/it/grid/storm/srm/types/TCheckSumValue.java @@ -1,48 +1,44 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; -import java.util.Hashtable; import java.util.Map; /** * This class represents the TCheckSumValue of a Permission Area managed by Srm. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ -/** - * Class that represent CheckSum for file. - */ +/** Class that represent CheckSum for file. 
*/ public class TCheckSumValue { - private String value = null; + private String value = null; - public static String PNAME_CHECKSUMVALUE = "checkSumValue"; + public static String PNAME_CHECKSUMVALUE = "checkSumValue"; - // TO Complete wut Exception if Strin specified == null - public TCheckSumValue(String value) { + // TO Complete wut Exception if Strin specified == null + public TCheckSumValue(String value) { - this.value = value; - } + this.value = value; + } - public String toString() { + public String toString() { - return value; - } + return value; + } - public String getValue() { + public String getValue() { - return value; - } + return value; + } - public void encode(Map param, String name) { + public void encode(Map param, String name) { - param.put(name, this.toString()); - } + param.put(name, this.toString()); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TConnectionType.java b/src/main/java/it/grid/storm/srm/types/TConnectionType.java index ad5614cf..83d70f42 100644 --- a/src/main/java/it/grid/storm/srm/types/TConnectionType.java +++ b/src/main/java/it/grid/storm/srm/types/TConnectionType.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TTransferParameters SRM type. - * + * * @author Alberto Forti * @author Cnaf -INFN Bologna * @date July, 2006 @@ -12,61 +11,58 @@ */ package it.grid.storm.srm.types; -import java.util.Hashtable; import java.util.Map; public class TConnectionType { - public static String PNAME_connectionType = "connectionType"; - private String connectionType = null; + public static String PNAME_connectionType = "connectionType"; + private String connectionType = null; - public static final TConnectionType WAN = new TConnectionType("WAN"), - LAN = new TConnectionType("LAN"), EMPTY = new TConnectionType("EMPTY"); + public static final TConnectionType WAN = new TConnectionType("WAN"), + LAN = new TConnectionType("LAN"), + EMPTY = new TConnectionType("EMPTY"); - private TConnectionType(String connectionType) { + private TConnectionType(String connectionType) { - this.connectionType = connectionType; - } + this.connectionType = connectionType; + } - public final static TConnectionType getTConnectionType(int idx) { + public static final TConnectionType getTConnectionType(int idx) { - switch (idx) { - case 0: - return WAN; - case 1: - return LAN; - default: - return EMPTY; - } - } + switch (idx) { + case 0: + return WAN; + case 1: + return LAN; + default: + return EMPTY; + } + } - public final static TConnectionType decode(Map inputParam, String fieldName) { + public static final TConnectionType decode(Map inputParam, String fieldName) { - Integer val; + Integer val; - val = (Integer) inputParam.get(fieldName); - if (val == null) - return EMPTY; + val = (Integer) inputParam.get(fieldName); + if (val == null) return EMPTY; - return TConnectionType.getTConnectionType(val.intValue()); - } + return TConnectionType.getTConnectionType(val.intValue()); + } - public int toInt(TConnectionType conType) { + public int toInt(TConnectionType conType) { - if (conType.equals(WAN)) - return 0; - if (conType.equals(LAN)) - return 1; - return 2; - } + if (conType.equals(WAN)) return 0; + if (conType.equals(LAN)) return 1; + return 2; + } - public String toString() { + public String toString() { - return connectionType; - } + return connectionType; + } - public 
String getValue() { + public String getValue() { - return connectionType; - } + return connectionType; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TDirOption.java b/src/main/java/it/grid/storm/srm/types/TDirOption.java index 223b33dd..ec5768b3 100644 --- a/src/main/java/it/grid/storm/srm/types/TDirOption.java +++ b/src/main/java/it/grid/storm/srm/types/TDirOption.java @@ -1,13 +1,12 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents an TDirOption Object. TDirOption contains information - * about directory visit. - * + * This class represents an TDirOption Object. TDirOption contains information about directory + * visit. + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril 20 @@ -15,86 +14,78 @@ */ public class TDirOption { - private boolean isASourceDirectory; - private boolean allLevelRecursive; - private int numOfLevels = 0; - - /** - * Constructor that requires boolean isDirectory indicating whether the SURL - * refers to a Directory or not, and a boolean allLevel to indicate if - * recursion on all sub-directories is wanted. If allLevel is false, an - * InvalidTDirOptionAttributesException is thrown. - */ - public TDirOption(boolean isDirectory, boolean allLevel) - throws InvalidTDirOptionAttributesException { - - this.allLevelRecursive = allLevel; - this.isASourceDirectory = isDirectory; - if (allLevelRecursive == false) - throw new InvalidTDirOptionAttributesException(allLevel, -1); - } - - private TDirOption(boolean isDirectory) { - - this.isASourceDirectory = isDirectory; - this.allLevelRecursive = false; - } - - public static TDirOption makeNotDirectory() { - - return new TDirOption(false); - } - - public static TDirOption makeFirstLevel() { - - return new TDirOption(true, false, 1); - } - /** - * Constructor that requires boolean isDirectory, boolean allLevel, int - * numLevel. An exception is thrown if allLevel is true, and numLevel>0. - */ - public TDirOption(boolean isDirectory, boolean allLevel, int numLevel) - throws InvalidTDirOptionAttributesException { - - if ((allLevel == true) && (numLevel > 0)) - throw new InvalidTDirOptionAttributesException(allLevel, numLevel); - allLevelRecursive = allLevel; - numOfLevels = numLevel; - isASourceDirectory = isDirectory; - } - - /** - * Return True if SURL associated with TDirOption is a valid directory for - * visit. - */ - public boolean isDirectory() { - - return isASourceDirectory; - } - - /** - * Return true if allLevelRecursive is true - */ - public boolean isAllLevelRecursive() { - - return allLevelRecursive; - } - - /** - * Return num of recursive level to visit. If isAllLevelRecursive then 0 is - * returned. - */ - public int getNumLevel() { - - if (!allLevelRecursive) - return numOfLevels; - else - return 0; - } - - public String toString() { - - return "isASourceDirectory=" + isASourceDirectory + " allLevelRecursive=" - + allLevelRecursive + " numOfLevels=" + numOfLevels; - } + private boolean isASourceDirectory; + private boolean allLevelRecursive; + private int numOfLevels = 0; + + /** + * Constructor that requires boolean isDirectory indicating whether the SURL refers to a Directory + * or not, and a boolean allLevel to indicate if recursion on all sub-directories is wanted. If + * allLevel is false, an InvalidTDirOptionAttributesException is thrown. 
+ */ + public TDirOption(boolean isDirectory, boolean allLevel) + throws InvalidTDirOptionAttributesException { + + this.allLevelRecursive = allLevel; + this.isASourceDirectory = isDirectory; + if (allLevelRecursive == false) throw new InvalidTDirOptionAttributesException(allLevel, -1); + } + + private TDirOption(boolean isDirectory) { + + this.isASourceDirectory = isDirectory; + this.allLevelRecursive = false; + } + + public static TDirOption makeNotDirectory() { + + return new TDirOption(false); + } + + public static TDirOption makeFirstLevel() { + + return new TDirOption(true, false, 1); + } + /** + * Constructor that requires boolean isDirectory, boolean allLevel, int numLevel. An exception is + * thrown if allLevel is true, and numLevel>0. + */ + public TDirOption(boolean isDirectory, boolean allLevel, int numLevel) + throws InvalidTDirOptionAttributesException { + + if ((allLevel == true) && (numLevel > 0)) + throw new InvalidTDirOptionAttributesException(allLevel, numLevel); + allLevelRecursive = allLevel; + numOfLevels = numLevel; + isASourceDirectory = isDirectory; + } + + /** Return True if SURL associated with TDirOption is a valid directory for visit. */ + public boolean isDirectory() { + + return isASourceDirectory; + } + + /** Return true if allLevelRecursive is true */ + public boolean isAllLevelRecursive() { + + return allLevelRecursive; + } + + /** Return num of recursive level to visit. If isAllLevelRecursive then 0 is returned. */ + public int getNumLevel() { + + if (!allLevelRecursive) return numOfLevels; + else return 0; + } + + public String toString() { + + return "isASourceDirectory=" + + isASourceDirectory + + " allLevelRecursive=" + + allLevelRecursive + + " numOfLevels=" + + numOfLevels; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TExtraInfo.java b/src/main/java/it/grid/storm/srm/types/TExtraInfo.java index 741df5ce..e4f99fc9 100644 --- a/src/main/java/it/grid/storm/srm/types/TExtraInfo.java +++ b/src/main/java/it/grid/storm/srm/types/TExtraInfo.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the TExtraInfo additional data associated with the SRM - * request. - * + * This class represents the TExtraInfo additional data associated with the SRM request. 
+ * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; import java.util.HashMap; @@ -19,82 +16,78 @@ public class TExtraInfo { - public static String PNAME_EXTRAINFO = "extraInfo"; - - private static String PNAME_KEY = "key"; - private static String PNAME_VALUE = "value"; - - private String key = null; - private String value = null; - - public TExtraInfo() { - - this.key = "unknown"; - this.value = "N/A"; - } - - public TExtraInfo(String key, String value) - throws InvalidTExtraInfoAttributeException { - - if (key == null) { - throw new InvalidTExtraInfoAttributeException(key); - } - this.key = key; - this.value = value; - } - - /** - * @param inputParam - * @param name - * @return - * @throws InvalidTExtraInfoAttributeException - */ - public static TExtraInfo decode(Map inputParam, String name) - throws InvalidTExtraInfoAttributeException { - - String k, val; - Map param = (Map) inputParam.get(name); - k = (String) param.get(TExtraInfo.PNAME_KEY); - val = (String) param.get(TExtraInfo.PNAME_VALUE); - return new TExtraInfo(k, val); - } - - /** - * @param param - * @return - * @throws InvalidTExtraInfoAttributeException - */ - public static TExtraInfo decode(Map param) - throws InvalidTExtraInfoAttributeException { - - String k, val; - k = (String) param.get(TExtraInfo.PNAME_KEY); - val = (String) param.get(TExtraInfo.PNAME_VALUE); - return new TExtraInfo(k, val); - } - - /** - * @param outputParam - * @param fieldName - */ - public void encode(Map outputParam, String fieldName) { - - HashMap param = new HashMap(); - this.encode(param); - outputParam.put(fieldName, param); - } - - /** - * @param outputParam - */ - public void encode(Map outputParam) { - - outputParam.put(TExtraInfo.PNAME_KEY, (String) key); - outputParam.put(TExtraInfo.PNAME_VALUE, (String) value); - } - - public String toString() { - - return "<'" + this.key + "','" + this.value + "'>"; - } + public static String PNAME_EXTRAINFO = "extraInfo"; + + private static String PNAME_KEY = "key"; + private static String PNAME_VALUE = "value"; + + private String key = null; + private String value = null; + + public TExtraInfo() { + + this.key = "unknown"; + this.value = "N/A"; + } + + public TExtraInfo(String key, String value) throws InvalidTExtraInfoAttributeException { + + if (key == null) { + throw new InvalidTExtraInfoAttributeException(key); + } + this.key = key; + this.value = value; + } + + /** + * @param inputParam + * @param name + * @return + * @throws InvalidTExtraInfoAttributeException + */ + public static TExtraInfo decode(Map inputParam, String name) + throws InvalidTExtraInfoAttributeException { + + String k, val; + Map param = (Map) inputParam.get(name); + k = (String) param.get(TExtraInfo.PNAME_KEY); + val = (String) param.get(TExtraInfo.PNAME_VALUE); + return new TExtraInfo(k, val); + } + + /** + * @param param + * @return + * @throws InvalidTExtraInfoAttributeException + */ + public static TExtraInfo decode(Map param) throws InvalidTExtraInfoAttributeException { + + String k, val; + k = (String) param.get(TExtraInfo.PNAME_KEY); + val = (String) param.get(TExtraInfo.PNAME_VALUE); + return new TExtraInfo(k, val); + } + + /** + * @param outputParam + * @param fieldName + */ + public void encode(Map outputParam, String fieldName) { + + HashMap param = new HashMap(); + this.encode(param); + outputParam.put(fieldName, param); + } + + /** @param outputParam */ + public void encode(Map outputParam) { + + outputParam.put(TExtraInfo.PNAME_KEY, (String) key); + 
outputParam.put(TExtraInfo.PNAME_VALUE, (String) value); + } + + public String toString() { + + return "<'" + this.key + "','" + this.value + "'>"; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TFileLocality.java b/src/main/java/it/grid/storm/srm/types/TFileLocality.java index d20bc34d..8f37cd3a 100644 --- a/src/main/java/it/grid/storm/srm/types/TFileLocality.java +++ b/src/main/java/it/grid/storm/srm/types/TFileLocality.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -8,7 +7,7 @@ /** * This class represents the TFileStorageType of an Srm request. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 @@ -16,132 +15,113 @@ */ public class TFileLocality { - public static String PNAME_FILELOCALITY = "fileLocality"; - - public static final TFileLocality ONLINE = new TFileLocality("ONLINE"); - public static final TFileLocality NEARLINE = new TFileLocality("NEARLINE"); - public static final TFileLocality ONLINE_AND_NEARLINE = new TFileLocality( - "ONLINE_AND_NEARLINE"); - public static final TFileLocality LOST = new TFileLocality("LOST"); - public static final TFileLocality NONE = new TFileLocality("NONE"); - public static final TFileLocality UNAVAILABLE = new TFileLocality( - "UNAVAILABLE"); - public static final TFileLocality EMPTY = new TFileLocality(""); - - private String fileLocality = null; - - private TFileLocality(String fileLoc) { - - this.fileLocality = fileLoc; - } - - public String toString() { - - return fileLocality; - } - - public String getValue() { - - return fileLocality; - } - - /** - * Facility method to obtain a TFileStorageType object given its String - * representation. If an invalid String is supplied, then an EMPTY - * TFileStorageType is returned. - */ - public static TFileLocality getTFileLocality(String loc) { - - if (loc.toLowerCase().replaceAll(" ", "") - .equals(ONLINE.getValue().toLowerCase())) { - return ONLINE; - } else if (loc.toLowerCase().replaceAll(" ", "") - .equals(NEARLINE.getValue().toLowerCase())) { - return NEARLINE; - } else if (loc.toLowerCase().replaceAll(" ", "") - .equals(ONLINE_AND_NEARLINE.getValue().toLowerCase())) { - return ONLINE_AND_NEARLINE; - } else if (loc.toLowerCase().replaceAll(" ", "") - .equals(LOST.getValue().toLowerCase())) { - return LOST; - } else if (loc.toLowerCase().replaceAll(" ", "") - .equals(NONE.getValue().toLowerCase())) { - return NONE; - } else if (loc.toLowerCase().replaceAll(" ", "") - .equals(UNAVAILABLE.getValue().toLowerCase())) { - return UNAVAILABLE; - } else { - return EMPTY; - } - } - - /** - * Facility method to obtain a TFileStorageType object given its int - * representation. If an invalid String is supplied, then an EMPTY - * TFileStorageType is returned. - */ - public static TFileLocality getTFileLocality(int loc) { - - switch (loc) { - case 0: - return ONLINE; - case 1: - return NEARLINE; - case 2: - return ONLINE_AND_NEARLINE; - case 3: - return LOST; - case 4: - return NONE; - case 5: - return UNAVAILABLE; - default: - return EMPTY; - } - } - - /** - * Decode method, use to create a TFileLocaliy object from the information - * contained into a structure parametet received from FE. 
- * - * @param inputParam - * @param name - * @return - */ - - public static TFileLocality decode(Map inputParam, String name) { - - Integer fileLoc = (Integer) inputParam.get(name); - if (fileLoc != null) - return TFileLocality.getTFileLocality(fileLoc.intValue()); - else - return TFileLocality.EMPTY; - } - - /** - * Encode method use to create a structured paramter that represents this - * object, used for pass information to FE. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { - - Integer value = null; - if (this.equals(TFileLocality.ONLINE)) - value = Integer.valueOf(0); - if (this.equals(TFileLocality.NEARLINE)) - value = Integer.valueOf(1); - if (this.equals(TFileLocality.ONLINE_AND_NEARLINE)) - value = Integer.valueOf(2); - if (this.equals(TFileLocality.LOST)) - value = Integer.valueOf(3); - if (this.equals(TFileLocality.NONE)) - value = Integer.valueOf(4); - if (this.equals(TFileLocality.UNAVAILABLE)) - value = Integer.valueOf(5); - param.put(name, value); - - } - + public static String PNAME_FILELOCALITY = "fileLocality"; + + public static final TFileLocality ONLINE = new TFileLocality("ONLINE"); + public static final TFileLocality NEARLINE = new TFileLocality("NEARLINE"); + public static final TFileLocality ONLINE_AND_NEARLINE = new TFileLocality("ONLINE_AND_NEARLINE"); + public static final TFileLocality LOST = new TFileLocality("LOST"); + public static final TFileLocality NONE = new TFileLocality("NONE"); + public static final TFileLocality UNAVAILABLE = new TFileLocality("UNAVAILABLE"); + public static final TFileLocality EMPTY = new TFileLocality(""); + + private String fileLocality = null; + + private TFileLocality(String fileLoc) { + + this.fileLocality = fileLoc; + } + + public String toString() { + + return fileLocality; + } + + public String getValue() { + + return fileLocality; + } + + /** + * Facility method to obtain a TFileStorageType object given its String representation. If an + * invalid String is supplied, then an EMPTY TFileStorageType is returned. + */ + public static TFileLocality getTFileLocality(String loc) { + + if (loc.toLowerCase().replaceAll(" ", "").equals(ONLINE.getValue().toLowerCase())) { + return ONLINE; + } else if (loc.toLowerCase().replaceAll(" ", "").equals(NEARLINE.getValue().toLowerCase())) { + return NEARLINE; + } else if (loc.toLowerCase() + .replaceAll(" ", "") + .equals(ONLINE_AND_NEARLINE.getValue().toLowerCase())) { + return ONLINE_AND_NEARLINE; + } else if (loc.toLowerCase().replaceAll(" ", "").equals(LOST.getValue().toLowerCase())) { + return LOST; + } else if (loc.toLowerCase().replaceAll(" ", "").equals(NONE.getValue().toLowerCase())) { + return NONE; + } else if (loc.toLowerCase().replaceAll(" ", "").equals(UNAVAILABLE.getValue().toLowerCase())) { + return UNAVAILABLE; + } else { + return EMPTY; + } + } + + /** + * Facility method to obtain a TFileStorageType object given its int representation. If an invalid + * String is supplied, then an EMPTY TFileStorageType is returned. + */ + public static TFileLocality getTFileLocality(int loc) { + + switch (loc) { + case 0: + return ONLINE; + case 1: + return NEARLINE; + case 2: + return ONLINE_AND_NEARLINE; + case 3: + return LOST; + case 4: + return NONE; + case 5: + return UNAVAILABLE; + default: + return EMPTY; + } + } + + /** + * Decode method, use to create a TFileLocaliy object from the information contained into a + * structure parametet received from FE. 
+ * + * @param inputParam + * @param name + * @return + */ + public static TFileLocality decode(Map inputParam, String name) { + + Integer fileLoc = (Integer) inputParam.get(name); + if (fileLoc != null) return TFileLocality.getTFileLocality(fileLoc.intValue()); + else return TFileLocality.EMPTY; + } + + /** + * Encode method use to create a structured paramter that represents this object, used for pass + * information to FE. + * + * @param param + * @param name + */ + public void encode(Map param, String name) { + + Integer value = null; + if (this.equals(TFileLocality.ONLINE)) value = Integer.valueOf(0); + if (this.equals(TFileLocality.NEARLINE)) value = Integer.valueOf(1); + if (this.equals(TFileLocality.ONLINE_AND_NEARLINE)) value = Integer.valueOf(2); + if (this.equals(TFileLocality.LOST)) value = Integer.valueOf(3); + if (this.equals(TFileLocality.NONE)) value = Integer.valueOf(4); + if (this.equals(TFileLocality.UNAVAILABLE)) value = Integer.valueOf(5); + param.put(name, value); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TFileStorageType.java b/src/main/java/it/grid/storm/srm/types/TFileStorageType.java index a73637ac..a4dedd97 100644 --- a/src/main/java/it/grid/storm/srm/types/TFileStorageType.java +++ b/src/main/java/it/grid/storm/srm/types/TFileStorageType.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -8,7 +7,7 @@ /** * This class represents the TFileStorageType of an Srm request. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 @@ -16,105 +15,94 @@ */ public class TFileStorageType { - private String fileType = null; - public final static String PNAME_FILESTORAGETYPE = "fileStorageType"; - - public static final TFileStorageType VOLATILE = new TFileStorageType( - "Volatile"); - public static final TFileStorageType DURABLE = new TFileStorageType("Durable"); - public static final TFileStorageType PERMANENT = new TFileStorageType( - "Permanent"); - public static final TFileStorageType EMPTY = new TFileStorageType("Empty"); - - private TFileStorageType(String fileType) { - - this.fileType = fileType; - } - - public String toString() { - - return fileType; - } - - public String getValue() { - - return fileType; - } - - /** - * Facility method to obtain a TFileStorageType object given its String - * representation. If an invalid String is supplied, then an EMPTY - * TFileStorageType is returned. - */ - public static TFileStorageType getTFileStorageType(String type) { - - if (type.toLowerCase().trim().equals(VOLATILE.getValue().toLowerCase())) { - return VOLATILE; - } - if (type.toLowerCase().trim().equals(PERMANENT.getValue().toLowerCase())) { - return PERMANENT; - } - if (type.toLowerCase().trim().equals(DURABLE.getValue().toLowerCase())) { - return DURABLE; - } else { - return EMPTY; - } - } - - /** - * Facility method to obtain a TFileStorageType object given its String - * representation. If an invalid String is supplied, then an EMPTY - * TFileStorageType is returned. - */ - public static TFileStorageType getTFileStorageType(int type) { - - switch (type) { - case 0: - return VOLATILE; - case 1: - return DURABLE; - case 2: - return PERMANENT; - default: - return EMPTY; - } - } - - /** - * Decode method use to create a TFileStorageType object from the information - * contain into structured parameter receive from FE. 
- * - * @param inputParam - * @param name - * @return - */ - - public static TFileStorageType decode(Map inputParam, String name) { - - Integer fileType = (Integer) inputParam.get(name); - if (fileType != null) - return TFileStorageType.getTFileStorageType(fileType.intValue()); - else - return TFileStorageType.EMPTY; - } - - /** - * Encode method use to Create a structured paramter that rapresents this - * object, used for pass information to FE. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { - - Integer value = null; - if (this.equals(TFileStorageType.VOLATILE)) - value = Integer.valueOf(0); - if (this.equals(TFileStorageType.DURABLE)) - value = Integer.valueOf(1); - if (this.equals(TFileStorageType.PERMANENT)) - value = Integer.valueOf(2); - param.put(name, value); - } - + private String fileType = null; + public static final String PNAME_FILESTORAGETYPE = "fileStorageType"; + + public static final TFileStorageType VOLATILE = new TFileStorageType("Volatile"); + public static final TFileStorageType DURABLE = new TFileStorageType("Durable"); + public static final TFileStorageType PERMANENT = new TFileStorageType("Permanent"); + public static final TFileStorageType EMPTY = new TFileStorageType("Empty"); + + private TFileStorageType(String fileType) { + + this.fileType = fileType; + } + + public String toString() { + + return fileType; + } + + public String getValue() { + + return fileType; + } + + /** + * Facility method to obtain a TFileStorageType object given its String representation. If an + * invalid String is supplied, then an EMPTY TFileStorageType is returned. + */ + public static TFileStorageType getTFileStorageType(String type) { + + if (type.toLowerCase().trim().equals(VOLATILE.getValue().toLowerCase())) { + return VOLATILE; + } + if (type.toLowerCase().trim().equals(PERMANENT.getValue().toLowerCase())) { + return PERMANENT; + } + if (type.toLowerCase().trim().equals(DURABLE.getValue().toLowerCase())) { + return DURABLE; + } else { + return EMPTY; + } + } + + /** + * Facility method to obtain a TFileStorageType object given its String representation. If an + * invalid String is supplied, then an EMPTY TFileStorageType is returned. + */ + public static TFileStorageType getTFileStorageType(int type) { + + switch (type) { + case 0: + return VOLATILE; + case 1: + return DURABLE; + case 2: + return PERMANENT; + default: + return EMPTY; + } + } + + /** + * Decode method use to create a TFileStorageType object from the information contain into + * structured parameter receive from FE. + * + * @param inputParam + * @param name + * @return + */ + public static TFileStorageType decode(Map inputParam, String name) { + + Integer fileType = (Integer) inputParam.get(name); + if (fileType != null) return TFileStorageType.getTFileStorageType(fileType.intValue()); + else return TFileStorageType.EMPTY; + } + + /** + * Encode method use to Create a structured paramter that rapresents this object, used for pass + * information to FE. 
+ * + * @param param + * @param name + */ + public void encode(Map param, String name) { + + Integer value = null; + if (this.equals(TFileStorageType.VOLATILE)) value = Integer.valueOf(0); + if (this.equals(TFileStorageType.DURABLE)) value = Integer.valueOf(1); + if (this.equals(TFileStorageType.PERMANENT)) value = Integer.valueOf(2); + param.put(name, value); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TFileType.java b/src/main/java/it/grid/storm/srm/types/TFileType.java index 0bd30553..3f3c4e75 100644 --- a/src/main/java/it/grid/storm/srm/types/TFileType.java +++ b/src/main/java/it/grid/storm/srm/types/TFileType.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -8,64 +7,57 @@ /** * This class represents the TFileType of a File Area managed by Srm. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - public class TFileType { - private String fileType = null; + private String fileType = null; - public static String PNAME_TYPE = "type"; - public static final TFileType FILE = new TFileType("File"); - public static final TFileType DIRECTORY = new TFileType("Directory"); - public static final TFileType LINK = new TFileType("Link"); + public static String PNAME_TYPE = "type"; + public static final TFileType FILE = new TFileType("File"); + public static final TFileType DIRECTORY = new TFileType("Directory"); + public static final TFileType LINK = new TFileType("Link"); - private TFileType(String fileType) { + private TFileType(String fileType) { - this.fileType = fileType; - } + this.fileType = fileType; + } - public String toString() { + public String toString() { - return fileType; - } + return fileType; + } - public String getValue() { + public String getValue() { - return fileType; - } + return fileType; + } - public static TFileType getTFileType(String type) { + public static TFileType getTFileType(String type) { - if (type.equals(FILE.getValue())) - return FILE; - if (type.equals(DIRECTORY.getValue())) - return DIRECTORY; - if (type.equals(LINK.getValue())) - return LINK; - return null; - } + if (type.equals(FILE.getValue())) return FILE; + if (type.equals(DIRECTORY.getValue())) return DIRECTORY; + if (type.equals(LINK.getValue())) return LINK; + return null; + } - /** - * Encode method use to represnts in a structured paramter this objects, for - * communication to FE component. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { + /** + * Encode method use to represnts in a structured paramter this objects, for communication to FE + * component. 
+ * + * @param param + * @param name + */ + public void encode(Map param, String name) { - Integer value = null; - if (this.equals(TFileType.FILE)) - value = Integer.valueOf(0); - if (this.equals(TFileType.DIRECTORY)) - value = Integer.valueOf(1); - if (this.equals(TFileType.LINK)) - value = Integer.valueOf(2); - param.put(name, value); - } + Integer value = null; + if (this.equals(TFileType.FILE)) value = Integer.valueOf(0); + if (this.equals(TFileType.DIRECTORY)) value = Integer.valueOf(1); + if (this.equals(TFileType.LINK)) value = Integer.valueOf(2); + param.put(name, value); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TGroupID.java b/src/main/java/it/grid/storm/srm/types/TGroupID.java index d64375bd..b11a6244 100644 --- a/src/main/java/it/grid/storm/srm/types/TGroupID.java +++ b/src/main/java/it/grid/storm/srm/types/TGroupID.java @@ -1,46 +1,42 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TGroupID in Srm request. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; import java.util.Map; public class TGroupID { - public static String NAME_GROUPID = "groupID"; - private String groupID; - - // To COMPLETE with Exception if string null specified - - public TGroupID(String id) { + public static String NAME_GROUPID = "groupID"; + private String groupID; - groupID = id; - } + // To COMPLETE with Exception if string null specified - public String toString() { + public TGroupID(String id) { - return groupID; - } + groupID = id; + } - public String getValue() { + public String toString() { - return groupID; - } + return groupID; + } - public void encode(Map param, String name) { + public String getValue() { - param.put(name, groupID); + return groupID; + } - } + public void encode(Map param, String name) { + param.put(name, groupID); + } }; diff --git a/src/main/java/it/grid/storm/srm/types/TGroupPermission.java b/src/main/java/it/grid/storm/srm/types/TGroupPermission.java index 22146668..be54d4d5 100644 --- a/src/main/java/it/grid/storm/srm/types/TGroupPermission.java +++ b/src/main/java/it/grid/storm/srm/types/TGroupPermission.java @@ -1,16 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TGroupPermission in Srm request. 
- * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; import java.util.HashMap; @@ -18,52 +16,51 @@ public class TGroupPermission { - private TGroupID groupID; - private TPermissionMode permMode; - - public static String PNAME_GROUPPERMISSION = "groupPermission"; + private TGroupID groupID; + private TPermissionMode permMode; - public TGroupPermission(TGroupID groupID, TPermissionMode permMode) { + public static String PNAME_GROUPPERMISSION = "groupPermission"; - this.groupID = groupID; - this.permMode = permMode; - } + public TGroupPermission(TGroupID groupID, TPermissionMode permMode) { - public TGroupID getGroupID() { + this.groupID = groupID; + this.permMode = permMode; + } - return groupID; - } + public TGroupID getGroupID() { - public TPermissionMode getPermissionMode() { + return groupID; + } - return permMode; - } + public TPermissionMode getPermissionMode() { - public static TGroupPermission makeDirectoryDefault() { + return permMode; + } - return new TGroupPermission(new TGroupID("undef"), TPermissionMode.NONE); - } + public static TGroupPermission makeDirectoryDefault() { - public static TGroupPermission makeFileDefault() { + return new TGroupPermission(new TGroupID("undef"), TPermissionMode.NONE); + } - return new TGroupPermission(new TGroupID("undef"), TPermissionMode.NONE); - } + public static TGroupPermission makeFileDefault() { - /** - * Encode method use to provide a represnetation of this object into a - * structures paramter for communication to FE component. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { + return new TGroupPermission(new TGroupID("undef"), TPermissionMode.NONE); + } - Map paramStructure = new HashMap(); - if ((groupID != null) && (permMode != null)) { - groupID.encode(paramStructure, TGroupID.NAME_GROUPID); - permMode.encode(paramStructure, TPermissionMode.PNAME_MODE); - param.put(name, paramStructure); - } - } + /** + * Encode method use to provide a represnetation of this object into a structures paramter for + * communication to FE component. + * + * @param param + * @param name + */ + public void encode(Map param, String name) { + Map paramStructure = new HashMap(); + if ((groupID != null) && (permMode != null)) { + groupID.encode(paramStructure, TGroupID.NAME_GROUPID); + permMode.encode(paramStructure, TPermissionMode.PNAME_MODE); + param.put(name, paramStructure); + } + } } diff --git a/src/main/java/it/grid/storm/srm/types/TLifeTimeInSeconds.java b/src/main/java/it/grid/storm/srm/types/TLifeTimeInSeconds.java index ab8df371..7faafe1a 100644 --- a/src/main/java/it/grid/storm/srm/types/TLifeTimeInSeconds.java +++ b/src/main/java/it/grid/storm/srm/types/TLifeTimeInSeconds.java @@ -1,265 +1,228 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents TLifeTime in seconds as a long. 
- * + * * @author Ezio Corso - Magnoni Luca * @author EGRID ICTP Trieste / CNAF INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; +import it.grid.storm.common.types.*; import java.io.Serializable; import java.util.Date; import java.util.Map; -import it.grid.storm.common.types.*; - public class TLifeTimeInSeconds implements Serializable { - /** - * - */ - private static final long serialVersionUID = -8025723621535456819L; - private long time = -1; - private TimeUnit u = TimeUnit.EMPTY; - private boolean empty = true; - private static TLifeTimeInSeconds emptyTime = null; - private boolean infinite = false; - private static TLifeTimeInSeconds infiniteTime = null; - - public static String PNAME_LIFETIMEASSIGNED = "lifetimeAssigned"; - public static String PNAME_LIFETIMELEFT = "lifetimeLeft"; - public static String PNAME_DESIREDLIFETIMEOFRESERVEDSPACE = "desiredLifetimeOfReservedSpace"; - public static String PNAME_LIFETIMEOFRESERVEDSPACE = "lifetimeOfReservedSpace"; - public static String PNAME_FILELIFETIME = "fileLifetime"; - public static String PNAME_PINLIFETIME = "pinLifetime"; - - /** - * This constructor requires a long time representing the time in TimeUnit u. - */ - private TLifeTimeInSeconds(long time, TimeUnit u, boolean empty, - boolean infinite) { - - this.time = time; - this.u = u; - this.empty = empty; - this.infinite = infinite; - } - - /** - * Method that requires a long time representing the time in TimeUnit u; it - * throws an InvalidTLifeTimeAttributeException if u is null. A negative value - * for time, automatically results in an Infinite TLifeTimeInSeconds. - */ - public static TLifeTimeInSeconds make(long time, TimeUnit unit) - throws IllegalArgumentException { - - if (unit == null) { - throw new IllegalArgumentException( - "Unable to create the object, illegal arguments: time=" + time - + " unit=" + unit); - } - if (time < 0) { - return makeInfinite(); - } - return new TLifeTimeInSeconds(time, unit, false, false); - } - - /** - * Method that returns an Empty TLifeTimeInSeconds. - */ - public static TLifeTimeInSeconds makeEmpty() { - - if (emptyTime == null) - emptyTime = new TLifeTimeInSeconds(0, TimeUnit.EMPTY, true, false); - return emptyTime; - } - - /** - * Method that returns an Infinite TLifeTimeInSeconds. - */ - public static TLifeTimeInSeconds makeInfinite() { - - if (infiniteTime == null) - infiniteTime = new TLifeTimeInSeconds(-1, TimeUnit.EMPTY, false, true); - return infiniteTime; - } - - /** - * Method that returns true if this is an Empty TLifeTimeInSeconds. - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns true if This is an Infinite TLifeTimeInSeconds. - */ - public boolean isInfinite() { - - return infinite; - } - - /** - * Method that returns a long value for this LifeTime. It returns -1 if This - * is an Empty or Infinite TLifeTimeInSeconds. - */ - public long value() { - - return time; - } - - /** - * Method that returns the TimeUnit for this LifeTime. It returns - * TimeUnit.EMPTY if This is an Empty or Infintie TLifeTimeInSeconds. - */ - public TimeUnit unit() { - - return u; - } - - /** - * Public static method that return this LifeTime value converted into the - * specified TimeUnit. It returns -1 if a null TimeUnit is passed, or if This - * is an Empty or Infinite TLifeTimeInSeconds. 
- */ - public double getTimeIn(TimeUnit u) { - - if ((!empty) && (!infinite) && (u != null)) { - Long l_time = Long.valueOf(time); - double result = l_time.doubleValue() - * (this.u.conversionFactor() / u.conversionFactor()); - return result; - } else - return -1; - } - - /** - * Returns the number of seconds remaining to reach startTimeInSeconds plus - * the value of this instance. - * - * @param startTimeInSeconds - * The starting time in seconds. - * @return Seconds remaining, zero otherwise. - */ - public TLifeTimeInSeconds timeLeft(long startTimeInSeconds) { - - if (empty) - return emptyTime; - long secondsLeft = this.time + startTimeInSeconds; - - Date currentDate = new Date(); - long currentTime = currentDate.getTime(); // current time in milliseconds - currentTime /= 1000; // current time in seconds - secondsLeft -= currentTime; - if (secondsLeft < 0) - secondsLeft = 0; - - TLifeTimeInSeconds timeLeft = null; - try { - timeLeft = TLifeTimeInSeconds.make(secondsLeft, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - timeLeft = TLifeTimeInSeconds.makeEmpty(); - } - - return timeLeft; - } - - /** - * Returns the number of seconds remaining to reach startingDate plus the - * value of this instance. - * - * @param startingDate - * The starting date. - * @return Seconds remaining, zero otherwise. - */ - public TLifeTimeInSeconds timeLeft(Date startingDate) { - - if (empty || (startingDate == null)) - return emptyTime; - long startTimeInSeconds = startingDate.getTime() / 1000; - return timeLeft(startTimeInSeconds); - } - - /** - * Method that returns a TSizeInBytes object retrieving its value by the - * Hashtable used for communicating with the FE - */ - public static TLifeTimeInSeconds decode(Map inputParam, String fieldName) { - - String lifetime = (String) inputParam.get(fieldName); - - if (lifetime == null) - return TLifeTimeInSeconds.makeEmpty(); - long lifetimeLong = Long.parseLong(lifetime); - - try { - return TLifeTimeInSeconds.make(lifetimeLong, TimeUnit.SECONDS); - } catch (IllegalArgumentException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - return TLifeTimeInSeconds.makeEmpty(); - } - } - - /** - * Encode method, create a representation of this object into a structured - * paramter used for communication to FE component. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { - - if (empty) - return; - String lifetimeString; - lifetimeString = String.valueOf(this.time); - param.put(name, lifetimeString); - } - - public String toString() { - - if (empty) - return "Empty TLifeTimeInSeconds!"; - if (infinite) - return "Infinite TLifeTimeInSeconds"; - return "" + time + " " + u; - } - - /** - * Beware that this equality will _not_ return true for the same quantity - * expressed in different units of measure! 
- */ - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof TLifeTimeInSeconds)) - return false; - TLifeTimeInSeconds et = (TLifeTimeInSeconds) o; - if ((this.empty) && (et.empty)) - return true; - if ((this.infinite) && (et.infinite)) - return true; - return ((this.time == et.time) && (this.u == et.u) && (this.empty == et.empty)); - } - - public int hashCode() { - - if (empty) - return -1; - if (infinite) - return -2; - int hash = 17; - hash = 37 * hash + Long.valueOf(time).hashCode(); - hash = 37 * hash + u.hashCode(); - return hash; - } + /** */ + private static final long serialVersionUID = -8025723621535456819L; + + private long time = -1; + private TimeUnit u = TimeUnit.EMPTY; + private boolean empty = true; + private static TLifeTimeInSeconds emptyTime = null; + private boolean infinite = false; + private static TLifeTimeInSeconds infiniteTime = null; + + public static String PNAME_LIFETIMEASSIGNED = "lifetimeAssigned"; + public static String PNAME_LIFETIMELEFT = "lifetimeLeft"; + public static String PNAME_DESIREDLIFETIMEOFRESERVEDSPACE = "desiredLifetimeOfReservedSpace"; + public static String PNAME_LIFETIMEOFRESERVEDSPACE = "lifetimeOfReservedSpace"; + public static String PNAME_FILELIFETIME = "fileLifetime"; + public static String PNAME_PINLIFETIME = "pinLifetime"; + + /** This constructor requires a long time representing the time in TimeUnit u. */ + private TLifeTimeInSeconds(long time, TimeUnit u, boolean empty, boolean infinite) { + + this.time = time; + this.u = u; + this.empty = empty; + this.infinite = infinite; + } + + /** + * Method that requires a long time representing the time in TimeUnit u; it throws an + * InvalidTLifeTimeAttributeException if u is null. A negative value for time, automatically + * results in an Infinite TLifeTimeInSeconds. + */ + public static TLifeTimeInSeconds make(long time, TimeUnit unit) throws IllegalArgumentException { + + if (unit == null) { + throw new IllegalArgumentException( + "Unable to create the object, illegal arguments: time=" + time + " unit=" + unit); + } + if (time < 0) { + return makeInfinite(); + } + return new TLifeTimeInSeconds(time, unit, false, false); + } + + /** Method that returns an Empty TLifeTimeInSeconds. */ + public static TLifeTimeInSeconds makeEmpty() { + + if (emptyTime == null) emptyTime = new TLifeTimeInSeconds(0, TimeUnit.EMPTY, true, false); + return emptyTime; + } + + /** Method that returns an Infinite TLifeTimeInSeconds. */ + public static TLifeTimeInSeconds makeInfinite() { + + if (infiniteTime == null) + infiniteTime = new TLifeTimeInSeconds(-1, TimeUnit.EMPTY, false, true); + return infiniteTime; + } + + /** Method that returns true if this is an Empty TLifeTimeInSeconds. */ + public boolean isEmpty() { + + return empty; + } + + /** Method that returns true if This is an Infinite TLifeTimeInSeconds. */ + public boolean isInfinite() { + + return infinite; + } + + /** + * Method that returns a long value for this LifeTime. It returns -1 if This is an Empty or + * Infinite TLifeTimeInSeconds. + */ + public long value() { + + return time; + } + + /** + * Method that returns the TimeUnit for this LifeTime. It returns TimeUnit.EMPTY if This is an + * Empty or Infintie TLifeTimeInSeconds. + */ + public TimeUnit unit() { + + return u; + } + + /** + * Public static method that return this LifeTime value converted into the specified TimeUnit. It + * returns -1 if a null TimeUnit is passed, or if This is an Empty or Infinite TLifeTimeInSeconds. 
+ */ + public double getTimeIn(TimeUnit u) { + + if ((!empty) && (!infinite) && (u != null)) { + Long l_time = Long.valueOf(time); + double result = l_time.doubleValue() * (this.u.conversionFactor() / u.conversionFactor()); + return result; + } else return -1; + } + + /** + * Returns the number of seconds remaining to reach startTimeInSeconds plus the value of this + * instance. + * + * @param startTimeInSeconds The starting time in seconds. + * @return Seconds remaining, zero otherwise. + */ + public TLifeTimeInSeconds timeLeft(long startTimeInSeconds) { + + if (empty) return emptyTime; + long secondsLeft = this.time + startTimeInSeconds; + + Date currentDate = new Date(); + long currentTime = currentDate.getTime(); // current time in milliseconds + currentTime /= 1000; // current time in seconds + secondsLeft -= currentTime; + if (secondsLeft < 0) secondsLeft = 0; + + TLifeTimeInSeconds timeLeft = null; + try { + timeLeft = TLifeTimeInSeconds.make(secondsLeft, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + timeLeft = TLifeTimeInSeconds.makeEmpty(); + } + + return timeLeft; + } + + /** + * Returns the number of seconds remaining to reach startingDate plus the value of this instance. + * + * @param startingDate The starting date. + * @return Seconds remaining, zero otherwise. + */ + public TLifeTimeInSeconds timeLeft(Date startingDate) { + + if (empty || (startingDate == null)) return emptyTime; + long startTimeInSeconds = startingDate.getTime() / 1000; + return timeLeft(startTimeInSeconds); + } + + /** + * Method that returns a TSizeInBytes object retrieving its value by the Hashtable used for + * communicating with the FE + */ + public static TLifeTimeInSeconds decode(Map inputParam, String fieldName) { + + String lifetime = (String) inputParam.get(fieldName); + + if (lifetime == null) return TLifeTimeInSeconds.makeEmpty(); + long lifetimeLong = Long.parseLong(lifetime); + + try { + return TLifeTimeInSeconds.make(lifetimeLong, TimeUnit.SECONDS); + } catch (IllegalArgumentException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + return TLifeTimeInSeconds.makeEmpty(); + } + } + + /** + * Encode method, create a representation of this object into a structured paramter used for + * communication to FE component. + * + * @param param + * @param name + */ + public void encode(Map param, String name) { + + if (empty) return; + String lifetimeString; + lifetimeString = String.valueOf(this.time); + param.put(name, lifetimeString); + } + + public String toString() { + + if (empty) return "Empty TLifeTimeInSeconds!"; + if (infinite) return "Infinite TLifeTimeInSeconds"; + return "" + time + " " + u; + } + + /** + * Beware that this equality will _not_ return true for the same quantity expressed in different + * units of measure! 
+ */ + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof TLifeTimeInSeconds)) return false; + TLifeTimeInSeconds et = (TLifeTimeInSeconds) o; + if ((this.empty) && (et.empty)) return true; + if ((this.infinite) && (et.infinite)) return true; + return ((this.time == et.time) && (this.u == et.u) && (this.empty == et.empty)); + } + + public int hashCode() { + + if (empty) return -1; + if (infinite) return -2; + int hash = 17; + hash = 37 * hash + Long.valueOf(time).hashCode(); + hash = 37 * hash + u.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TMetaDataPathDetail.java b/src/main/java/it/grid/storm/srm/types/TMetaDataPathDetail.java index 4de09867..6aa00f38 100644 --- a/src/main/java/it/grid/storm/srm/types/TMetaDataPathDetail.java +++ b/src/main/java/it/grid/storm/srm/types/TMetaDataPathDetail.java @@ -1,27 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * - * This class represents the TMetaDataPathDetail,return structure for ls - * request. - * - * - * + * This class represents the TMetaDataPathDetail,return structure for ls request. + * * @author Magnoni Luca - * * @author Cnaf -INFN Bologna - * * @date - * * @version 1.0 */ - package it.grid.storm.srm.types; import it.grid.storm.common.types.StFN; - import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; @@ -30,444 +20,352 @@ public class TMetaDataPathDetail { - private static final SimpleDateFormat dateFormat = new SimpleDateFormat( - "yyyy-MM-dd'T'HH:mm:ss"); - - private TSURL surl = null; - // Change in new srm 2.2 - private StFN stfn = null; - private TReturnStatus retStatus = null; - private TSizeInBytes size = null; - private Date createdAtTime = null; - private Date lastModificationAtTime = null; - private TFileStorageType fileStorageType = null; - private TRetentionPolicyInfo retentionPolicyInfo = null; - private TFileLocality fileLocality = null; - private ArrayOfTSpaceToken tokenArray = null; - private TFileType type = null; - private TLifeTimeInSeconds lifetimeAssigned = null; - private TLifeTimeInSeconds lifetimeLeft = null; - private TUserPermission ownerPermission = null; - private TGroupPermission groupPermission = null; - private TPermissionMode otherPermission = null; - private TCheckSumType checkSumType = null; - private TCheckSumValue checkSumValue = null; - private ArrayOfTMetaDataPathDetail arrayOfSubPaths = null; - - public TMetaDataPathDetail() { - - } - - /** - * Method that return Surl - */ - public TSURL getSurl() { - - return surl; - } - - /** - * Method that set Surl - */ - public void setSurl(TSURL surl) { - - this.surl = surl; - } - - /** - * Method that return StFN - */ - public StFN getStFN() { - - return stfn; - } - - /** - * Method that set StFN - */ - public void setStFN(StFN stfn) { - - this.stfn = stfn; - } - - /** - * Method that return Status - */ - public TReturnStatus getStatus() { - - return retStatus; - } - - /** - * Method that set Status. 
- */ - public void setStatus(TReturnStatus status) { - - this.retStatus = status; - } - - /** - * Method that Return Size - */ - public TSizeInBytes getSize() { - - return size; - } - - /** - * Method that set Size - */ - public void setSize(TSizeInBytes size) { - - this.size = size; - } - - /** - * Method that get LastModificationAtTime Value; - */ - public Date getModificationTime() { - - return lastModificationAtTime; - } - - /** - * Method that set CreatedAtTime Value - */ - public void setModificationTime(Date lastModificationAtTime) { - - this.lastModificationAtTime = lastModificationAtTime; - } - - /** - * Method that get CreatedAtTime Value; - */ - public Date getCreationTime() { - - return createdAtTime; - } - - /** - * Method that set CreatedAtTime Value - */ - public void setCreationTime(Date createdAtTime) { + private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); + + private TSURL surl = null; + // Change in new srm 2.2 + private StFN stfn = null; + private TReturnStatus retStatus = null; + private TSizeInBytes size = null; + private Date createdAtTime = null; + private Date lastModificationAtTime = null; + private TFileStorageType fileStorageType = null; + private TRetentionPolicyInfo retentionPolicyInfo = null; + private TFileLocality fileLocality = null; + private ArrayOfTSpaceToken tokenArray = null; + private TFileType type = null; + private TLifeTimeInSeconds lifetimeAssigned = null; + private TLifeTimeInSeconds lifetimeLeft = null; + private TUserPermission ownerPermission = null; + private TGroupPermission groupPermission = null; + private TPermissionMode otherPermission = null; + private TCheckSumType checkSumType = null; + private TCheckSumValue checkSumValue = null; + private ArrayOfTMetaDataPathDetail arrayOfSubPaths = null; + + public TMetaDataPathDetail() {} + + /** Method that return Surl */ + public TSURL getSurl() { + + return surl; + } + + /** Method that set Surl */ + public void setSurl(TSURL surl) { + + this.surl = surl; + } + + /** Method that return StFN */ + public StFN getStFN() { + + return stfn; + } + + /** Method that set StFN */ + public void setStFN(StFN stfn) { + + this.stfn = stfn; + } + + /** Method that return Status */ + public TReturnStatus getStatus() { + + return retStatus; + } + + /** Method that set Status. 
*/ + public void setStatus(TReturnStatus status) { + + this.retStatus = status; + } + + /** Method that Return Size */ + public TSizeInBytes getSize() { + + return size; + } + + /** Method that set Size */ + public void setSize(TSizeInBytes size) { + + this.size = size; + } + + /** Method that get LastModificationAtTime Value; */ + public Date getModificationTime() { + + return lastModificationAtTime; + } + + /** Method that set CreatedAtTime Value */ + public void setModificationTime(Date lastModificationAtTime) { + + this.lastModificationAtTime = lastModificationAtTime; + } + + /** Method that get CreatedAtTime Value; */ + public Date getCreationTime() { + + return createdAtTime; + } + + /** Method that set CreatedAtTime Value */ + public void setCreationTime(Date createdAtTime) { + + this.createdAtTime = createdAtTime; + } + + /** Method that return TFileStorageType; */ + public TFileStorageType getFileStorageType() { + + return fileStorageType; + } + + /** Method that set TFileStorageType */ + public void setTFileStorageType(TFileStorageType type) { + + this.fileStorageType = type; + } + + /** Method that return TRetentionPolicyInfo; */ + public TRetentionPolicyInfo getTRetentionPolicyInfo() { + + return this.retentionPolicyInfo; + } + + /** Method that set TRetentionPolicyInfo */ + public void setTRetentionPolicyInfo(TRetentionPolicyInfo info) { + + this.retentionPolicyInfo = info; + } - this.createdAtTime = createdAtTime; - } + /** Method that return TFileLocality; */ + public TFileLocality getTFileLocality() { - /** - * Method that return TFileStorageType; - */ - public TFileStorageType getFileStorageType() { + return this.fileLocality; + } - return fileStorageType; - } + /** Method that set TFileLocality */ + public void setTFileLocality(TFileLocality loc) { - /** - * Method that set TFileStorageType - */ - public void setTFileStorageType(TFileStorageType type) { + this.fileLocality = loc; + } - this.fileStorageType = type; - } + /** Method that return array of TSpaceToken; */ + public ArrayOfTSpaceToken getArrayOfTSpaceToken() { - /** - * Method that return TRetentionPolicyInfo; - */ - public TRetentionPolicyInfo getTRetentionPolicyInfo() { + return this.tokenArray; + } - return this.retentionPolicyInfo; - } + /** Method that set array of TSpaceToken */ + public void setArrayOfTSpaceToken(ArrayOfTSpaceToken tokenArray) { - /** - * Method that set TRetentionPolicyInfo - */ - public void setTRetentionPolicyInfo(TRetentionPolicyInfo info) { + this.tokenArray = tokenArray; + } - this.retentionPolicyInfo = info; - } + /** Method that return TFileType; */ + public TFileType getFileType() { - /** - * Method that return TFileLocality; - */ - public TFileLocality getTFileLocality() { + return type; + } - return this.fileLocality; - } + /** Method that set TFileType */ + public void setFileType(TFileType type) { - /** - * Method that set TFileLocality - */ - public void setTFileLocality(TFileLocality loc) { + this.type = type; + } - this.fileLocality = loc; - } + /** Method that GET lifetime assigned; */ + public TLifeTimeInSeconds getLifetimeAssigned() { - /** - * Method that return array of TSpaceToken; - */ - public ArrayOfTSpaceToken getArrayOfTSpaceToken() { + return lifetimeAssigned; + } - return this.tokenArray; - } + /** Method that set lifetime assigned */ + public void setLifeTimeAssigned(TLifeTimeInSeconds lifetime) { - /** - * Method that set array of TSpaceToken - */ - public void setArrayOfTSpaceToken(ArrayOfTSpaceToken tokenArray) { + this.lifetimeAssigned = lifetime; + } - 
this.tokenArray = tokenArray; - } + /** Method that GET lifetime LEFT; */ + public TLifeTimeInSeconds getLifetimeLeft() { - /** - * Method that return TFileType; - */ - public TFileType getFileType() { + return lifetimeLeft; + } - return type; - } + /** Method that set lifetime Left */ + public void setLifetimeLeft(TLifeTimeInSeconds lifetime) { - /** - * Method that set TFileType - */ - public void setFileType(TFileType type) { + this.lifetimeLeft = lifetime; + } - this.type = type; - } + /** Method that set OwnerPermission */ + public void setOwnerPermission(TUserPermission ownerPermission) { - /** - * Method that GET lifetime assigned; - */ - public TLifeTimeInSeconds getLifetimeAssigned() { + this.ownerPermission = ownerPermission; + } - return lifetimeAssigned; - } + /** Method that return OwnerPermission; */ + public TUserPermission getOwnerPermission() { - /** - * Method that set lifetime assigned - */ - public void setLifeTimeAssigned(TLifeTimeInSeconds lifetime) { + return ownerPermission; + } - this.lifetimeAssigned = lifetime; - } + /** Method that set TGroupPermissionArray */ + public void setGroupPermission(TGroupPermission groupP) { - /** - * Method that GET lifetime LEFT; - */ + this.groupPermission = groupP; + } - public TLifeTimeInSeconds getLifetimeLeft() { + /** Method that get TGroupPermissionArray */ + public TGroupPermission getGroupPermission() { - return lifetimeLeft; - } + return groupPermission; + } - /** - * Method that set lifetime Left - */ - public void setLifetimeLeft(TLifeTimeInSeconds lifetime) { + /** Method that set otherPermission */ + public void setOtherPermission(TPermissionMode otherP) { - this.lifetimeLeft = lifetime; - } + this.otherPermission = otherP; + } - /** - * Method that set OwnerPermission - */ - public void setOwnerPermission(TUserPermission ownerPermission) { - - this.ownerPermission = ownerPermission; - } - - /** - * Method that return OwnerPermission; - */ - public TUserPermission getOwnerPermission() { - - return ownerPermission; - } - - /** - * Method that set TGroupPermissionArray - */ - public void setGroupPermission(TGroupPermission groupP) { - - this.groupPermission = groupP; - } - - /** - * Method that get TGroupPermissionArray - */ - public TGroupPermission getGroupPermission() { - - return groupPermission; - } - - /** - * Method that set otherPermission - */ - public void setOtherPermission(TPermissionMode otherP) { - - this.otherPermission = otherP; - } - - /** - * Method that get otherPermission - */ - public TPermissionMode getUserPermissionArray() { - - return otherPermission; - } - - /** - * - * @param checkSumType - */ - public void setCheckSumType(TCheckSumType checkSumType) { - - this.checkSumType = checkSumType; - } - - /** - * Method that get CHECKSUMTYPE - */ - public TCheckSumType getCheckSumType() { - - return checkSumType; - } - - /** - * - * @param checkSumValue - */ - public void setCheckSumValue(TCheckSumValue checkSumValue) { - - this.checkSumValue = checkSumValue; - } - - /** - * Method that get CHECKSUMVALUE - */ - public TCheckSumValue getCheckSumValue() { - - return checkSumValue; - } - - /** - * Method that get TMetaDataPathDetails - * - * @TODO - */ - public ArrayOfTMetaDataPathDetail getArrayOfSubPaths() { - - return arrayOfSubPaths; - } - - public void setArrayOfSubPaths(ArrayOfTMetaDataPathDetail array) { - - arrayOfSubPaths = array; - } - - /** - * Encode method, used to encode a TMetaDataPathDetail object into a - * structured paramter (Hashtable), used for communicate to the FE component - * 
thourgh xmlrpc. - * - * @param param - * Hashtable that must contain structures results - * @param name - * name for the TMetaData field - */ - public void encode(List list) { - - Map param = new HashMap(); - - /* (1) StFN */ - if (this.stfn != null) { - this.stfn.encode(param, StFN.PNAME_PATH); - } - /* (2) TReturnStatus */ - if (this.retStatus != null) { - this.retStatus.encode(param, TReturnStatus.PNAME_STATUS); - } - /* (3) Size */ - if (this.size != null) { - this.size.encode(param, TSizeInBytes.PNAME_SIZE); - } - /* (4) createdAtTime */ - if (this.createdAtTime != null) { - param.put("createdAtTime", dateFormat.format(createdAtTime)); - } - /* (5) lastModificationTime */ - if (this.lastModificationAtTime != null) { - // param.put("lastModificationTime", lastModificationAtTime.toString()); - param.put("lastModificationTime", - dateFormat.format(lastModificationAtTime)); - } - /* (6) fileStorageType */ - if (this.fileStorageType != null) { - this.fileStorageType - .encode(param, TFileStorageType.PNAME_FILESTORAGETYPE); - } - /* (7) TRetentionPolicyInfo */ - if (this.retentionPolicyInfo != null) { - this.retentionPolicyInfo.encode(param, - TRetentionPolicyInfo.PNAME_retentionPolicyInfo); - } - /* (8) fileLocality */ - if (this.fileLocality != null) { - this.fileLocality.encode(param, TFileLocality.PNAME_FILELOCALITY); - } - /* (9) ArrayOfTSpaceToken */ - if (this.tokenArray != null) { - this.tokenArray - .encode(param, ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); - } - /* (10) TFileType */ - if (this.type != null) { - this.type.encode(param, TFileType.PNAME_TYPE); - } - /* (11) lifeTimeAssigned */ - if (this.lifetimeAssigned != null) { - this.lifetimeAssigned.encode(param, - TLifeTimeInSeconds.PNAME_LIFETIMEASSIGNED); - } - /* (12) lifeTimeLeft */ - if (this.lifetimeLeft != null) { - this.lifetimeLeft.encode(param, TLifeTimeInSeconds.PNAME_LIFETIMELEFT); - } - /* (13) TUserPermission ownerPermission */ - if (this.ownerPermission != null) { - this.ownerPermission.encode(param, TUserPermission.PNAME_OWNERPERMISSION); - } - /* (14) TGroupPermission groupPermission */ - if (this.groupPermission != null) { - this.groupPermission - .encode(param, TGroupPermission.PNAME_GROUPPERMISSION); - } - /* (15) TPermissionMode otherPermission */ - if (this.otherPermission != null) { - this.otherPermission.encode(param, TPermissionMode.PNAME_OTHERPERMISSION); - } - /* (16) TCheckSumType */ - if (this.checkSumType != null) { - this.checkSumType.encode(param, TCheckSumType.PNAME_CHECKSUMTYPE); - } - /* (17) TCheckSumValue */ - if (this.checkSumValue != null) { - this.checkSumValue.encode(param, TCheckSumValue.PNAME_CHECKSUMVALUE); - } - /* (18) ArrayOfTMetaDataPathDetails arrayOfSubPaths */ - if (this.arrayOfSubPaths != null) { - this.arrayOfSubPaths.encode(param, - ArrayOfTMetaDataPathDetail.PNAME_ARRAYOFSUBPATHS); - } - - // Add Hastable to global vector - list.add(param); - } - - @Override - public String toString() { - - StringBuilder sb = new StringBuilder(); - sb.append(stfn.toString() + "\n"); - return sb.toString(); - } + /** Method that get otherPermission */ + public TPermissionMode getUserPermissionArray() { + + return otherPermission; + } + + /** @param checkSumType */ + public void setCheckSumType(TCheckSumType checkSumType) { + + this.checkSumType = checkSumType; + } + + /** Method that get CHECKSUMTYPE */ + public TCheckSumType getCheckSumType() { + + return checkSumType; + } + + /** @param checkSumValue */ + public void setCheckSumValue(TCheckSumValue checkSumValue) { + + 
this.checkSumValue = checkSumValue; + } + + /** Method that get CHECKSUMVALUE */ + public TCheckSumValue getCheckSumValue() { + + return checkSumValue; + } + + /** Method that get TMetaDataPathDetails @TODO */ + public ArrayOfTMetaDataPathDetail getArrayOfSubPaths() { + + return arrayOfSubPaths; + } + + public void setArrayOfSubPaths(ArrayOfTMetaDataPathDetail array) { + + arrayOfSubPaths = array; + } + + /** + * Encode method, used to encode a TMetaDataPathDetail object into a structured paramter + * (Hashtable), used for communicate to the FE component thourgh xmlrpc. + * + * @param param Hashtable that must contain structures results + * @param name name for the TMetaData field + */ + public void encode(List list) { + + Map param = new HashMap(); + + /* (1) StFN */ + if (this.stfn != null) { + this.stfn.encode(param, StFN.PNAME_PATH); + } + /* (2) TReturnStatus */ + if (this.retStatus != null) { + this.retStatus.encode(param, TReturnStatus.PNAME_STATUS); + } + /* (3) Size */ + if (this.size != null) { + this.size.encode(param, TSizeInBytes.PNAME_SIZE); + } + /* (4) createdAtTime */ + if (this.createdAtTime != null) { + param.put("createdAtTime", dateFormat.format(createdAtTime)); + } + /* (5) lastModificationTime */ + if (this.lastModificationAtTime != null) { + // param.put("lastModificationTime", lastModificationAtTime.toString()); + param.put("lastModificationTime", dateFormat.format(lastModificationAtTime)); + } + /* (6) fileStorageType */ + if (this.fileStorageType != null) { + this.fileStorageType.encode(param, TFileStorageType.PNAME_FILESTORAGETYPE); + } + /* (7) TRetentionPolicyInfo */ + if (this.retentionPolicyInfo != null) { + this.retentionPolicyInfo.encode(param, TRetentionPolicyInfo.PNAME_retentionPolicyInfo); + } + /* (8) fileLocality */ + if (this.fileLocality != null) { + this.fileLocality.encode(param, TFileLocality.PNAME_FILELOCALITY); + } + /* (9) ArrayOfTSpaceToken */ + if (this.tokenArray != null) { + this.tokenArray.encode(param, ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); + } + /* (10) TFileType */ + if (this.type != null) { + this.type.encode(param, TFileType.PNAME_TYPE); + } + /* (11) lifeTimeAssigned */ + if (this.lifetimeAssigned != null) { + this.lifetimeAssigned.encode(param, TLifeTimeInSeconds.PNAME_LIFETIMEASSIGNED); + } + /* (12) lifeTimeLeft */ + if (this.lifetimeLeft != null) { + this.lifetimeLeft.encode(param, TLifeTimeInSeconds.PNAME_LIFETIMELEFT); + } + /* (13) TUserPermission ownerPermission */ + if (this.ownerPermission != null) { + this.ownerPermission.encode(param, TUserPermission.PNAME_OWNERPERMISSION); + } + /* (14) TGroupPermission groupPermission */ + if (this.groupPermission != null) { + this.groupPermission.encode(param, TGroupPermission.PNAME_GROUPPERMISSION); + } + /* (15) TPermissionMode otherPermission */ + if (this.otherPermission != null) { + this.otherPermission.encode(param, TPermissionMode.PNAME_OTHERPERMISSION); + } + /* (16) TCheckSumType */ + if (this.checkSumType != null) { + this.checkSumType.encode(param, TCheckSumType.PNAME_CHECKSUMTYPE); + } + /* (17) TCheckSumValue */ + if (this.checkSumValue != null) { + this.checkSumValue.encode(param, TCheckSumValue.PNAME_CHECKSUMVALUE); + } + /* (18) ArrayOfTMetaDataPathDetails arrayOfSubPaths */ + if (this.arrayOfSubPaths != null) { + this.arrayOfSubPaths.encode(param, ArrayOfTMetaDataPathDetail.PNAME_ARRAYOFSUBPATHS); + } + + // Add Hastable to global vector + list.add(param); + } + + @Override + public String toString() { + + StringBuilder sb = new StringBuilder(); + 
sb.append(stfn.toString() + "\n"); + return sb.toString(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TMetaDataSpace.java b/src/main/java/it/grid/storm/srm/types/TMetaDataSpace.java index 15be38f1..13df9950 100644 --- a/src/main/java/it/grid/storm/srm/types/TMetaDataSpace.java +++ b/src/main/java/it/grid/storm/srm/types/TMetaDataSpace.java @@ -1,340 +1,296 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; import it.grid.storm.space.SpaceHelper; import it.grid.storm.space.StorageSpaceData; - import java.io.Serializable; import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TMetaDataSpace implements Serializable { - private TSpaceType spaceType; - private TReturnStatus status = null; - private TSpaceToken spaceToken; - private TRetentionPolicyInfo retentionPolicyInfo; - private TUserID owner; - private TSizeInBytes totalSize; - private TSizeInBytes guaranteedSize; - private TSizeInBytes unusedSize; - private TLifeTimeInSeconds lifetimeAssigned = null; - private TLifeTimeInSeconds lifetimeLeft = null; - - private static final Logger log = LoggerFactory - .getLogger(TMetaDataSpace.class); - - public TMetaDataSpace() { - - this.spaceType = TSpaceType.EMPTY; - this.status = null; - this.spaceToken = TSpaceToken.makeEmpty(); - this.retentionPolicyInfo = null; - this.owner = TUserID.makeEmpty(); - this.totalSize = TSizeInBytes.makeEmpty(); - this.guaranteedSize = TSizeInBytes.makeEmpty(); - this.unusedSize = TSizeInBytes.makeEmpty(); - this.lifetimeAssigned = TLifeTimeInSeconds.makeEmpty(); - this.lifetimeLeft = TLifeTimeInSeconds.makeEmpty(); - } - - /** - * Constructor - * - * @param spaceType - * TSpaceType - * @param spaceToken - * TSpaceToken - * @param status - * TReturnStatus - * @param user - * TUserID - * @param totalSize - * TSizeInBytes - * @param guaranteedSize - * TSizeInBytes - * @param unusedSize - * TSizeInBytes - * @param lifetimeAssigned - * TLifeTimeInSeconds - * @param lifetimeLeft - * TLifeTimeInSeconds - * @throws InvalidTMetaDataSpaceAttributeException - */ - public TMetaDataSpace(TSpaceType spaceType, TSpaceToken spaceToken, - TReturnStatus status, TUserID user, TSizeInBytes totalSize, - TSizeInBytes guaranteedSize, TSizeInBytes unusedSize, - TLifeTimeInSeconds lifetimeAssigned, TLifeTimeInSeconds lifetimeLeft) - throws InvalidTMetaDataSpaceAttributeException { - - boolean ok = (spaceToken != null); - - if (!ok) { - throw new InvalidTMetaDataSpaceAttributeException(spaceToken); - } - - this.spaceType = spaceType; - this.spaceToken = spaceToken; - this.status = status; - this.owner = user; - this.totalSize = totalSize; - this.guaranteedSize = guaranteedSize; - 
this.unusedSize = unusedSize; - this.lifetimeAssigned = lifetimeAssigned; - this.lifetimeLeft = lifetimeLeft; - } - - /** - * Constructor with SpaceData returned by DAO. - * - * @param spaceData - * of type StorageSpaceData - * @throws InvalidTMetaDataSpaceAttributeException - * @throws InvalidTSizeAttributesException - */ - public TMetaDataSpace(StorageSpaceData spaceData) - throws InvalidTMetaDataSpaceAttributeException, - InvalidTSizeAttributesException { - - if (spaceData == null) { - log.warn("TMetaDataSpace built without SPACEDATA detail."); - this.spaceType = TSpaceType.EMPTY; - this.spaceToken = TSpaceToken.makeEmpty(); - this.status = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid space token"); - this.owner = TUserID.makeEmpty(); - this.totalSize = TSizeInBytes.makeEmpty(); - this.guaranteedSize = TSizeInBytes.makeEmpty(); - this.unusedSize = TSizeInBytes.makeEmpty(); - this.lifetimeAssigned = TLifeTimeInSeconds.makeEmpty(); - this.lifetimeLeft = TLifeTimeInSeconds.makeEmpty(); - } else { - boolean ok = (spaceData.getSpaceToken() != null); - if (!ok) { - log.warn("TMetaDataSpace built with SpaceData without Token.. !?"); - throw new InvalidTMetaDataSpaceAttributeException(spaceToken); - } - this.spaceType = spaceData.getSpaceType(); - this.spaceToken = spaceData.getSpaceToken(); - this.owner = spaceData.getUserID(); - this.totalSize = spaceData.getTotalSpaceSize(); - this.guaranteedSize = spaceData.getTotalGuaranteedSize(); - try { - if (SpaceHelper.isStorageArea(spaceData)) { - this.guaranteedSize = spaceData.getTotalGuaranteedSize(); - } else { - this.guaranteedSize = spaceData.getReservedSpaceSize(); - } - } catch (IllegalArgumentException e) { - // impossible - } - - this.unusedSize = spaceData.getFreeSpaceSize(); - this.lifetimeAssigned = spaceData.getLifeTime(); - if (this.lifetimeAssigned.isInfinite()) { - this.lifetimeLeft = TLifeTimeInSeconds.makeInfinite(); - } else { - this.lifetimeLeft = this.lifetimeAssigned.timeLeft(spaceData - .getCreationDate()); - } - if ((this.lifetimeLeft.value() == 0) - && (this.spaceType != TSpaceType.VOSPACE)) { - this.status = new TReturnStatus(TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, - "Expired space lifetime"); - } else { - this.status = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "Valid space token"); - } - } - } - - public static TMetaDataSpace makeEmpty() { - - return new TMetaDataSpace(); - } - - /** - * Method that returns SpaceType - */ - public TSpaceType getSpaceType() { - - return spaceType; - } - - /** - * Get TReturnStatus - */ - public TReturnStatus getStatus() { - - return status; - } - - /** - * Set TReturnStatus - */ - public void setStatus(TReturnStatus status) { - - this.status = status; - } - - /** - * Return Space Token; - */ - public TSpaceToken getSpaceToken() { - - return spaceToken; - } - - public void setSpaceToken(TSpaceToken token) { - - this.spaceToken = token; - } - - public void setSpaceType(TSpaceType type) { - - this.spaceType = type; - } - - public void setOwner(TUserID uid) { - - this.owner = uid; - } - - public void setTotalSize(TSizeInBytes tsize) { - - this.totalSize = tsize; - } - - public void setGuarSize(TSizeInBytes gsize) { - - this.guaranteedSize = gsize; - } - - public void setUnSize(TSizeInBytes usize) { - - this.unusedSize = usize; - } - - public void setLifeTime(TLifeTimeInSeconds time) { - - this.lifetimeAssigned = time; - } - - public void setLifeTimeLeft(TLifeTimeInSeconds time) { + private TSpaceType spaceType; + private TReturnStatus status = null; + private TSpaceToken 
spaceToken; + private TRetentionPolicyInfo retentionPolicyInfo; + private TUserID owner; + private TSizeInBytes totalSize; + private TSizeInBytes guaranteedSize; + private TSizeInBytes unusedSize; + private TLifeTimeInSeconds lifetimeAssigned = null; + private TLifeTimeInSeconds lifetimeLeft = null; + + private static final Logger log = LoggerFactory.getLogger(TMetaDataSpace.class); + + public TMetaDataSpace() { + + this.spaceType = TSpaceType.EMPTY; + this.status = null; + this.spaceToken = TSpaceToken.makeEmpty(); + this.retentionPolicyInfo = null; + this.owner = TUserID.makeEmpty(); + this.totalSize = TSizeInBytes.makeEmpty(); + this.guaranteedSize = TSizeInBytes.makeEmpty(); + this.unusedSize = TSizeInBytes.makeEmpty(); + this.lifetimeAssigned = TLifeTimeInSeconds.makeEmpty(); + this.lifetimeLeft = TLifeTimeInSeconds.makeEmpty(); + } + + /** + * Constructor + * + * @param spaceType TSpaceType + * @param spaceToken TSpaceToken + * @param status TReturnStatus + * @param user TUserID + * @param totalSize TSizeInBytes + * @param guaranteedSize TSizeInBytes + * @param unusedSize TSizeInBytes + * @param lifetimeAssigned TLifeTimeInSeconds + * @param lifetimeLeft TLifeTimeInSeconds + * @throws InvalidTMetaDataSpaceAttributeException + */ + public TMetaDataSpace( + TSpaceType spaceType, + TSpaceToken spaceToken, + TReturnStatus status, + TUserID user, + TSizeInBytes totalSize, + TSizeInBytes guaranteedSize, + TSizeInBytes unusedSize, + TLifeTimeInSeconds lifetimeAssigned, + TLifeTimeInSeconds lifetimeLeft) + throws InvalidTMetaDataSpaceAttributeException { + + boolean ok = (spaceToken != null); + + if (!ok) { + throw new InvalidTMetaDataSpaceAttributeException(spaceToken); + } + + this.spaceType = spaceType; + this.spaceToken = spaceToken; + this.status = status; + this.owner = user; + this.totalSize = totalSize; + this.guaranteedSize = guaranteedSize; + this.unusedSize = unusedSize; + this.lifetimeAssigned = lifetimeAssigned; + this.lifetimeLeft = lifetimeLeft; + } + + /** + * Constructor with SpaceData returned by DAO. + * + * @param spaceData of type StorageSpaceData + * @throws InvalidTMetaDataSpaceAttributeException + * @throws InvalidTSizeAttributesException + */ + public TMetaDataSpace(StorageSpaceData spaceData) + throws InvalidTMetaDataSpaceAttributeException, InvalidTSizeAttributesException { + + if (spaceData == null) { + log.warn("TMetaDataSpace built without SPACEDATA detail."); + this.spaceType = TSpaceType.EMPTY; + this.spaceToken = TSpaceToken.makeEmpty(); + this.status = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid space token"); + this.owner = TUserID.makeEmpty(); + this.totalSize = TSizeInBytes.makeEmpty(); + this.guaranteedSize = TSizeInBytes.makeEmpty(); + this.unusedSize = TSizeInBytes.makeEmpty(); + this.lifetimeAssigned = TLifeTimeInSeconds.makeEmpty(); + this.lifetimeLeft = TLifeTimeInSeconds.makeEmpty(); + } else { + boolean ok = (spaceData.getSpaceToken() != null); + if (!ok) { + log.warn("TMetaDataSpace built with SpaceData without Token.. 
!?"); + throw new InvalidTMetaDataSpaceAttributeException(spaceToken); + } + this.spaceType = spaceData.getSpaceType(); + this.spaceToken = spaceData.getSpaceToken(); + this.owner = spaceData.getUserID(); + this.totalSize = spaceData.getTotalSpaceSize(); + this.guaranteedSize = spaceData.getTotalGuaranteedSize(); + try { + if (SpaceHelper.isStorageArea(spaceData)) { + this.guaranteedSize = spaceData.getTotalGuaranteedSize(); + } else { + this.guaranteedSize = spaceData.getReservedSpaceSize(); + } + } catch (IllegalArgumentException e) { + // impossible + } + + this.unusedSize = spaceData.getFreeSpaceSize(); + this.lifetimeAssigned = spaceData.getLifeTime(); + if (this.lifetimeAssigned.isInfinite()) { + this.lifetimeLeft = TLifeTimeInSeconds.makeInfinite(); + } else { + this.lifetimeLeft = this.lifetimeAssigned.timeLeft(spaceData.getCreationDate()); + } + if ((this.lifetimeLeft.value() == 0) && (this.spaceType != TSpaceType.VOSPACE)) { + this.status = + new TReturnStatus(TStatusCode.SRM_SPACE_LIFETIME_EXPIRED, "Expired space lifetime"); + } else { + this.status = new TReturnStatus(TStatusCode.SRM_SUCCESS, "Valid space token"); + } + } + } + + public static TMetaDataSpace makeEmpty() { + + return new TMetaDataSpace(); + } + + /** Method that returns SpaceType */ + public TSpaceType getSpaceType() { + + return spaceType; + } + + /** Get TReturnStatus */ + public TReturnStatus getStatus() { + + return status; + } + + /** Set TReturnStatus */ + public void setStatus(TReturnStatus status) { + + this.status = status; + } + + /** Return Space Token; */ + public TSpaceToken getSpaceToken() { + + return spaceToken; + } + + public void setSpaceToken(TSpaceToken token) { + + this.spaceToken = token; + } + + public void setSpaceType(TSpaceType type) { + + this.spaceType = type; + } + + public void setOwner(TUserID uid) { + + this.owner = uid; + } + + public void setTotalSize(TSizeInBytes tsize) { + + this.totalSize = tsize; + } + + public void setGuarSize(TSizeInBytes gsize) { + + this.guaranteedSize = gsize; + } + + public void setUnSize(TSizeInBytes usize) { + + this.unusedSize = usize; + } + + public void setLifeTime(TLifeTimeInSeconds time) { + + this.lifetimeAssigned = time; + } - this.lifetimeLeft = time; - } + public void setLifeTimeLeft(TLifeTimeInSeconds time) { - /** - * Return retentionPolicyInfo - */ - public TRetentionPolicyInfo getRetentionPolicyInfo() { + this.lifetimeLeft = time; + } - return retentionPolicyInfo; - } + /** Return retentionPolicyInfo */ + public TRetentionPolicyInfo getRetentionPolicyInfo() { - public void setRetentionPolicyInfo(TRetentionPolicyInfo retentionPolicyInfo) { + return retentionPolicyInfo; + } - this.retentionPolicyInfo = retentionPolicyInfo; - } + public void setRetentionPolicyInfo(TRetentionPolicyInfo retentionPolicyInfo) { - /** - * Return User Identifier; - */ - public TUserID getUserID() { + this.retentionPolicyInfo = retentionPolicyInfo; + } - return owner; - } + /** Return User Identifier; */ + public TUserID getUserID() { - /** - * Return TotalSize; - */ - public TSizeInBytes getTotalSize() { + return owner; + } - return totalSize; - } + /** Return TotalSize; */ + public TSizeInBytes getTotalSize() { - /** - * Return Guaranteed Size; - */ - public TSizeInBytes getGuaranteedSize() { + return totalSize; + } - return guaranteedSize; - } + /** Return Guaranteed Size; */ + public TSizeInBytes getGuaranteedSize() { - /** - * Return Unused Size. 
- */ - public TSizeInBytes getUnusedSize() { + return guaranteedSize; + } - return unusedSize; - } + /** Return Unused Size. */ + public TSizeInBytes getUnusedSize() { - /** - * Return Lifetime Assigned. - */ - public TLifeTimeInSeconds getLifeTimeAssigned() { + return unusedSize; + } - return lifetimeAssigned; + /** Return Lifetime Assigned. */ + public TLifeTimeInSeconds getLifeTimeAssigned() { - } + return lifetimeAssigned; + } - /** - * Return LifeTime Left - */ - public TLifeTimeInSeconds getLifeTimeLeft() { + /** Return LifeTime Left */ + public TLifeTimeInSeconds getLifeTimeLeft() { - return lifetimeLeft; - } + return lifetimeLeft; + } - /** - * Method used to encode value for FE communication. - */ - public void encode(Map outputParam, String fieldName) { + /** Method used to encode value for FE communication. */ + public void encode(Map outputParam, String fieldName) { - Map metaDataSpace = new HashMap(); + Map metaDataSpace = new HashMap(); - this.encode(metaDataSpace); - outputParam.put(fieldName, metaDataSpace); - } + this.encode(metaDataSpace); + outputParam.put(fieldName, metaDataSpace); + } - /** - * Method used to encode value for FE communication. - */ - public void encode(Map metaDataSpace) { + /** Method used to encode value for FE communication. */ + public void encode(Map metaDataSpace) { - spaceToken.encode(metaDataSpace, TSpaceToken.PNAME_SPACETOKEN); - if (status != null) { - status.encode(metaDataSpace, TReturnStatus.PNAME_STATUS); - } - if (retentionPolicyInfo != null) { - retentionPolicyInfo.encode(metaDataSpace, - TRetentionPolicyInfo.PNAME_retentionPolicyInfo); - } - owner.encode(metaDataSpace, TUserID.PNAME_OWNER); - totalSize.encode(metaDataSpace, TSizeInBytes.PNAME_TOTALSIZE); - guaranteedSize.encode(metaDataSpace, TSizeInBytes.PNAME_GUARANTEEDSIZE); - unusedSize.encode(metaDataSpace, TSizeInBytes.PNAME_UNUSEDSIZE); - lifetimeAssigned.encode(metaDataSpace, - TLifeTimeInSeconds.PNAME_LIFETIMEASSIGNED); - lifetimeLeft.encode(metaDataSpace, TLifeTimeInSeconds.PNAME_LIFETIMELEFT); - } + spaceToken.encode(metaDataSpace, TSpaceToken.PNAME_SPACETOKEN); + if (status != null) { + status.encode(metaDataSpace, TReturnStatus.PNAME_STATUS); + } + if (retentionPolicyInfo != null) { + retentionPolicyInfo.encode(metaDataSpace, TRetentionPolicyInfo.PNAME_retentionPolicyInfo); + } + owner.encode(metaDataSpace, TUserID.PNAME_OWNER); + totalSize.encode(metaDataSpace, TSizeInBytes.PNAME_TOTALSIZE); + guaranteedSize.encode(metaDataSpace, TSizeInBytes.PNAME_GUARANTEEDSIZE); + unusedSize.encode(metaDataSpace, TSizeInBytes.PNAME_UNUSEDSIZE); + lifetimeAssigned.encode(metaDataSpace, TLifeTimeInSeconds.PNAME_LIFETIMEASSIGNED); + lifetimeLeft.encode(metaDataSpace, TLifeTimeInSeconds.PNAME_LIFETIMELEFT); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TOverwriteMode.java b/src/main/java/it/grid/storm/srm/types/TOverwriteMode.java index 241467e0..bd7ef371 100644 --- a/src/main/java/it/grid/storm/srm/types/TOverwriteMode.java +++ b/src/main/java/it/grid/storm/srm/types/TOverwriteMode.java @@ -1,64 +1,55 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TOverwriteMode of an Srm request. 
- * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; public class TOverwriteMode { - private String mode = null; - - public static final TOverwriteMode EMPTY = new TOverwriteMode("Empty"); - public static final TOverwriteMode NEVER = new TOverwriteMode("Never"); - public static final TOverwriteMode ALWAYS = new TOverwriteMode("Always"); - public static final TOverwriteMode WHENFILESAREDIFFERENT = new TOverwriteMode( - "WhenFilesAreDifferent"); + private String mode = null; - private TOverwriteMode(String mode) { + public static final TOverwriteMode EMPTY = new TOverwriteMode("Empty"); + public static final TOverwriteMode NEVER = new TOverwriteMode("Never"); + public static final TOverwriteMode ALWAYS = new TOverwriteMode("Always"); + public static final TOverwriteMode WHENFILESAREDIFFERENT = + new TOverwriteMode("WhenFilesAreDifferent"); - this.mode = mode; - } + private TOverwriteMode(String mode) { - public String toString() { + this.mode = mode; + } - return mode; - } + public String toString() { - public String getValue() { + return mode; + } - return mode; - } + public String getValue() { - /** - * @param mode - * @return - * @throws IllegalArgumentException - */ - public static TOverwriteMode getTOverwriteMode(String mode) - throws IllegalArgumentException { + return mode; + } - if (mode == null) { - throw new IllegalArgumentException("Received null mode parameter"); - } - if (mode.equals(EMPTY.getValue())) - return EMPTY; - if (mode.equals(NEVER.getValue())) - return NEVER; - if (mode.equals(ALWAYS.getValue())) - return ALWAYS; - if (mode.equals(WHENFILESAREDIFFERENT.getValue())) - return WHENFILESAREDIFFERENT; - throw new IllegalArgumentException( - "No matching TOverwriteMode for String \'" + mode + "\'"); - } + /** + * @param mode + * @return + * @throws IllegalArgumentException + */ + public static TOverwriteMode getTOverwriteMode(String mode) throws IllegalArgumentException { + if (mode == null) { + throw new IllegalArgumentException("Received null mode parameter"); + } + if (mode.equals(EMPTY.getValue())) return EMPTY; + if (mode.equals(NEVER.getValue())) return NEVER; + if (mode.equals(ALWAYS.getValue())) return ALWAYS; + if (mode.equals(WHENFILESAREDIFFERENT.getValue())) return WHENFILESAREDIFFERENT; + throw new IllegalArgumentException("No matching TOverwriteMode for String \'" + mode + "\'"); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TPermissionMode.java b/src/main/java/it/grid/storm/srm/types/TPermissionMode.java index dd6f8891..0b657399 100644 --- a/src/main/java/it/grid/storm/srm/types/TPermissionMode.java +++ b/src/main/java/it/grid/storm/srm/types/TPermissionMode.java @@ -1,143 +1,116 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; -import java.util.Hashtable; -import java.util.Map; - import it.grid.storm.filesystem.FilesystemPermission; +import java.util.Map; /** - * This class represents the TPermissionMode of a File or Space Area managed by - * Srm. - * + * This class represents the TPermissionMode of a File or Space Area managed by Srm. 
+ * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - public class TPermissionMode { - public static String PNAME_OTHERPERMISSION = "otherPermission"; - public static String PNAME_MODE = "mode"; - - private String mode = null; - - public static final TPermissionMode NONE = new TPermissionMode("None"); - public static final TPermissionMode X = new TPermissionMode("X"); - public static final TPermissionMode W = new TPermissionMode("W"); - public static final TPermissionMode WX = new TPermissionMode("WX"); - public static final TPermissionMode R = new TPermissionMode("R"); - public static final TPermissionMode RX = new TPermissionMode("RX"); - public static final TPermissionMode RW = new TPermissionMode("RW"); - public static final TPermissionMode RWX = new TPermissionMode("RWX"); - - private TPermissionMode(String mode) { - - this.mode = mode; - } - - public String toString() { - - return mode; - } - - public String getValue() { - - return mode; - } - - public static TPermissionMode getTPermissionMode(String type) { - - if (type.equals(NONE.getValue())) - return NONE; - if (type.equals(X.getValue())) - return X; - if (type.equals(W.getValue())) - return W; - if (type.equals(WX.getValue())) - return WX; - if (type.equals(R.getValue())) - return R; - if (type.equals(RX.getValue())) - return RX; - if (type.equals(RW.getValue())) - return RW; - if (type.equals(RWX.getValue())) - return RWX; - return null; - } - - public static TPermissionMode getTPermissionMode(int type) { - - switch (type) { - case 0: - return NONE; - case 1: - return X; - case 2: - return W; - case 3: - return WX; - case 4: - return R; - case 5: - return RX; - case 6: - return RW; - case 7: - return RWX; - default: - return NONE; - } - } - - public static TPermissionMode getTPermissionMode(FilesystemPermission type) { - - String perm = ""; - - if (type.canReadFile() || type.canListDirectory()) - perm += "R"; - if (type.canWriteFile()) - perm += "W"; - if (type.canTraverseDirectory()) - perm += "X"; - if (perm.length() == 0) - perm = "None"; - return getTPermissionMode(perm); - } - - /** - * This method is used to encode Permission mode from BE to FE commonucation. - * - * @param param - * Hashtable that will contains output xmlrpc structure. - * @param name - * The name of the field to be added. 
- */ - public void encode(Map param, String name) { - - Integer permissionInt = null; - if (this.equals(NONE)) - permissionInt = Integer.valueOf(0); - if (this.equals(X)) - permissionInt = Integer.valueOf(1); - if (this.equals(W)) - permissionInt = Integer.valueOf(2); - if (this.equals(WX)) - permissionInt = Integer.valueOf(3); - if (this.equals(R)) - permissionInt = Integer.valueOf(4); - if (this.equals(RX)) - permissionInt = Integer.valueOf(5); - if (this.equals(RW)) - permissionInt = Integer.valueOf(6); - if (this.equals(RWX)) - permissionInt = Integer.valueOf(7); - - param.put(name, permissionInt); - } + public static String PNAME_OTHERPERMISSION = "otherPermission"; + public static String PNAME_MODE = "mode"; + + private String mode = null; + + public static final TPermissionMode NONE = new TPermissionMode("None"); + public static final TPermissionMode X = new TPermissionMode("X"); + public static final TPermissionMode W = new TPermissionMode("W"); + public static final TPermissionMode WX = new TPermissionMode("WX"); + public static final TPermissionMode R = new TPermissionMode("R"); + public static final TPermissionMode RX = new TPermissionMode("RX"); + public static final TPermissionMode RW = new TPermissionMode("RW"); + public static final TPermissionMode RWX = new TPermissionMode("RWX"); + + private TPermissionMode(String mode) { + + this.mode = mode; + } + + public String toString() { + + return mode; + } + + public String getValue() { + + return mode; + } + + public static TPermissionMode getTPermissionMode(String type) { + + if (type.equals(NONE.getValue())) return NONE; + if (type.equals(X.getValue())) return X; + if (type.equals(W.getValue())) return W; + if (type.equals(WX.getValue())) return WX; + if (type.equals(R.getValue())) return R; + if (type.equals(RX.getValue())) return RX; + if (type.equals(RW.getValue())) return RW; + if (type.equals(RWX.getValue())) return RWX; + return null; + } + + public static TPermissionMode getTPermissionMode(int type) { + + switch (type) { + case 0: + return NONE; + case 1: + return X; + case 2: + return W; + case 3: + return WX; + case 4: + return R; + case 5: + return RX; + case 6: + return RW; + case 7: + return RWX; + default: + return NONE; + } + } + + public static TPermissionMode getTPermissionMode(FilesystemPermission type) { + + String perm = ""; + + if (type.canReadFile() || type.canListDirectory()) perm += "R"; + if (type.canWriteFile()) perm += "W"; + if (type.canTraverseDirectory()) perm += "X"; + if (perm.length() == 0) perm = "None"; + return getTPermissionMode(perm); + } + + /** + * This method is used to encode Permission mode from BE to FE commonucation. + * + * @param param Hashtable that will contains output xmlrpc structure. + * @param name The name of the field to be added. 
+ */ + public void encode(Map param, String name) { + + Integer permissionInt = null; + if (this.equals(NONE)) permissionInt = Integer.valueOf(0); + if (this.equals(X)) permissionInt = Integer.valueOf(1); + if (this.equals(W)) permissionInt = Integer.valueOf(2); + if (this.equals(WX)) permissionInt = Integer.valueOf(3); + if (this.equals(R)) permissionInt = Integer.valueOf(4); + if (this.equals(RX)) permissionInt = Integer.valueOf(5); + if (this.equals(RW)) permissionInt = Integer.valueOf(6); + if (this.equals(RWX)) permissionInt = Integer.valueOf(7); + + param.put(name, permissionInt); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TRequestToken.java b/src/main/java/it/grid/storm/srm/types/TRequestToken.java index 0f7454ed..f8333aca 100644 --- a/src/main/java/it/grid/storm/srm/types/TRequestToken.java +++ b/src/main/java/it/grid/storm/srm/types/TRequestToken.java @@ -1,156 +1,146 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; +import com.fasterxml.jackson.annotation.JsonIgnore; +import it.grid.storm.config.Configuration; import java.io.Serializable; import java.util.Calendar; import java.util.Date; import java.util.Map; import java.util.UUID; -import com.fasterxml.jackson.annotation.JsonIgnore; - -import it.grid.storm.config.Configuration; - /** * This class represents a Request Token - * + * * @author Magnoni Luca - * */ - public class TRequestToken implements Serializable { - private static final long serialVersionUID = -6926632390881024529L; - - public static final String PNAME_REQUESTOKEN = "requestToken"; + private static final long serialVersionUID = -6926632390881024529L; - private final String value; + public static final String PNAME_REQUESTOKEN = "requestToken"; - private final Calendar expiration; + private final String value; - private static final long REQUEST_LIFETIME = Configuration.getInstance() - .getExpiredRequestTime() * 1000; + private final Calendar expiration; - public TRequestToken(String requestToken, Date timestamp) - throws InvalidTRequestTokenAttributesException { + private static final long REQUEST_LIFETIME = + Configuration.getInstance().getExpiredRequestTime() * 1000; - if (requestToken == null || requestToken.trim().isEmpty()) { - throw new InvalidTRequestTokenAttributesException(requestToken); - } - this.value = requestToken; - Calendar expiration = null; - if (timestamp != null) { - expiration = Calendar.getInstance(); - expiration.setTimeInMillis(timestamp.getTime() + REQUEST_LIFETIME); - } - this.expiration = expiration; - } - - public TRequestToken() throws InvalidTRequestTokenAttributesException { - this(UUID.randomUUID().toString(), Calendar.getInstance().getTime()); - } - - public static TRequestToken getRandom() { - - UUID token = UUID.randomUUID(); - try { - return new TRequestToken(token.toString(), Calendar.getInstance() - .getTime()); - } catch (InvalidTRequestTokenAttributesException e) { - // never thrown - throw new IllegalStateException( - "Unexpected InvalidTRequestTokenAttributesException", e); - } - } - - @JsonIgnore - public boolean hasExpirationDate() { - - return expiration != null; - } - - @JsonIgnore - public boolean isExpired() throws IllegalStateException { - - if (!hasExpirationDate()) { - throw new IllegalStateException( - "Unable to check expiration, the token han not an expiration date"); - } - return 
expiration.before(Calendar.getInstance()); - } - - /** - * @return the expiration - */ - public Calendar getExpiration() { - - return expiration; - } - - public void updateExpiration(Date expiration) { - - this.expiration.setTime(expiration); - } - - public String getValue() { - - return value; - } - - public String toString() { - - return value; - } - - public static TRequestToken decode(Map inputParam, - String fieldName) throws InvalidTRequestTokenAttributesException { - - return new TRequestToken((String) inputParam.get(fieldName), null); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + ((value == null) ? 0 : value.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TRequestToken other = (TRequestToken) obj; - if (value == null) { - if (other.value != null) { - return false; - } - } else if (!value.equals(other.value)) { - return false; - } - return true; - } + public TRequestToken(String requestToken, Date timestamp) + throws InvalidTRequestTokenAttributesException { + if (requestToken == null || requestToken.trim().isEmpty()) { + throw new InvalidTRequestTokenAttributesException(requestToken); + } + this.value = requestToken; + Calendar expiration = null; + if (timestamp != null) { + expiration = Calendar.getInstance(); + expiration.setTimeInMillis(timestamp.getTime() + REQUEST_LIFETIME); + } + this.expiration = expiration; + } + + public TRequestToken() throws InvalidTRequestTokenAttributesException { + this(UUID.randomUUID().toString(), Calendar.getInstance().getTime()); + } + + public static TRequestToken getRandom() { + + UUID token = UUID.randomUUID(); + try { + return new TRequestToken(token.toString(), Calendar.getInstance().getTime()); + } catch (InvalidTRequestTokenAttributesException e) { + // never thrown + throw new IllegalStateException("Unexpected InvalidTRequestTokenAttributesException", e); + } + } + + @JsonIgnore + public boolean hasExpirationDate() { + + return expiration != null; + } + + @JsonIgnore + public boolean isExpired() throws IllegalStateException { + + if (!hasExpirationDate()) { + throw new IllegalStateException( + "Unable to check expiration, the token han not an expiration date"); + } + return expiration.before(Calendar.getInstance()); + } + + /** @return the expiration */ + public Calendar getExpiration() { + + return expiration; + } + + public void updateExpiration(Date expiration) { + + this.expiration.setTime(expiration); + } + + public String getValue() { + + return value; + } + + public String toString() { + + return value; + } + + public static TRequestToken decode(Map inputParam, String fieldName) + throws InvalidTRequestTokenAttributesException { + + return new TRequestToken((String) inputParam.get(fieldName), null); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((value == null) ? 
0 : value.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TRequestToken other = (TRequestToken) obj; + if (value == null) { + if (other.value != null) { + return false; + } + } else if (!value.equals(other.value)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TRequestType.java b/src/main/java/it/grid/storm/srm/types/TRequestType.java index d37db3d1..ecd56946 100644 --- a/src/main/java/it/grid/storm/srm/types/TRequestType.java +++ b/src/main/java/it/grid/storm/srm/types/TRequestType.java @@ -1,57 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; /** - * This class represents the ReqType of an SRM request. It is a simple - * application of the TypeSafe Enum Pattern. - * + * This class represents the ReqType of an SRM request. It is a simple application of the TypeSafe + * Enum Pattern. + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 18th, 2005 * @version 3.0 */ public enum TRequestType { + PREPARE_TO_GET("PrepareToGet"), + PREPARE_TO_PUT("PrepareToPut"), + COPY("Copy"), + BRING_ON_LINE("BringOnLine"), + EMPTY("Empty"), + UNKNOWN("Unknown"); - PREPARE_TO_GET("PrepareToGet"), PREPARE_TO_PUT("PrepareToPut"), COPY("Copy"), BRING_ON_LINE( - "BringOnLine"), EMPTY("Empty"), UNKNOWN("Unknown"); + private final String value; - private final String value; + private TRequestType(String value) { - private TRequestType(String value) { + this.value = value; + } - this.value = value; - } + public String getValue() { - public String getValue() { + return value; + } - return value; - } + /** + * Facility method that returns a TRequestType object given its String representation. If no + * TRequestType is found for the given String, an IllegalArgumentException is thrown. + */ + public static TRequestType getTRequestType(String type) throws IllegalArgumentException { - /** - * Facility method that returns a TRequestType object given its String - * representation. If no TRequestType is found for the given String, an - * IllegalArgumentException is thrown. - */ - public static TRequestType getTRequestType(String type) - throws IllegalArgumentException { + for (TRequestType requestType : TRequestType.values()) { + if (requestType.getValue().equals(type)) { + return requestType; + } + } + return UNKNOWN; + } - for (TRequestType requestType : TRequestType.values()) { - if (requestType.getValue().equals(type)) { - return requestType; - } - } - return UNKNOWN; - } + public boolean isEmpty() { - public boolean isEmpty() { + return this.equals(EMPTY); + } - return this.equals(EMPTY); - } + public String toString() { - public String toString() { - - return value; - } + return value; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TRetentionPolicy.java b/src/main/java/it/grid/storm/srm/types/TRetentionPolicy.java index 80e83212..31c8b5e8 100644 --- a/src/main/java/it/grid/storm/srm/types/TRetentionPolicy.java +++ b/src/main/java/it/grid/storm/srm/types/TRetentionPolicy.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TRetentionPolicy SRM type. - * + * * @author Alberto Forti * @author CNAF - INFN Bologna * @date Luglio, 2006 @@ -16,84 +15,75 @@ public class TRetentionPolicy { - public static String PNAME_retentionPolicy = "retentionPolicy"; - private String retentionPolicy = null; - - public static final TRetentionPolicy REPLICA = new TRetentionPolicy("REPLICA"), - OUTPUT = new TRetentionPolicy("OUTPUT"), CUSTODIAL = new TRetentionPolicy( - "CUSTODIAL"), EMPTY = new TRetentionPolicy("EMPTY"); - - private TRetentionPolicy(String retPol) { - - this.retentionPolicy = retPol; - } - - public final static TRetentionPolicy getTRetentionPolicy(int idx) { - - switch (idx) { - case 0: - return REPLICA; - case 1: - return OUTPUT; - case 2: - return CUSTODIAL; - default: - return EMPTY; - } - - } - - /** - * decode() method creates a TRetentionPolicy object from the inforation - * contained into the structured parameter received from the FE. - * - * @param inputParam - * hashtable structure - * @param fieldName - * field name - * @return - */ - public final static TRetentionPolicy decode(Map inputParam, String fieldName) { - - Integer val; - - val = (Integer) inputParam.get(fieldName); - if (val == null) - return EMPTY; - - return TRetentionPolicy.getTRetentionPolicy(val.intValue()); - } - - /** - * encode() method creates structured parameter representing this ogbject. It - * is passed to the FE. - * - * @param outputParam - * hashtable structure - * @param fieldName - * field name - */ - public void encode(Map outputParam, String fieldName) { - - Integer value = null; - - if (this.equals(TRetentionPolicy.REPLICA)) - value = Integer.valueOf(0); - if (this.equals(TRetentionPolicy.OUTPUT)) - value = Integer.valueOf(1); - if (this.equals(TRetentionPolicy.CUSTODIAL)) - value = Integer.valueOf(2); - - outputParam.put(fieldName, value); - } - - public String toString() { - - return retentionPolicy; - } - - public String getValue() { - - return retentionPolicy; - } + public static String PNAME_retentionPolicy = "retentionPolicy"; + private String retentionPolicy = null; + + public static final TRetentionPolicy REPLICA = new TRetentionPolicy("REPLICA"), + OUTPUT = new TRetentionPolicy("OUTPUT"), + CUSTODIAL = new TRetentionPolicy("CUSTODIAL"), + EMPTY = new TRetentionPolicy("EMPTY"); + + private TRetentionPolicy(String retPol) { + + this.retentionPolicy = retPol; + } + + public static final TRetentionPolicy getTRetentionPolicy(int idx) { + + switch (idx) { + case 0: + return REPLICA; + case 1: + return OUTPUT; + case 2: + return CUSTODIAL; + default: + return EMPTY; + } + } + + /** + * decode() method creates a TRetentionPolicy object from the inforation contained into the + * structured parameter received from the FE. + * + * @param inputParam hashtable structure + * @param fieldName field name + * @return + */ + public static final TRetentionPolicy decode(Map inputParam, String fieldName) { + + Integer val; + + val = (Integer) inputParam.get(fieldName); + if (val == null) return EMPTY; + + return TRetentionPolicy.getTRetentionPolicy(val.intValue()); + } + + /** + * encode() method creates structured parameter representing this ogbject. It is passed to the FE. 
+ * + * @param outputParam hashtable structure + * @param fieldName field name + */ + public void encode(Map outputParam, String fieldName) { + + Integer value = null; + + if (this.equals(TRetentionPolicy.REPLICA)) value = Integer.valueOf(0); + if (this.equals(TRetentionPolicy.OUTPUT)) value = Integer.valueOf(1); + if (this.equals(TRetentionPolicy.CUSTODIAL)) value = Integer.valueOf(2); + + outputParam.put(fieldName, value); + } + + public String toString() { + + return retentionPolicy; + } + + public String getValue() { + + return retentionPolicy; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TRetentionPolicyInfo.java b/src/main/java/it/grid/storm/srm/types/TRetentionPolicyInfo.java index 2cda1c76..9f8d0923 100644 --- a/src/main/java/it/grid/storm/srm/types/TRetentionPolicyInfo.java +++ b/src/main/java/it/grid/storm/srm/types/TRetentionPolicyInfo.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the TRetentionPolicyInfo additional data associated - * with the SRM request. - * + * This class represents the TRetentionPolicyInfo additional data associated with the SRM request. + * * @author Alberto Forti * @author CNAF -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; import java.io.Serializable; @@ -20,121 +17,109 @@ public class TRetentionPolicyInfo implements Serializable { - private static final long serialVersionUID = -8530924298311412411L; - - /* Hashtable field names for encode() and decode() methods */ - public static String PNAME_retentionPolicyInfo = "retentionPolicyInfo"; - - public static final TRetentionPolicyInfo TAPE0_DISK1_RETENTION_POLICY = new TRetentionPolicyInfo( - TRetentionPolicy.REPLICA, TAccessLatency.ONLINE); - public static final TRetentionPolicyInfo TAPE1_DISK1_RETENTION_POLICY = new TRetentionPolicyInfo( - TRetentionPolicy.CUSTODIAL, TAccessLatency.ONLINE); - private TRetentionPolicy retentionPolicy; - private TAccessLatency accessLatency; - - public TRetentionPolicyInfo() { - - } - - public TRetentionPolicyInfo(TRetentionPolicy retentionPolicy, - TAccessLatency accessLatency) { - - this.retentionPolicy = retentionPolicy; - this.accessLatency = accessLatency; - } - - /** - * decode() method creates a TRetentionPolicyInfo object from the inforation - * contained into the structured parameter received from the FE. - * - * @param inputParam - * hashtable structure - * @param fieldName - * field name - * @return - */ - public static TRetentionPolicyInfo decode(Map inputParam, String fieldName) { - - Map param = (Map) inputParam.get(fieldName); - if (param == null) - return null; - TRetentionPolicy retPol = TRetentionPolicy.decode(param, - TRetentionPolicy.PNAME_retentionPolicy); - TAccessLatency accLat = TAccessLatency.decode(param, - TAccessLatency.PNAME_accessLatency); - - return new TRetentionPolicyInfo(retPol, accLat); - } - - /** - * encode() method creates structured parameter representing this ogbject. It - * is passed to the FE. 
- * - * @param outputParam - * hashtable structure - * @param fieldName - * field name - */ - public void encode(Map outputParam, String fieldName) { - - Map param = new HashMap(); - - retentionPolicy.encode(param, TRetentionPolicy.PNAME_retentionPolicy); - accessLatency.encode(param, TAccessLatency.PNAME_accessLatency); - - outputParam.put(fieldName, param); - } - - /** - * Get Retention Policy. - * - * @return TRetentionPolicy - */ - public TRetentionPolicy getRetentionPolicy() { - - return retentionPolicy; - } - - /** - * Set Retention Policy. - * - * @param retentionPolicy - * TRetentionPolicy - */ - public void setRetentionPolicy(TRetentionPolicy retentionPolicy) { - - this.retentionPolicy = retentionPolicy; - } - - /** - * Get AccessLatency. - * - * @return TAccessLatency - */ - public TAccessLatency getAccessLatency() { - - return accessLatency; - } - - /** - * Set AccessLatency. - * - * @param accessLatency - * TAccessLatency - */ - public void setAccessLatency(TAccessLatency accessLatency) { - - this.accessLatency = accessLatency; - } - - public String toString() { - - StringBuilder buf = new StringBuilder("RetentionPolicyInfo: "); - buf.append("["); - buf.append("retentionPolicy: " + retentionPolicy); - buf.append("] , ["); - buf.append("accessLatency: " + accessLatency); - buf.append("]"); - return buf.toString(); - } + private static final long serialVersionUID = -8530924298311412411L; + + /* Hashtable field names for encode() and decode() methods */ + public static String PNAME_retentionPolicyInfo = "retentionPolicyInfo"; + + public static final TRetentionPolicyInfo TAPE0_DISK1_RETENTION_POLICY = + new TRetentionPolicyInfo(TRetentionPolicy.REPLICA, TAccessLatency.ONLINE); + public static final TRetentionPolicyInfo TAPE1_DISK1_RETENTION_POLICY = + new TRetentionPolicyInfo(TRetentionPolicy.CUSTODIAL, TAccessLatency.ONLINE); + private TRetentionPolicy retentionPolicy; + private TAccessLatency accessLatency; + + public TRetentionPolicyInfo() {} + + public TRetentionPolicyInfo(TRetentionPolicy retentionPolicy, TAccessLatency accessLatency) { + + this.retentionPolicy = retentionPolicy; + this.accessLatency = accessLatency; + } + + /** + * decode() method creates a TRetentionPolicyInfo object from the inforation contained into the + * structured parameter received from the FE. + * + * @param inputParam hashtable structure + * @param fieldName field name + * @return + */ + public static TRetentionPolicyInfo decode(Map inputParam, String fieldName) { + + Map param = (Map) inputParam.get(fieldName); + if (param == null) return null; + TRetentionPolicy retPol = + TRetentionPolicy.decode(param, TRetentionPolicy.PNAME_retentionPolicy); + TAccessLatency accLat = TAccessLatency.decode(param, TAccessLatency.PNAME_accessLatency); + + return new TRetentionPolicyInfo(retPol, accLat); + } + + /** + * encode() method creates structured parameter representing this ogbject. It is passed to the FE. + * + * @param outputParam hashtable structure + * @param fieldName field name + */ + public void encode(Map outputParam, String fieldName) { + + Map param = new HashMap(); + + retentionPolicy.encode(param, TRetentionPolicy.PNAME_retentionPolicy); + accessLatency.encode(param, TAccessLatency.PNAME_accessLatency); + + outputParam.put(fieldName, param); + } + + /** + * Get Retention Policy. + * + * @return TRetentionPolicy + */ + public TRetentionPolicy getRetentionPolicy() { + + return retentionPolicy; + } + + /** + * Set Retention Policy. 
+ * + * @param retentionPolicy TRetentionPolicy + */ + public void setRetentionPolicy(TRetentionPolicy retentionPolicy) { + + this.retentionPolicy = retentionPolicy; + } + + /** + * Get AccessLatency. + * + * @return TAccessLatency + */ + public TAccessLatency getAccessLatency() { + + return accessLatency; + } + + /** + * Set AccessLatency. + * + * @param accessLatency TAccessLatency + */ + public void setAccessLatency(TAccessLatency accessLatency) { + + this.accessLatency = accessLatency; + } + + public String toString() { + + StringBuilder buf = new StringBuilder("RetentionPolicyInfo: "); + buf.append("["); + buf.append("retentionPolicy: " + retentionPolicy); + buf.append("] , ["); + buf.append("accessLatency: " + accessLatency); + buf.append("]"); + return buf.toString(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TReturnStatus.java b/src/main/java/it/grid/storm/srm/types/TReturnStatus.java index 16507338..6a8fc972 100644 --- a/src/main/java/it/grid/storm/srm/types/TReturnStatus.java +++ b/src/main/java/it/grid/storm/srm/types/TReturnStatus.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TReturnStatus value in SRM request. It is composed by a TStatusCode and * an explanetion String - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 @@ -17,189 +16,182 @@ import java.util.Calendar; import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TReturnStatus implements Serializable { - private static final long serialVersionUID = -4550845540710062810L; + private static final long serialVersionUID = -4550845540710062810L; - private static final Logger log = LoggerFactory.getLogger(TReturnStatus.class); + private static final Logger log = LoggerFactory.getLogger(TReturnStatus.class); - protected TStatusCode statusCode = null; - protected String explanation = null; - private Long lastUpdateTIme = null; + protected TStatusCode statusCode = null; + protected String explanation = null; + private Long lastUpdateTIme = null; - private static final String EMPTY_EXPLANATION = ""; - private static final int MAX_EXPLANATION_LENGTH = 255; + private static final String EMPTY_EXPLANATION = ""; + private static final int MAX_EXPLANATION_LENGTH = 255; - public static final String PNAME_RETURNSTATUS = "returnStatus"; - public static final String PNAME_STATUS = "status"; + public static final String PNAME_RETURNSTATUS = "returnStatus"; + public static final String PNAME_STATUS = "status"; - public TReturnStatus(TStatusCode statusCode, String explanation) { + public TReturnStatus(TStatusCode statusCode, String explanation) { - setStatusCode(statusCode); - setExplanation(explanation); - } + setStatusCode(statusCode); + setExplanation(explanation); + } - public TReturnStatus(TStatusCode statusCode) { + public TReturnStatus(TStatusCode statusCode) { - this(statusCode, EMPTY_EXPLANATION); - } + this(statusCode, EMPTY_EXPLANATION); + } - public TReturnStatus(TReturnStatus status) { + public TReturnStatus(TReturnStatus status) { - this(status.getStatusCode(), status.getExplanation()); - } + this(status.getStatusCode(), status.getExplanation()); + } - /** - * Returns the status code - * - * @return TStatusCode - */ - public TStatusCode getStatusCode() { + /** + * Returns the status code + * + * @return 
TStatusCode + */ + public TStatusCode getStatusCode() { - return statusCode; - } + return statusCode; + } - /** - * @param statusCode the statusCode to set - */ - protected void setStatusCode(TStatusCode statusCode) { - - if (statusCode == null) { - throw new IllegalArgumentException( - "Cannot set the status code, received null argument: statusCode=" + statusCode); - } - this.statusCode = statusCode; - updated(); - } + /** @param statusCode the statusCode to set */ + protected void setStatusCode(TStatusCode statusCode) { - /** - * Set explanation string - * - * @param expl String - */ - protected void setExplanation(String explanationString) { - - if (explanationString == null) { - explanation = EMPTY_EXPLANATION; - } else if (explanationString.length() <= MAX_EXPLANATION_LENGTH) { - explanation = explanationString; - } else { - explanation = explanationString.substring(0, MAX_EXPLANATION_LENGTH); - log.warn("TReturnStatus: Explanation string truncated at {} characters: " + "'{}'", - MAX_EXPLANATION_LENGTH, explanation); - } - updated(); + if (statusCode == null) { + throw new IllegalArgumentException( + "Cannot set the status code, received null argument: statusCode=" + statusCode); } + this.statusCode = statusCode; + updated(); + } + + /** + * Set explanation string + * + * @param expl String + */ + protected void setExplanation(String explanationString) { + + if (explanationString == null) { + explanation = EMPTY_EXPLANATION; + } else if (explanationString.length() <= MAX_EXPLANATION_LENGTH) { + explanation = explanationString; + } else { + explanation = explanationString.substring(0, MAX_EXPLANATION_LENGTH); + log.warn( + "TReturnStatus: Explanation string truncated at {} characters: " + "'{}'", + MAX_EXPLANATION_LENGTH, + explanation); + } + updated(); + } - /** - * Returns the explanation string - * - * @return String - */ - public String getExplanation() { + /** + * Returns the explanation string + * + * @return String + */ + public String getExplanation() { - return explanation; - } + return explanation; + } - /** - * @return the lastUpdateTIme - */ - public Long getLastUpdateTIme() { + /** @return the lastUpdateTIme */ + public Long getLastUpdateTIme() { - return lastUpdateTIme; - } + return lastUpdateTIme; + } - private void updated() { + private void updated() { - this.lastUpdateTIme = Calendar.getInstance().getTimeInMillis(); - } + this.lastUpdateTIme = Calendar.getInstance().getTimeInMillis(); + } - /** - * This method encode a TReturnStatus Object into an Hashtable used for xmlrpc communication. - */ - public void encode(Map outputParam, String name) { + /** This method encode a TReturnStatus Object into an Hashtable used for xmlrpc communication. 
*/ + public void encode(Map outputParam, String name) { - // Return STATUS OF REQUEST - HashMap globalStatus = new HashMap(); - globalStatus.put("statusCode", getStatusCode().getValue()); - globalStatus.put("explanation", getExplanation()); + // Return STATUS OF REQUEST + HashMap globalStatus = new HashMap(); + globalStatus.put("statusCode", getStatusCode().getValue()); + globalStatus.put("explanation", getExplanation()); - // Insert TReturnStatus struct into global Output structure - outputParam.put(name, globalStatus); + // Insert TReturnStatus struct into global Output structure + outputParam.put(name, globalStatus); + } - } + public String toString() { - public String toString() { + return statusCode + ": " + explanation; + } - return statusCode + ": " + explanation; - } + public boolean isSRM_SUCCESS() { - public boolean isSRM_SUCCESS() { + return statusCode.equals(TStatusCode.SRM_SUCCESS); + } - return statusCode.equals(TStatusCode.SRM_SUCCESS); - } + public void extendExplaination(String string) { - public void extendExplaination(String string) { + setExplanation(getExplanation() + " [ " + string + " ]"); + } - setExplanation(getExplanation() + " [ " + string + " ]"); - } + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + ((explanation == null) ? 0 : explanation.hashCode()); - result = prime * result + ((lastUpdateTIme == null) ? 0 : lastUpdateTIme.hashCode()); - result = prime * result + ((statusCode == null) ? 0 : statusCode.hashCode()); - return result; - } + final int prime = 31; + int result = 1; + result = prime * result + ((explanation == null) ? 0 : explanation.hashCode()); + result = prime * result + ((lastUpdateTIme == null) ? 0 : lastUpdateTIme.hashCode()); + result = prime * result + ((statusCode == null) ? 
0 : statusCode.hashCode()); + return result; + } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TReturnStatus other = (TReturnStatus) obj; - if (explanation == null) { - if (other.explanation != null) { - return false; - } - } else if (!explanation.equals(other.explanation)) { - return false; - } - if (lastUpdateTIme == null) { - if (other.lastUpdateTIme != null) { - return false; - } - } else if (!lastUpdateTIme.equals(other.lastUpdateTIme)) { - return false; - } - if (statusCode != other.statusCode) { - return false; - } - return true; - } + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TReturnStatus other = (TReturnStatus) obj; + if (explanation == null) { + if (other.explanation != null) { + return false; + } + } else if (!explanation.equals(other.explanation)) { + return false; + } + if (lastUpdateTIme == null) { + if (other.lastUpdateTIme != null) { + return false; + } + } else if (!lastUpdateTIme.equals(other.lastUpdateTIme)) { + return false; + } + if (statusCode != other.statusCode) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSURL.java b/src/main/java/it/grid/storm/srm/types/TSURL.java index 58420a37..6d0a5138 100644 --- a/src/main/java/it/grid/storm/srm/types/TSURL.java +++ b/src/main/java/it/grid/storm/srm/types/TSURL.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -18,19 +17,16 @@ import it.grid.storm.config.Configuration; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.naming.SURL; - import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a TSURL, that is a Site URL. It is made up of a - * SiteProtocol and a SFN. - * + * This class represents a TSURL, that is a Site URL. It is made up of a SiteProtocol and a SFN. 
+ * * @author Ezio Corso - Magnoni Luca * @author EGRID ICTP Trieste / CNAF INFN Bologna * @date Avril, 2005 @@ -38,101 +34,97 @@ */ public class TSURL { - private static Logger log = LoggerFactory.getLogger(TSURL.class); - - private static final String EMPTY_STRING = ""; + private static Logger log = LoggerFactory.getLogger(TSURL.class); - /** - * The surl as provided by User - */ - private final String rawSurl; - private final SiteProtocol sp; - private final SFN sfn; - private String normalizedStFN = null; - private int uniqueID = 0; + private static final String EMPTY_STRING = ""; - private boolean empty = true; + /** The surl as provided by User */ + private final String rawSurl; - public static final String PNAME_SURL = "surl"; - public static final String PNAME_FROMSURL = "fromSURL"; - public static final String PNAME_TOSURL = "toSURL"; + private final SiteProtocol sp; + private final SFN sfn; + private String normalizedStFN = null; + private int uniqueID = 0; - private static ArrayList tsurlManaged = new ArrayList(); - private static LinkedList defaultPorts = new LinkedList(); + private boolean empty = true; - static { + public static final String PNAME_SURL = "surl"; + public static final String PNAME_FROMSURL = "fromSURL"; + public static final String PNAME_TOSURL = "toSURL"; - // Lazy initialization from Configuration - if (tsurlManaged.isEmpty()) { + private static ArrayList tsurlManaged = new ArrayList(); + private static LinkedList defaultPorts = new LinkedList(); - TSURL checkTSURL; - String[] surlValid = Configuration.getInstance().getManagedSURLs(); - for (String checkSurl : surlValid) { - try { + static { - checkTSURL = TSURL.makeFromStringWellFormed(checkSurl); - tsurlManaged.add(checkTSURL); - log.debug("### SURL Managed : {}",checkTSURL); + // Lazy initialization from Configuration + if (tsurlManaged.isEmpty()) { - } catch (InvalidTSURLAttributesException e) { + TSURL checkTSURL; + String[] surlValid = Configuration.getInstance().getManagedSURLs(); + for (String checkSurl : surlValid) { + try { - log.error("Unable to build a TSURL : {}", checkSurl, e); - } - } - } + checkTSURL = TSURL.makeFromStringWellFormed(checkSurl); + tsurlManaged.add(checkTSURL); + log.debug("### SURL Managed : {}", checkTSURL); - if (defaultPorts.isEmpty()) { + } catch (InvalidTSURLAttributesException e) { - Integer[] ports = Configuration.getInstance() - .getManagedSurlDefaultPorts(); + log.error("Unable to build a TSURL : {}", checkSurl, e); + } + } + } - for (Integer portInteger : ports) { - try { + if (defaultPorts.isEmpty()) { - defaultPorts.add(Port.make(portInteger.intValue())); - log.debug("### Default SURL port : {}", defaultPorts.getLast()); + Integer[] ports = Configuration.getInstance().getManagedSurlDefaultPorts(); - } catch (InvalidPortAttributeException e) { + for (Integer portInteger : ports) { + try { - log.error("Unable to build a Port : {}", portInteger , e); + defaultPorts.add(Port.make(portInteger.intValue())); + log.debug("### Default SURL port : {}", defaultPorts.getLast()); - } - } - } - } + } catch (InvalidPortAttributeException e) { - private TSURL(SiteProtocol sp, SFN sfn, String rawSurl, boolean empty) { + log.error("Unable to build a Port : {}", portInteger, e); + } + } + } + } - this.sp = sp; - this.sfn = sfn; - this.rawSurl = rawSurl; - this.empty = empty; + private TSURL(SiteProtocol sp, SFN sfn, String rawSurl, boolean empty) { - } + this.sp = sp; + this.sfn = sfn; + this.rawSurl = rawSurl; + this.empty = empty; + } - /** - * Method that create a TSURL from 
structure received from FE. - * - * @throws InvalidTSURLAttributesException - */ - public static TSURL decode(Map inputParam, String name) - throws InvalidTSURLAttributesException { + /** + * Method that create a TSURL from structure received from FE. + * + * @throws InvalidTSURLAttributesException + */ + public static TSURL decode(Map inputParam, String name) + throws InvalidTSURLAttributesException { - String surlstring = (String) inputParam.get(name); - return TSURL.makeFromStringWellFormed(surlstring); - } + String surlstring = (String) inputParam.get(name); + return TSURL.makeFromStringWellFormed(surlstring); + } - /** - * Build a TSURL by extracting the content of the received SURL object and - * storing the received raw surl string - * - * @param surl - * @param rawSurl - * @return - * @throws InvalidTSURLAttributesException - */ + /** + * Build a TSURL by extracting the content of the received SURL object and storing the received + * raw surl string + * + * @param surl + * @param rawSurl + * @return + * @throws InvalidTSURLAttributesException + */ public static TSURL getWellFormed(SURL surl, String rawSurl) - throws InvalidTSURLAttributesException { + throws InvalidTSURLAttributesException { TSURL result; SFN sfn; @@ -157,14 +149,12 @@ public static TSURL getWellFormed(SURL surl, String rawSurl) serviceEndpoint = EndPoint.make(serviceEndPointString); log.debug("EndPoint: {}", serviceEndpoint); - } if (surl.getServiceHostPort() > -1) { port = Port.make(surl.getServiceHostPort()); log.debug("Port: {}", port); - } if (port != null) { @@ -214,351 +204,324 @@ public static TSURL getWellFormed(SURL surl, String rawSurl) log.error("Error building SFN: {}", e.getMessage(), e); throw new InvalidTSURLAttributesException(null, null); } + } + + /** + * Static factory method that returns a TSURL and that requires the SiteProtocol and the SFN of + * this TSURL: if any is null or empty an InvalidTSURLAttributesException is thrown. Check for + * ".." in Storage File Name for security issues. + */ + private static TSURL make(SiteProtocol sp, SFN sfn, String userSurl) + throws InvalidTSURLAttributesException { + + if ((sp == null) || (sfn == null) || (sp == SiteProtocol.EMPTY) || sfn.isEmpty()) { + throw new InvalidTSURLAttributesException(sp, sfn); + } + return new TSURL(sp, sfn, userSurl, false); + } + /** Static factory method that returns an empty TSURL. */ + public static TSURL makeEmpty() { + + return new TSURL(SiteProtocol.EMPTY, SFN.makeEmpty(), "", true); } - /** - * Static factory method that returns a TSURL and that requires the - * SiteProtocol and the SFN of this TSURL: if any is null or empty an - * InvalidTSURLAttributesException is thrown. Check for ".." in Storage File - * Name for security issues. - */ - private static TSURL make(SiteProtocol sp, SFN sfn, String userSurl) - throws InvalidTSURLAttributesException { - - if ((sp == null) || (sfn == null) || (sp == SiteProtocol.EMPTY) - || sfn.isEmpty()) { - throw new InvalidTSURLAttributesException(sp, sfn); - } - return new TSURL(sp, sfn, userSurl, false); - } - - /** - * Static factory method that returns an empty TSURL. - */ - public static TSURL makeEmpty() { - - return new TSURL(SiteProtocol.EMPTY, SFN.makeEmpty(), "", true); - } - - /** - * Static factory method that returns a TSURL from a String representation: if - * it is null or malformed then an Invalid TSURLAttributesException is thrown. 
- */ - public static TSURL makeFromStringWellFormed(String surlString) - throws InvalidTSURLAttributesException { - - TSURL result = null; - if (surlString == null) { - throw new InvalidTSURLAttributesException(null, null); - } - // first occurrences of :// - int separator = surlString.indexOf("://"); - if ((separator == -1) || (separator == 0)) { - // separator not found or right at the beginning! - throw new InvalidTSURLAttributesException(null, null); - } - String spString = surlString.substring(0, separator); - SiteProtocol sp = null; - try { - sp = SiteProtocol.fromString(spString); - } catch (IllegalArgumentException e) { - // do nothing - sp remains null and that is fine! - log.warn("TSURL: Site protocol by {} is empty, but that's fine.", - spString); - } - if ((separator + 3) > (surlString.length())) { - // separator found at the end! - throw new InvalidTSURLAttributesException(sp, null); - } - - log.debug("SURL string: {}", surlString); - SURL surl; - - try { - surl = SURL.makeSURLfromString(surlString); - } catch (NamespaceException ex) { - log.error("Invalid surl: {}", surlString, ex); - throw new InvalidTSURLAttributesException(null, null); - } - - result = getWellFormed(surl, surlString); - - return result; - } - - /** - * Static factory method that returns a TSURL from a String representation: if - * it is null or malformed then an Invalid TSURLAttributesException is thrown. - */ - public static TSURL makeFromStringValidate(String surlString) - throws InvalidTSURLAttributesException { - - TSURL tsurl = makeFromStringWellFormed(surlString); - - if (!isValid(tsurl)) { - - log.warn("SURL {} is not managed by this StoRM instance.", tsurl); - throw new InvalidTSURLAttributesException(tsurl.sp, tsurl.sfn()); - - } - return tsurl; - } - - /** - * Auxiliary method that returns true if the supplied TSURL corresponds to - * some managed SURL as declared in Configuration. 
- * - */ - public static boolean isValid(TSURL surl) { - return isManaged(surl, TSURL.tsurlManaged); - } - - public static boolean isManaged(TSURL surl, List managedSurls) { - - boolean result = false; - for (TSURL tsurlReference : managedSurls) { - if (isSURLManaged(surl, tsurlReference)) { - result = true; - break; - } - } - return result; - } - - /** - * A SURL is managed by a managed SURL if their hosts are the same and if the - * comingSURL specifies a port this port is the same as the one specified on - * the managed SURL or, if the managed SURL doesn't specifies a port this port - * is listed in the default ports - * - * @param comingSURL - * @param managedSURL - * @return - */ - private static boolean isSURLManaged(TSURL comingSURL, TSURL managedSURL) { - - boolean result = false; - String serviceHost = comingSURL.sfn().machine().toString(); - String expectedServiceHost = managedSURL.sfn().machine().toString(); - - log.debug("SURL VALID [ coming-service-host = {}, expected = {} ]", - serviceHost, expectedServiceHost); - - if (comingSURL.sfn().port().isEmpty()) { - - if (serviceHost.equalsIgnoreCase(expectedServiceHost)) { - result = true; - } - } else { - - if (!managedSURL.sfn().port().isEmpty()) { - - int expectedServicePort = managedSURL.sfn().port().toInt(); - int port = comingSURL.sfn().port().toInt(); - - log.debug("SURL VALID [ coming-service-port = {}, expected = {} ]", - port, expectedServicePort); - - if ((serviceHost.equalsIgnoreCase(expectedServiceHost)) - && (expectedServicePort == port)) { - result = true; - } - } else { - int port = comingSURL.sfn().port().toInt(); - try { - Port comingPort = Port.make(port); - if ((serviceHost.equalsIgnoreCase(expectedServiceHost)) - && (defaultPorts.contains(comingPort))) { - result = true; - } - } catch (InvalidPortAttributeException e) { - log.error("Invalid surl: {}", comingSURL, e); - } - } - } - return result; - } - - public void encode(Map param, String name) { - - param.put(name, toString()); - } - - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result + (empty ? 1231 : 1237); - result = prime * result - + ((normalizedStFN() == null) ? 0 : normalizedStFN().hashCode()); - result = prime * result + ((rawSurl == null) ? 0 : rawSurl.hashCode()); - result = prime * result + ((sfn() == null) ? 0 : sfn().hashCode()); - result = prime * result - + ((protocol() == null) ? 0 : protocol().hashCode()); - result = prime * result + uniqueId(); - return result; - } - - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TSURL other = (TSURL) obj; - if (empty != other.empty) { - return false; - } - if (normalizedStFN() == null) { - if (other.normalizedStFN() != null) { - return false; - } - } else if (!normalizedStFN().equals(other.normalizedStFN())) { - return false; - } - if (rawSurl == null) { - if (other.rawSurl != null) { - return false; - } - } else if (!rawSurl.equals(other.rawSurl)) { - return false; - } - if (sfn() == null) { - if (other.sfn() != null) { - return false; - } - } else if (!sfn().equals(other.sfn())) { - return false; - } - if (protocol() == null) { - if (other.protocol() != null) { - return false; - } - } else if (!protocol().equals(other.protocol())) { - return false; - } - if (uniqueId() != other.uniqueId()) { - return false; - } - return true; - } - - /** - * Returns a string representation of the SURL. 
- * - * @return String - */ - public String getSURLString() { - - if (empty) { - return EMPTY_STRING; - } - - StringBuilder builder = new StringBuilder(); - builder.append(sp); - builder.append("://"); - builder.append(sfn); - - return builder.toString(); - } - - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns the SiteProtocol of this TSURL. If this is empty, then - * an empty SiteProtocol is returned. - */ - public SiteProtocol protocol() { - - if (empty) { - return SiteProtocol.EMPTY; - } - return sp; - } - - /** - * @return the rawSurl - */ - public String rawSurl() { - - return rawSurl; - } - - /** - * Method that returns the SFN of this SURL. If this is empty, then an empty - * SFN is returned. - */ - public SFN sfn() { - - if (empty) { - return SFN.makeEmpty(); - } - return sfn; - } - - /** - * @return - */ - public String normalizedStFN() { - - if (this.normalizedStFN == null) { - this.normalizedStFN = this.sfn.stfn().toString(); - } - return this.normalizedStFN; - } - - /** - * @param normalizedStFN - * the normalizedStFN to set - */ - public void setNormalizedStFN(String normalizedStFN) { - - this.normalizedStFN = normalizedStFN; - } - - /** - * @param uniqueID - * the uniqueID to set - */ - public void setUniqueID(int uniqueID) { - - this.uniqueID = uniqueID; - } - - /** - * @return - */ - public int uniqueId() { - - if (this.uniqueID == 0) { - this.uniqueID = this.sfn.stfn().hashCode(); - } - return this.uniqueID; - } - - @Override - public String toString() { - - if (empty) { - return "Empty TSURL"; - } - - StringBuilder builder = new StringBuilder(); - builder.append(sp); - builder.append("://"); - builder.append(sfn); - - return builder.toString(); - } + /** + * Static factory method that returns a TSURL from a String representation: if it is null or + * malformed then an Invalid TSURLAttributesException is thrown. + */ + public static TSURL makeFromStringWellFormed(String surlString) + throws InvalidTSURLAttributesException { + + TSURL result = null; + if (surlString == null) { + throw new InvalidTSURLAttributesException(null, null); + } + // first occurrences of :// + int separator = surlString.indexOf("://"); + if ((separator == -1) || (separator == 0)) { + // separator not found or right at the beginning! + throw new InvalidTSURLAttributesException(null, null); + } + String spString = surlString.substring(0, separator); + SiteProtocol sp = null; + try { + sp = SiteProtocol.fromString(spString); + } catch (IllegalArgumentException e) { + // do nothing - sp remains null and that is fine! + log.warn("TSURL: Site protocol by {} is empty, but that's fine.", spString); + } + if ((separator + 3) > (surlString.length())) { + // separator found at the end! + throw new InvalidTSURLAttributesException(sp, null); + } + + log.debug("SURL string: {}", surlString); + SURL surl; + + try { + surl = SURL.makeSURLfromString(surlString); + } catch (NamespaceException ex) { + log.error("Invalid surl: {}", surlString, ex); + throw new InvalidTSURLAttributesException(null, null); + } + + result = getWellFormed(surl, surlString); + + return result; + } + + /** + * Static factory method that returns a TSURL from a String representation: if it is null or + * malformed then an Invalid TSURLAttributesException is thrown. 
+ */ + public static TSURL makeFromStringValidate(String surlString) + throws InvalidTSURLAttributesException { + + TSURL tsurl = makeFromStringWellFormed(surlString); + + if (!isValid(tsurl)) { + + log.warn("SURL {} is not managed by this StoRM instance.", tsurl); + throw new InvalidTSURLAttributesException(tsurl.sp, tsurl.sfn()); + } + return tsurl; + } + + /** + * Auxiliary method that returns true if the supplied TSURL corresponds to some managed SURL as + * declared in Configuration. + */ + public static boolean isValid(TSURL surl) { + return isManaged(surl, TSURL.tsurlManaged); + } + + public static boolean isManaged(TSURL surl, List managedSurls) { + + boolean result = false; + for (TSURL tsurlReference : managedSurls) { + if (isSURLManaged(surl, tsurlReference)) { + result = true; + break; + } + } + return result; + } + + /** + * A SURL is managed by a managed SURL if their hosts are the same and if the comingSURL specifies + * a port this port is the same as the one specified on the managed SURL or, if the managed SURL + * doesn't specifies a port this port is listed in the default ports + * + * @param comingSURL + * @param managedSURL + * @return + */ + private static boolean isSURLManaged(TSURL comingSURL, TSURL managedSURL) { + + boolean result = false; + String serviceHost = comingSURL.sfn().machine().toString(); + String expectedServiceHost = managedSURL.sfn().machine().toString(); + + log.debug( + "SURL VALID [ coming-service-host = {}, expected = {} ]", serviceHost, expectedServiceHost); + + if (comingSURL.sfn().port().isEmpty()) { + + if (serviceHost.equalsIgnoreCase(expectedServiceHost)) { + result = true; + } + } else { + + if (!managedSURL.sfn().port().isEmpty()) { + + int expectedServicePort = managedSURL.sfn().port().toInt(); + int port = comingSURL.sfn().port().toInt(); + + log.debug( + "SURL VALID [ coming-service-port = {}, expected = {} ]", port, expectedServicePort); + + if ((serviceHost.equalsIgnoreCase(expectedServiceHost)) && (expectedServicePort == port)) { + result = true; + } + } else { + int port = comingSURL.sfn().port().toInt(); + try { + Port comingPort = Port.make(port); + if ((serviceHost.equalsIgnoreCase(expectedServiceHost)) + && (defaultPorts.contains(comingPort))) { + result = true; + } + } catch (InvalidPortAttributeException e) { + log.error("Invalid surl: {}", comingSURL, e); + } + } + } + return result; + } + + public void encode(Map param, String name) { + + param.put(name, toString()); + } + + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + (empty ? 1231 : 1237); + result = prime * result + ((normalizedStFN() == null) ? 0 : normalizedStFN().hashCode()); + result = prime * result + ((rawSurl == null) ? 0 : rawSurl.hashCode()); + result = prime * result + ((sfn() == null) ? 0 : sfn().hashCode()); + result = prime * result + ((protocol() == null) ? 
0 : protocol().hashCode()); + result = prime * result + uniqueId(); + return result; + } + + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TSURL other = (TSURL) obj; + if (empty != other.empty) { + return false; + } + if (normalizedStFN() == null) { + if (other.normalizedStFN() != null) { + return false; + } + } else if (!normalizedStFN().equals(other.normalizedStFN())) { + return false; + } + if (rawSurl == null) { + if (other.rawSurl != null) { + return false; + } + } else if (!rawSurl.equals(other.rawSurl)) { + return false; + } + if (sfn() == null) { + if (other.sfn() != null) { + return false; + } + } else if (!sfn().equals(other.sfn())) { + return false; + } + if (protocol() == null) { + if (other.protocol() != null) { + return false; + } + } else if (!protocol().equals(other.protocol())) { + return false; + } + if (uniqueId() != other.uniqueId()) { + return false; + } + return true; + } + + /** + * Returns a string representation of the SURL. + * + * @return String + */ + public String getSURLString() { + + if (empty) { + return EMPTY_STRING; + } + + StringBuilder builder = new StringBuilder(); + builder.append(sp); + builder.append("://"); + builder.append(sfn); + + return builder.toString(); + } + + public boolean isEmpty() { + + return empty; + } + + /** + * Method that returns the SiteProtocol of this TSURL. If this is empty, then an empty + * SiteProtocol is returned. + */ + public SiteProtocol protocol() { + + if (empty) { + return SiteProtocol.EMPTY; + } + return sp; + } + + /** @return the rawSurl */ + public String rawSurl() { + + return rawSurl; + } + + /** Method that returns the SFN of this SURL. If this is empty, then an empty SFN is returned. */ + public SFN sfn() { + + if (empty) { + return SFN.makeEmpty(); + } + return sfn; + } + + /** @return */ + public String normalizedStFN() { + + if (this.normalizedStFN == null) { + this.normalizedStFN = this.sfn.stfn().toString(); + } + return this.normalizedStFN; + } + + /** @param normalizedStFN the normalizedStFN to set */ + public void setNormalizedStFN(String normalizedStFN) { + + this.normalizedStFN = normalizedStFN; + } + + /** @param uniqueID the uniqueID to set */ + public void setUniqueID(int uniqueID) { + + this.uniqueID = uniqueID; + } + + /** @return */ + public int uniqueId() { + + if (this.uniqueID == 0) { + this.uniqueID = this.sfn.stfn().hashCode(); + } + return this.uniqueID; + } + + @Override + public String toString() { + + if (empty) { + return "Empty TSURL"; + } + + StringBuilder builder = new StringBuilder(); + builder.append(sp); + builder.append("://"); + builder.append(sfn); + + return builder.toString(); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSURLInfo.java b/src/main/java/it/grid/storm/srm/types/TSURLInfo.java index 4f61e30f..751afe45 100644 --- a/src/main/java/it/grid/storm/srm/types/TSURLInfo.java +++ b/src/main/java/it/grid/storm/srm/types/TSURLInfo.java @@ -1,66 +1,51 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the TSURLInfo data associated with the SRM request, - * that is it contains info about: TSURL , StorageSystemInfo * @author Magnoni - * Luca - * + * This class represents the TSURLInfo data associated with the SRM request, that is it contains + * info about: TSURL , StorageSystemInfo * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.srm.types; public class TSURLInfo { - private TSURL surl = null; - private TStorageSystemInfo systemInfo = null; - - public TSURLInfo() { - - } - - public TSURLInfo(TSURL surl, TStorageSystemInfo info) - throws InvalidTSURLInfoAttributeException { + private TSURL surl = null; + private TStorageSystemInfo systemInfo = null; - boolean ok = (!(surl == null)); - if (!ok) - throw new InvalidTSURLInfoAttributeException(surl); - this.surl = surl; - this.systemInfo = info; - } + public TSURLInfo() {} - /** - * Method that return SURL specified in SRM request. - */ + public TSURLInfo(TSURL surl, TStorageSystemInfo info) throws InvalidTSURLInfoAttributeException { - public TSURL getSurl() { + boolean ok = (!(surl == null)); + if (!ok) throw new InvalidTSURLInfoAttributeException(surl); + this.surl = surl; + this.systemInfo = info; + } - return surl; - } + /** Method that return SURL specified in SRM request. */ + public TSURL getSurl() { - public void setSurl(TSURL surl) { + return surl; + } - this.surl = surl; - } + public void setSurl(TSURL surl) { - /** - * Set StorageSystemInfo - */ - public void setInfo(TStorageSystemInfo info) { + this.surl = surl; + } - this.systemInfo = info; - } + /** Set StorageSystemInfo */ + public void setInfo(TStorageSystemInfo info) { - /** - * Get StorageSystemInfo - */ - public TStorageSystemInfo getInfo() { + this.systemInfo = info; + } - return this.systemInfo; - } + /** Get StorageSystemInfo */ + public TStorageSystemInfo getInfo() { + return this.systemInfo; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSURLLifetimeReturnStatus.java b/src/main/java/it/grid/storm/srm/types/TSURLLifetimeReturnStatus.java index 6c247e2d..7f58293c 100644 --- a/src/main/java/it/grid/storm/srm/types/TSURLLifetimeReturnStatus.java +++ b/src/main/java/it/grid/storm/srm/types/TSURLLifetimeReturnStatus.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the TSURLLifetimeReturnStatus data associated with the - * SRM request. - * + * This class represents the TSURLLifetimeReturnStatus data associated with the SRM request. 
+ * * @author Alberto Forti * @author CNAF-INFN Bologna * @date Dec 2006 @@ -19,132 +17,125 @@ public class TSURLLifetimeReturnStatus { - private TSURL surl = null; - private TReturnStatus returnStatus = null; - private TLifeTimeInSeconds fileLifetime = null; - private TLifeTimeInSeconds pinLifetime = null; - - public TSURLLifetimeReturnStatus() { - - } - - public TSURLLifetimeReturnStatus(TSURL surl, TReturnStatus status, - TLifeTimeInSeconds fileLifetime, TLifeTimeInSeconds pinLifetime) - throws InvalidTSURLLifetimeReturnStatusAttributeException { - - boolean ok = (surl != null); - if (!ok) - throw new InvalidTSURLLifetimeReturnStatusAttributeException(surl); - this.surl = surl; - this.returnStatus = status; - this.fileLifetime = fileLifetime; - this.pinLifetime = pinLifetime; - } - - /** - * Returns the SURL. - * - * @return TSURL - */ - public TSURL getSurl() { - - return surl; - } - - /** - * Sets the SURL - * - * @param surl - * TSURL - */ - public void setSurl(TSURL surl) { - - this.surl = surl; - } - - /** - * Set the status. - * - * @param status - * TReturnStatus - */ - public void setStatus(TReturnStatus status) { - - this.returnStatus = status; - } - - /** - * Get the status. - * - * @return TReturnStatus - */ - public TReturnStatus getStatus() { - - return this.returnStatus; - } - - /** - * Get fileLifetime. - * - * @return TLifeTimeInSeconds - */ - public TLifeTimeInSeconds getFileLifetime() { - - return this.fileLifetime; - } - - /** - * Set fileLifetime. - * - * @param fileLifetime - */ - public void setFileLifetime(TLifeTimeInSeconds fileLifetime) { - - this.fileLifetime = fileLifetime; - } - - /** - * Get pinLifetime. - * - * @return TLifeTimeInSeconds - */ - public TLifeTimeInSeconds getpinLifetime() { - - return this.pinLifetime; - } - - /** - * Set pinLifetime. - * - * @param fileLifetime - */ - public void setpinLifetime(TLifeTimeInSeconds pinLifetime) { - - this.pinLifetime = pinLifetime; - } - - /** - * Add an element to 'outputVector'. The element is a Hashtable structure of - * this instance of TSURLLifetimeReturnStatus (used to comunicate with the - * FE). - * - * @param outputVector - * Vector - */ - public void encode(List outputVector) { - - Map surlRetStatusParam = new HashMap(); - if (this.surl != null) - this.surl.encode(surlRetStatusParam, TSURL.PNAME_SURL); - if (this.returnStatus != null) - this.returnStatus.encode(surlRetStatusParam, TReturnStatus.PNAME_STATUS); - if (this.fileLifetime != null) - this.fileLifetime.encode(surlRetStatusParam, - TLifeTimeInSeconds.PNAME_FILELIFETIME); - if (this.pinLifetime != null) - this.pinLifetime.encode(surlRetStatusParam, - TLifeTimeInSeconds.PNAME_PINLIFETIME); - - outputVector.add(surlRetStatusParam); - } + private TSURL surl = null; + private TReturnStatus returnStatus = null; + private TLifeTimeInSeconds fileLifetime = null; + private TLifeTimeInSeconds pinLifetime = null; + + public TSURLLifetimeReturnStatus() {} + + public TSURLLifetimeReturnStatus( + TSURL surl, + TReturnStatus status, + TLifeTimeInSeconds fileLifetime, + TLifeTimeInSeconds pinLifetime) + throws InvalidTSURLLifetimeReturnStatusAttributeException { + + boolean ok = (surl != null); + if (!ok) throw new InvalidTSURLLifetimeReturnStatusAttributeException(surl); + this.surl = surl; + this.returnStatus = status; + this.fileLifetime = fileLifetime; + this.pinLifetime = pinLifetime; + } + + /** + * Returns the SURL. 
+ * + * @return TSURL + */ + public TSURL getSurl() { + + return surl; + } + + /** + * Sets the SURL + * + * @param surl TSURL + */ + public void setSurl(TSURL surl) { + + this.surl = surl; + } + + /** + * Set the status. + * + * @param status TReturnStatus + */ + public void setStatus(TReturnStatus status) { + + this.returnStatus = status; + } + + /** + * Get the status. + * + * @return TReturnStatus + */ + public TReturnStatus getStatus() { + + return this.returnStatus; + } + + /** + * Get fileLifetime. + * + * @return TLifeTimeInSeconds + */ + public TLifeTimeInSeconds getFileLifetime() { + + return this.fileLifetime; + } + + /** + * Set fileLifetime. + * + * @param fileLifetime + */ + public void setFileLifetime(TLifeTimeInSeconds fileLifetime) { + + this.fileLifetime = fileLifetime; + } + + /** + * Get pinLifetime. + * + * @return TLifeTimeInSeconds + */ + public TLifeTimeInSeconds getpinLifetime() { + + return this.pinLifetime; + } + + /** + * Set pinLifetime. + * + * @param fileLifetime + */ + public void setpinLifetime(TLifeTimeInSeconds pinLifetime) { + + this.pinLifetime = pinLifetime; + } + + /** + * Add an element to 'outputVector'. The element is a Hashtable structure of this instance of + * TSURLLifetimeReturnStatus (used to comunicate with the FE). + * + * @param outputVector Vector + */ + public void encode(List outputVector) { + + Map surlRetStatusParam = new HashMap(); + if (this.surl != null) this.surl.encode(surlRetStatusParam, TSURL.PNAME_SURL); + if (this.returnStatus != null) + this.returnStatus.encode(surlRetStatusParam, TReturnStatus.PNAME_STATUS); + if (this.fileLifetime != null) + this.fileLifetime.encode(surlRetStatusParam, TLifeTimeInSeconds.PNAME_FILELIFETIME); + if (this.pinLifetime != null) + this.pinLifetime.encode(surlRetStatusParam, TLifeTimeInSeconds.PNAME_PINLIFETIME); + + outputVector.add(surlRetStatusParam); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSURLReturnStatus.java b/src/main/java/it/grid/storm/srm/types/TSURLReturnStatus.java index ca540a5f..2b35c0ad 100644 --- a/src/main/java/it/grid/storm/srm/types/TSURLReturnStatus.java +++ b/src/main/java/it/grid/storm/srm/types/TSURLReturnStatus.java @@ -1,12 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the TSURLReturnStatus data associated with the SRM - * request, that is it contains info about: TSURL , StorageSystemInfo * @author - * Magnoni Luca - * + * This class represents the TSURLReturnStatus data associated with the SRM request, that is it + * contains info about: TSURL , StorageSystemInfo * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -19,134 +17,120 @@ public class TSURLReturnStatus { - private TSURL surl = null; - private TReturnStatus returnStatus = null; - - public TSURLReturnStatus() { - - } - - public TSURLReturnStatus(TSURL surl, TReturnStatus status) { - - if (surl == null) - throw new IllegalArgumentException("SURL is null"); - this.surl = surl; - this.returnStatus = status; - } - - /** - * Method that return SURL specified in SRM request. 
- */ - - public TSURL getSurl() { - - return surl; - } - - public void setSurl(TSURL surl) { - - this.surl = surl; - } - - /** - * Set Status - */ - public void setStatus(TReturnStatus status) { - - this.returnStatus = status; - } - - /** - * Get Status - */ - public TReturnStatus getStatus() { - - return this.returnStatus; - } - - /* - * Encode function used to fill output structure for FE communication. - */ - public void encode(List outputVector) { - - // Creation of a single TMetaPathDetail struct - Map surlRetStatusParam = new HashMap(); - // Member name "surl" - if (this.surl != null) - this.surl.encode(surlRetStatusParam, TSURL.PNAME_SURL); - if (this.returnStatus != null) - this.returnStatus.encode(surlRetStatusParam, TReturnStatus.PNAME_STATUS); - - outputVector.add(surlRetStatusParam); - - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("TSURLReturnStatus [surl="); - builder.append(surl); - builder.append(", returnStatus="); - builder.append(returnStatus); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result - + ((returnStatus == null) ? 0 : returnStatus.hashCode()); - result = prime * result + ((surl == null) ? 0 : surl.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TSURLReturnStatus other = (TSURLReturnStatus) obj; - if (returnStatus == null) { - if (other.returnStatus != null) { - return false; - } - } else if (!returnStatus.equals(other.returnStatus)) { - return false; - } - if (surl == null) { - if (other.surl != null) { - return false; - } - } else if (!surl.equals(other.surl)) { - return false; - } - return true; - } - + private TSURL surl = null; + private TReturnStatus returnStatus = null; + + public TSURLReturnStatus() {} + + public TSURLReturnStatus(TSURL surl, TReturnStatus status) { + + if (surl == null) throw new IllegalArgumentException("SURL is null"); + this.surl = surl; + this.returnStatus = status; + } + + /** Method that return SURL specified in SRM request. */ + public TSURL getSurl() { + + return surl; + } + + public void setSurl(TSURL surl) { + + this.surl = surl; + } + + /** Set Status */ + public void setStatus(TReturnStatus status) { + + this.returnStatus = status; + } + + /** Get Status */ + public TReturnStatus getStatus() { + + return this.returnStatus; + } + + /* + * Encode function used to fill output structure for FE communication. 
+ */ + public void encode(List outputVector) { + + // Creation of a single TMetaPathDetail struct + Map surlRetStatusParam = new HashMap(); + // Member name "surl" + if (this.surl != null) this.surl.encode(surlRetStatusParam, TSURL.PNAME_SURL); + if (this.returnStatus != null) + this.returnStatus.encode(surlRetStatusParam, TReturnStatus.PNAME_STATUS); + + outputVector.add(surlRetStatusParam); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("TSURLReturnStatus [surl="); + builder.append(surl); + builder.append(", returnStatus="); + builder.append(returnStatus); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((returnStatus == null) ? 0 : returnStatus.hashCode()); + result = prime * result + ((surl == null) ? 0 : surl.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TSURLReturnStatus other = (TSURLReturnStatus) obj; + if (returnStatus == null) { + if (other.returnStatus != null) { + return false; + } + } else if (!returnStatus.equals(other.returnStatus)) { + return false; + } + if (surl == null) { + if (other.surl != null) { + return false; + } + } else if (!surl.equals(other.surl)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSizeInBytes.java b/src/main/java/it/grid/storm/srm/types/TSizeInBytes.java index 70f5bab9..33d4ac7f 100644 --- a/src/main/java/it/grid/storm/srm/types/TSizeInBytes.java +++ b/src/main/java/it/grid/storm/srm/types/TSizeInBytes.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the file size as a long and contains a unit of measure - * + * * @author Ezio Corso - Magnoni Luca * @author EGRID ICTP Trieste / CNAF INFN Bologna * @date Avril, 2005 @@ -13,178 +12,152 @@ package it.grid.storm.srm.types; import it.grid.storm.common.types.SizeUnit; - import java.io.Serializable; import java.util.Map; public class TSizeInBytes implements Serializable { - public static String PNAME_SIZE = "size"; - public static String PNAME_DESIREDSIZEOFTOTALSPACE = "desiredSizeOfTotalSpace"; - public static String PNAME_DESIREDSIZEOFGUARANTEEDSPACE = "desiredSizeOfGuaranteedSpace"; - public static String PNAME_SIZEOFTOTALRESERVEDSPACE = "sizeOfTotalReservedSpace"; - public static String PNAME_SIZEOFGUARANTEEDRESERVEDSPACE = "sizeOfGuaranteedReservedSpace"; - public static String PNAME_TOTALSIZE = "totalSize"; - public static String PNAME_GUARANTEEDSIZE = "guaranteedSize"; - public static String PNAME_UNUSEDSIZE = "unusedSize"; - - private long size = -1; - private SizeUnit unit = SizeUnit.EMPTY; - private boolean empty = true; - static private TSizeInBytes emptySize = null; // only instance of empty - // TSizeInBytes! - - /** - * Constructor requiring the size as a long, and the unit of measure SizeUnit. 
- */ - private TSizeInBytes(long size, SizeUnit unit, boolean empty) { - - this.size = size; - this.unit = unit; - this.empty = empty; - } - - /** - * Factory method that returns a TSizeInBytes object; an - * InvalidTSizeAttributesException is thrown if a null SizeUnit is passed, or - * if a negative long is passed as size. - */ - public static TSizeInBytes make(long size, SizeUnit unit) - throws InvalidTSizeAttributesException { - - if ((unit == null) || (size < 0)) - throw new InvalidTSizeAttributesException(size, unit); - return new TSizeInBytes(size, unit, false); - } - - /** - * Method that returns an empty TSizeInBytes object. - */ - public static TSizeInBytes makeEmpty() { - - if (emptySize != null) - return emptySize; - emptySize = new TSizeInBytes(-1, SizeUnit.EMPTY, true); - return emptySize; - } - - /** - * Method that returns a TSizeInBytes object retrieving its value by the - * Hashtable used for comunicating with the FE - */ - public static TSizeInBytes decode(Map inputParam, String fieldName) { - - String size = (String) inputParam.get(fieldName); - - if (size == null) - return TSizeInBytes.makeEmpty(); - long sizeLong = Long.parseLong(size); - - TSizeInBytes decodedSize = null; - try { - decodedSize = TSizeInBytes.make(sizeLong, SizeUnit.BYTES); - } catch (InvalidTSizeAttributesException e) { - return TSizeInBytes.makeEmpty(); - } - - return decodedSize; - } - - /** - * Method that converts this FileSize to the specified SizeUnit; beware that - * converting back will _not_ satisfy equality because of rounding in - * calculation. In case the wanted unit is null, or this TSizeInBytes is - * empty, -1 is returned. - */ - public double getSizeIn(SizeUnit unit) { - - if ((unit != null) && (!empty)) { - Long l_size = Long.valueOf(size); - double result = l_size.doubleValue() - * (this.unit.conversionFactor() / unit.conversionFactor()); - return result; - } else - return -1; - } - - /** - * Method that returns a long that represents the value with which this - * TSizeInBytes was created. In case this is empty, -1 is returned. - */ - public long value() { - - if (empty) - return -1; - return size; - } - - /** - * Method that returns the SizeUnit with which this TSizeInBytes was created. - * In case this is empty, SizeUnit.EMPTY is returned. - */ - public SizeUnit unit() { - - if (empty) - return SizeUnit.EMPTY; - return unit; - } - - /** - * Method that returns whether this is an empty TSizeInBytes. - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method uses to encode value for FE communication. - */ - public void encode(Map param, String fieldName) { - - if (empty) - return; - - long size_out; - Long sizeInBytes = Long.valueOf(this.value()); - if (sizeInBytes != null) - size_out = sizeInBytes.longValue(); - else - size_out = -1; - - param.put(fieldName, String.valueOf(size_out)); - } - - public String toString() { - - if (empty) - return "Empty"; - return size + " " + unit; - } - - /** - * Beware that this equality will _not_ return true for the same quantity - * expressed in different units of measure! 
- */ - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof TSizeInBytes)) - return false; - TSizeInBytes fs = (TSizeInBytes) o; - if ((empty) && (fs.empty)) - return true; - return ((!empty) && (!fs.empty) && (this.size == fs.size) && (this.unit == fs.unit)); - } - - public int hashCode() { - - if (empty) - return 0; - int hash = 17; - hash = 37 * hash + (Long.valueOf(size)).hashCode(); - hash = 37 * hash + unit.hashCode(); - return hash; - } + public static String PNAME_SIZE = "size"; + public static String PNAME_DESIREDSIZEOFTOTALSPACE = "desiredSizeOfTotalSpace"; + public static String PNAME_DESIREDSIZEOFGUARANTEEDSPACE = "desiredSizeOfGuaranteedSpace"; + public static String PNAME_SIZEOFTOTALRESERVEDSPACE = "sizeOfTotalReservedSpace"; + public static String PNAME_SIZEOFGUARANTEEDRESERVEDSPACE = "sizeOfGuaranteedReservedSpace"; + public static String PNAME_TOTALSIZE = "totalSize"; + public static String PNAME_GUARANTEEDSIZE = "guaranteedSize"; + public static String PNAME_UNUSEDSIZE = "unusedSize"; + + private long size = -1; + private SizeUnit unit = SizeUnit.EMPTY; + private boolean empty = true; + private static TSizeInBytes emptySize = null; // only instance of empty + // TSizeInBytes! + + /** Constructor requiring the size as a long, and the unit of measure SizeUnit. */ + private TSizeInBytes(long size, SizeUnit unit, boolean empty) { + + this.size = size; + this.unit = unit; + this.empty = empty; + } + + /** + * Factory method that returns a TSizeInBytes object; an InvalidTSizeAttributesException is thrown + * if a null SizeUnit is passed, or if a negative long is passed as size. + */ + public static TSizeInBytes make(long size, SizeUnit unit) throws InvalidTSizeAttributesException { + + if ((unit == null) || (size < 0)) throw new InvalidTSizeAttributesException(size, unit); + return new TSizeInBytes(size, unit, false); + } + + /** Method that returns an empty TSizeInBytes object. */ + public static TSizeInBytes makeEmpty() { + + if (emptySize != null) return emptySize; + emptySize = new TSizeInBytes(-1, SizeUnit.EMPTY, true); + return emptySize; + } + + /** + * Method that returns a TSizeInBytes object retrieving its value by the Hashtable used for + * comunicating with the FE + */ + public static TSizeInBytes decode(Map inputParam, String fieldName) { + + String size = (String) inputParam.get(fieldName); + + if (size == null) return TSizeInBytes.makeEmpty(); + long sizeLong = Long.parseLong(size); + + TSizeInBytes decodedSize = null; + try { + decodedSize = TSizeInBytes.make(sizeLong, SizeUnit.BYTES); + } catch (InvalidTSizeAttributesException e) { + return TSizeInBytes.makeEmpty(); + } + + return decodedSize; + } + + /** + * Method that converts this FileSize to the specified SizeUnit; beware that converting back will + * _not_ satisfy equality because of rounding in calculation. In case the wanted unit is null, or + * this TSizeInBytes is empty, -1 is returned. + */ + public double getSizeIn(SizeUnit unit) { + + if ((unit != null) && (!empty)) { + Long l_size = Long.valueOf(size); + double result = + l_size.doubleValue() * (this.unit.conversionFactor() / unit.conversionFactor()); + return result; + } else return -1; + } + + /** + * Method that returns a long that represents the value with which this TSizeInBytes was created. + * In case this is empty, -1 is returned. + */ + public long value() { + + if (empty) return -1; + return size; + } + + /** + * Method that returns the SizeUnit with which this TSizeInBytes was created. 
In case this is + * empty, SizeUnit.EMPTY is returned. + */ + public SizeUnit unit() { + + if (empty) return SizeUnit.EMPTY; + return unit; + } + + /** Method that returns whether this is an empty TSizeInBytes. */ + public boolean isEmpty() { + + return empty; + } + + /** Method uses to encode value for FE communication. */ + public void encode(Map param, String fieldName) { + + if (empty) return; + + long size_out; + Long sizeInBytes = Long.valueOf(this.value()); + if (sizeInBytes != null) size_out = sizeInBytes.longValue(); + else size_out = -1; + + param.put(fieldName, String.valueOf(size_out)); + } + + public String toString() { + + if (empty) return "Empty"; + return size + " " + unit; + } + + /** + * Beware that this equality will _not_ return true for the same quantity expressed in different + * units of measure! + */ + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof TSizeInBytes)) return false; + TSizeInBytes fs = (TSizeInBytes) o; + if ((empty) && (fs.empty)) return true; + return ((!empty) && (!fs.empty) && (this.size == fs.size) && (this.unit == fs.unit)); + } + + public int hashCode() { + + if (empty) return 0; + int hash = 17; + hash = 37 * hash + (Long.valueOf(size)).hashCode(); + hash = 37 * hash + unit.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSpaceToken.java b/src/main/java/it/grid/storm/srm/types/TSpaceToken.java index 752d464d..66bd16cb 100644 --- a/src/main/java/it/grid/storm/srm/types/TSpaceToken.java +++ b/src/main/java/it/grid/storm/srm/types/TSpaceToken.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents a Space Token - * + * * @author EGRID ICTP Trieste / CNAF Bologna * @date March 23rd, 2005 * @version 2.0 @@ -17,118 +16,104 @@ public class TSpaceToken implements Serializable { - /** - * - */ - private static final long serialVersionUID = 4511316534032776357L; - - public static String PNAME_SPACETOKEN = "spaceToken"; - - private String token = ""; // string representing the token! - private boolean empty = true; - - static private TSpaceToken emptyToken; // only instance of empty spaceToken - - private TSpaceToken(String token, boolean empty) { - - this.token = token; - this.empty = empty; - } - - /** - * Factory method thta requires a String; if it is null, an - * InvalidTSpaceTokenAttributeException is thrown. 
- */ - public static TSpaceToken make(String s) - throws InvalidTSpaceTokenAttributesException { - - if (s == null) - throw new InvalidTSpaceTokenAttributesException(); - return new TSpaceToken(s, false); - } - - /** - * Factory method that returns an Empty TSpaceToken - */ - public static TSpaceToken makeEmpty() { - - if (emptyToken != null) - return emptyToken; - emptyToken = new TSpaceToken("", true); - return emptyToken; - } - - public String getValue() { - - return token; - } - - public boolean isEmpty() { - - return empty; - } - - public String toString() { - - if (empty) - return "Empty"; - return token; - } - - public boolean equals(Object o) { - - if (o == this) - return true; - if (!(o instanceof TSpaceToken)) - return false; - TSpaceToken st = (TSpaceToken) o; - if ((empty) && (st.empty)) - return true; - return (!empty) && (!st.empty) && token.equals(st.token); - } - - public int hashCode() { - - if (empty) - return 0; - int hash = 17; - return 37 * hash + token.hashCode(); - } - - /** - * Decode method, used to represents this object into a structured parameter for - * FE communication. - * - * @param vector - */ - public final static TSpaceToken decode(Map param, String name) { - - String tokenString = (String) param.get(name); - TSpaceToken spaceToken = TSpaceToken.makeEmpty(); - if (tokenString != null) { - // Creation of srm TSpaceToken - try { - spaceToken = TSpaceToken.make(tokenString); - } catch (InvalidTSpaceTokenAttributesException e) { - ;// log.warn("Error creating TSpaceToken:"+e); - } - } - return spaceToken; - } - - /** - * Encode method, used to represents this object into a structured parameter for - * FE communication. - * - * @param vector - */ - public void encode(List list) { - - list.add(this.toString()); - } - - public void encode(Map outputParam, String fieldName) { - - outputParam.put(fieldName, (String) token); - } + /** */ + private static final long serialVersionUID = 4511316534032776357L; + + public static String PNAME_SPACETOKEN = "spaceToken"; + + private String token = ""; // string representing the token! + private boolean empty = true; + + private static TSpaceToken emptyToken; // only instance of empty spaceToken + + private TSpaceToken(String token, boolean empty) { + + this.token = token; + this.empty = empty; + } + + /** + * Factory method thta requires a String; if it is null, an InvalidTSpaceTokenAttributeException + * is thrown. + */ + public static TSpaceToken make(String s) throws InvalidTSpaceTokenAttributesException { + + if (s == null) throw new InvalidTSpaceTokenAttributesException(); + return new TSpaceToken(s, false); + } + + /** Factory method that returns an Empty TSpaceToken */ + public static TSpaceToken makeEmpty() { + + if (emptyToken != null) return emptyToken; + emptyToken = new TSpaceToken("", true); + return emptyToken; + } + + public String getValue() { + + return token; + } + + public boolean isEmpty() { + + return empty; + } + + public String toString() { + + if (empty) return "Empty"; + return token; + } + + public boolean equals(Object o) { + + if (o == this) return true; + if (!(o instanceof TSpaceToken)) return false; + TSpaceToken st = (TSpaceToken) o; + if ((empty) && (st.empty)) return true; + return (!empty) && (!st.empty) && token.equals(st.token); + } + + public int hashCode() { + + if (empty) return 0; + int hash = 17; + return 37 * hash + token.hashCode(); + } + + /** + * Decode method, used to represents this object into a structured parameter for FE communication. 
+ * + * @param vector + */ + public static final TSpaceToken decode(Map param, String name) { + + String tokenString = (String) param.get(name); + TSpaceToken spaceToken = TSpaceToken.makeEmpty(); + if (tokenString != null) { + // Creation of srm TSpaceToken + try { + spaceToken = TSpaceToken.make(tokenString); + } catch ( + InvalidTSpaceTokenAttributesException e) {; // log.warn("Error creating TSpaceToken:"+e); + } + } + return spaceToken; + } + + /** + * Encode method, used to represents this object into a structured parameter for FE communication. + * + * @param vector + */ + public void encode(List list) { + + list.add(this.toString()); + } + + public void encode(Map outputParam, String fieldName) { + + outputParam.put(fieldName, (String) token); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TSpaceType.java b/src/main/java/it/grid/storm/srm/types/TSpaceType.java index be24b418..957f793a 100644 --- a/src/main/java/it/grid/storm/srm/types/TSpaceType.java +++ b/src/main/java/it/grid/storm/srm/types/TSpaceType.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TSpaceType of a Space Area managed by Srm. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 @@ -16,59 +15,52 @@ public class TSpaceType implements Serializable { - private String type = null; + private String type = null; - public static final TSpaceType VOLATILE = new TSpaceType("Volatile"); - public static final TSpaceType PERMANENT = new TSpaceType("Permanent"); - public static final TSpaceType DURABLE = new TSpaceType("Durable"); - public static final TSpaceType VOSPACE = new TSpaceType("VOSpace"); - public static final TSpaceType EMPTY = new TSpaceType("Empty"); + public static final TSpaceType VOLATILE = new TSpaceType("Volatile"); + public static final TSpaceType PERMANENT = new TSpaceType("Permanent"); + public static final TSpaceType DURABLE = new TSpaceType("Durable"); + public static final TSpaceType VOSPACE = new TSpaceType("VOSpace"); + public static final TSpaceType EMPTY = new TSpaceType("Empty"); - private TSpaceType(String type) { + private TSpaceType(String type) { - this.type = type; - } + this.type = type; + } - public boolean isEmpty() { + public boolean isEmpty() { - if (this == this.EMPTY) - return true; - else - return false; - } + if (this == this.EMPTY) return true; + else return false; + } - public String toString() { + public String toString() { - return type; - } + return type; + } - public String getValue() { + public String getValue() { - return type; - } + return type; + } - public static TSpaceType getTSpaceType(String type) { + public static TSpaceType getTSpaceType(String type) { - if (type == null) - return EMPTY; + if (type == null) return EMPTY; - if (type.toLowerCase().replaceAll(" ", "") - .equals(VOLATILE.getValue().toLowerCase())) { - return VOLATILE; - } - if (type.toLowerCase().replaceAll(" ", "") - .equals(PERMANENT.getValue().toLowerCase())) { - return PERMANENT; - } - if (type.toLowerCase().replaceAll(" ", "") - .equals(DURABLE.getValue().toLowerCase())) { - return DURABLE; - } - if (type.toLowerCase().replaceAll(" ", "") - .equals(VOSPACE.getValue().toLowerCase())) { - return VOSPACE; - } else { - return EMPTY; - } - } + if (type.toLowerCase().replaceAll(" ", "").equals(VOLATILE.getValue().toLowerCase())) { + return VOLATILE; + } + 
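+    // The same normalization (lower-case, embedded spaces stripped) is applied to the remaining
+    // checks below; as an illustration only, an input such as "Per manent" maps to PERMANENT.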
if (type.toLowerCase().replaceAll(" ", "").equals(PERMANENT.getValue().toLowerCase())) { + return PERMANENT; + } + if (type.toLowerCase().replaceAll(" ", "").equals(DURABLE.getValue().toLowerCase())) { + return DURABLE; + } + if (type.toLowerCase().replaceAll(" ", "").equals(VOSPACE.getValue().toLowerCase())) { + return VOSPACE; + } else { + return EMPTY; + } + } } diff --git a/src/main/java/it/grid/storm/srm/types/TStatusCode.java b/src/main/java/it/grid/storm/srm/types/TStatusCode.java index e4fc4717..12367c20 100644 --- a/src/main/java/it/grid/storm/srm/types/TStatusCode.java +++ b/src/main/java/it/grid/storm/srm/types/TStatusCode.java @@ -1,104 +1,128 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TStatusCode of TReturnStatus - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; import java.util.Collection; import java.util.LinkedList; public enum TStatusCode { - - EMPTY, SRM_SUCCESS, SRM_FAILURE, SRM_AUTHENTICATION_FAILURE, SRM_AUTHORIZATION_FAILURE, SRM_INVALID_REQUEST, SRM_INVALID_PATH, SRM_FILE_LIFETIME_EXPIRED, SRM_SPACE_LIFETIME_EXPIRED, SRM_EXCEED_ALLOCATION, SRM_NO_USER_SPACE, SRM_NO_FREE_SPACE, SRM_DUPLICATION_ERROR, SRM_NON_EMPTY_DIRECTORY, SRM_TOO_MANY_RESULTS, SRM_INTERNAL_ERROR, SRM_FATAL_INTERNAL_ERROR, SRM_NOT_SUPPORTED, SRM_REQUEST_QUEUED( - false), SRM_REQUEST_INPROGRESS(false), SRM_REQUEST_SUSPENDED(false), SRM_ABORTED, SRM_RELEASED, SRM_FILE_PINNED( - false), SRM_FILE_IN_CACHE(false), SRM_FILE_BUSY, SRM_SPACE_AVAILABLE(false), SRM_LOWER_SPACE_GRANTED, SRM_DONE, SRM_PARTIAL_SUCCESS, SRM_REQUEST_TIMED_OUT, SRM_LAST_COPY, SRM_FILE_LOST, SRM_FILE_UNAVAILABLE, SRM_CUSTOM_STATUS( - false); - - static { - SRM_FILE_PINNED.addIncompatibleStatus(SRM_REQUEST_SUSPENDED); - SRM_FILE_PINNED.addIncompatibleStatus(SRM_SPACE_AVAILABLE); - SRM_FILE_PINNED.addIncompatibleStatus(SRM_FILE_BUSY); - SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_REQUEST_SUSPENDED); - SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_FILE_PINNED); - SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_FILE_BUSY); - } - private final boolean finalStatus; - - private final LinkedList incompatibleStatuses = new LinkedList(); - - private TStatusCode(boolean isFinal) { - - this.finalStatus = isFinal; - } - - private void addIncompatibleStatus(TStatusCode incompatibleStatus) { - - incompatibleStatuses.add(incompatibleStatus); - } - - private TStatusCode() { - - this(true); - } - - public String getValue() { - - return this.name(); - } - - public boolean isFinalStatus() throws IllegalArgumentException { - - return finalStatus; - } - - /* - * For a given surl checks the compatibility of all associated statuses with - * the final status of the current operation (e.g.: SRM_FILE_PINNED for - * PtG/BoL or SRM_SPACE_AVAILABLE for PtP) - */ - public boolean isCompatibleWith(Collection statuses) { - - for (TReturnStatus status : statuses) { - if (!this.isCompatibleWith(status.getStatusCode())) { - return false; - } - } - return true; - } - - /* - * To be compatible with the final status for the invoked operation - * (PtP/PtG/BoL) the passed-in status must be different and it must belong to - * the list of compatible statuses or just be a final status itself. 
For the - * PtG operation the compatibility must be true even if the provided - * statuscode is equal to the final status of the operation (SRM_FILE_PINNED) - */ - public boolean isCompatibleWith(TStatusCode statusCode) { - - if (statusCode.finalStatus) { - return !finalStatus; - } - - if (this.incompatibleStatuses.contains(statusCode)) - return false; - else { - if (this.equals(statusCode)) { - if (statusCode.equals(SRM_FILE_PINNED)) - return true; - else - return false; - } else - return true; - } - } - + EMPTY, + SRM_SUCCESS, + SRM_FAILURE, + SRM_AUTHENTICATION_FAILURE, + SRM_AUTHORIZATION_FAILURE, + SRM_INVALID_REQUEST, + SRM_INVALID_PATH, + SRM_FILE_LIFETIME_EXPIRED, + SRM_SPACE_LIFETIME_EXPIRED, + SRM_EXCEED_ALLOCATION, + SRM_NO_USER_SPACE, + SRM_NO_FREE_SPACE, + SRM_DUPLICATION_ERROR, + SRM_NON_EMPTY_DIRECTORY, + SRM_TOO_MANY_RESULTS, + SRM_INTERNAL_ERROR, + SRM_FATAL_INTERNAL_ERROR, + SRM_NOT_SUPPORTED, + SRM_REQUEST_QUEUED(false), + SRM_REQUEST_INPROGRESS(false), + SRM_REQUEST_SUSPENDED(false), + SRM_ABORTED, + SRM_RELEASED, + SRM_FILE_PINNED(false), + SRM_FILE_IN_CACHE(false), + SRM_FILE_BUSY, + SRM_SPACE_AVAILABLE(false), + SRM_LOWER_SPACE_GRANTED, + SRM_DONE, + SRM_PARTIAL_SUCCESS, + SRM_REQUEST_TIMED_OUT, + SRM_LAST_COPY, + SRM_FILE_LOST, + SRM_FILE_UNAVAILABLE, + SRM_CUSTOM_STATUS(false); + + static { + SRM_FILE_PINNED.addIncompatibleStatus(SRM_REQUEST_SUSPENDED); + SRM_FILE_PINNED.addIncompatibleStatus(SRM_SPACE_AVAILABLE); + SRM_FILE_PINNED.addIncompatibleStatus(SRM_FILE_BUSY); + SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_REQUEST_SUSPENDED); + SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_FILE_PINNED); + SRM_SPACE_AVAILABLE.addIncompatibleStatus(SRM_FILE_BUSY); + } + + private final boolean finalStatus; + + private final LinkedList incompatibleStatuses = new LinkedList(); + + private TStatusCode(boolean isFinal) { + + this.finalStatus = isFinal; + } + + private void addIncompatibleStatus(TStatusCode incompatibleStatus) { + + incompatibleStatuses.add(incompatibleStatus); + } + + private TStatusCode() { + + this(true); + } + + public String getValue() { + + return this.name(); + } + + public boolean isFinalStatus() throws IllegalArgumentException { + + return finalStatus; + } + + /* + * For a given surl checks the compatibility of all associated statuses with + * the final status of the current operation (e.g.: SRM_FILE_PINNED for + * PtG/BoL or SRM_SPACE_AVAILABLE for PtP) + */ + public boolean isCompatibleWith(Collection statuses) { + + for (TReturnStatus status : statuses) { + if (!this.isCompatibleWith(status.getStatusCode())) { + return false; + } + } + return true; + } + + /* + * To be compatible with the final status for the invoked operation + * (PtP/PtG/BoL) the passed-in status must be different and it must belong to + * the list of compatible statuses or just be a final status itself. 
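+   * A non-exhaustive illustration of the rules implemented below:
+   * SRM_FILE_PINNED.isCompatibleWith(SRM_FILE_PINNED) yields true, whereas
+   * SRM_SPACE_AVAILABLE.isCompatibleWith(SRM_FILE_PINNED) yields false, since the two codes are
+   * registered as mutually incompatible in the static initializer above.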
For the + * PtG operation the compatibility must be true even if the provided + * statuscode is equal to the final status of the operation (SRM_FILE_PINNED) + */ + public boolean isCompatibleWith(TStatusCode statusCode) { + + if (statusCode.finalStatus) { + return !finalStatus; + } + + if (this.incompatibleStatuses.contains(statusCode)) return false; + else { + if (this.equals(statusCode)) { + if (statusCode.equals(SRM_FILE_PINNED)) return true; + else return false; + } else return true; + } + } } diff --git a/src/main/java/it/grid/storm/srm/types/TStorageSystemInfo.java b/src/main/java/it/grid/storm/srm/types/TStorageSystemInfo.java index f924900b..c202c7d0 100644 --- a/src/main/java/it/grid/storm/srm/types/TStorageSystemInfo.java +++ b/src/main/java/it/grid/storm/srm/types/TStorageSystemInfo.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * Class that represents the TStorageSystemInfo. - * + * * @author: CNAF Bologna * @version: 1.0 * @date: May 2005 @@ -15,37 +14,37 @@ public class TStorageSystemInfo implements Serializable { - private String info = null; - private boolean empty = true; + private String info = null; + private boolean empty = true; - public String toString() { + public String toString() { - return info; - } + return info; + } - public TStorageSystemInfo(String s, boolean empty) { + public TStorageSystemInfo(String s, boolean empty) { - this.info = s; - this.empty = empty; - } + this.info = s; + this.empty = empty; + } - public static TStorageSystemInfo makeEmpty() { + public static TStorageSystemInfo makeEmpty() { - return new TStorageSystemInfo("", true); - } + return new TStorageSystemInfo("", true); + } - public static TStorageSystemInfo make(String s) { + public static TStorageSystemInfo make(String s) { - return new TStorageSystemInfo(s, false); - } + return new TStorageSystemInfo(s, false); + } - public boolean isEmpty() { + public boolean isEmpty() { - return empty; - } + return empty; + } - public String getValue() { + public String getValue() { - return info; - } + return info; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TTURL.java b/src/main/java/it/grid/storm/srm/types/TTURL.java index 23d9e79f..eb20c2a5 100644 --- a/src/main/java/it/grid/storm/srm/types/TTURL.java +++ b/src/main/java/it/grid/storm/srm/types/TTURL.java @@ -1,190 +1,176 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; -import java.util.Map; - import it.grid.storm.common.types.InvalidPFNAttributeException; import it.grid.storm.common.types.InvalidTFNAttributesException; import it.grid.storm.common.types.PFN; import it.grid.storm.common.types.TFN; import it.grid.storm.common.types.TransferProtocol; - +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents a TURL, that is a Transfer URL. It is made up of a - * TransferProtocol and a TFN. - * + * This class represents a TURL, that is a Transfer URL. It is made up of a TransferProtocol and a + * TFN. 
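+ * As an illustration only (protocol, host and path below are made-up examples),
+ * {@code makeFromString("gsiftp://gridftp.example.org:2811/some/file")} yields a TTURL whose
+ * {@code protocol()} is gsiftp and whose {@code tfn()} carries host, port and path, assuming
+ * gsiftp is among the known TransferProtocol values.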
+ * * @author EGRID ICTP Trieste - CNAF Bologna * @date March 26th, 2005 * @version 2.0 */ public class TTURL { - private static Logger log = LoggerFactory.getLogger(TTURL.class);; - private TransferProtocol tp; - private TFN tfn; - private boolean empty = true; // boolean true if this is an empty object - - public static final String PNAME_TURL = "turl"; - - private TTURL(TransferProtocol tp, TFN tfn, boolean empty) { - - this.tp = tp; - this.tfn = tfn; - this.empty = empty; - } - - /** - * Static method that returns an empty TTURL. - */ - public static TTURL makeEmpty() { - - return new TTURL(TransferProtocol.EMPTY, TFN.makeEmpty(), true); - } - - /** - * Static method that requires the TransferProtocol and the TFN of this TURL: - * if any is null or empty, an InvalidTURLAttributesException is thrown. - */ - public static TTURL make(TransferProtocol tp, TFN tfn) - throws InvalidTTURLAttributesException { - - if ((tp == null) || (tfn == null) || (tp == TransferProtocol.EMPTY) - || (tfn.isEmpty())) { - throw new InvalidTTURLAttributesException(tp, tfn); - } - return new TTURL(tp, tfn, false); - } - - /** - * Static factory method that returns a TTURL from a String representation: if - * it is null or malformed then an InvalidTTURLAttributesException is thrown. - */ - public static TTURL makeFromString(String s) - throws InvalidTTURLAttributesException { - - if (s == null) { - throw new InvalidTTURLAttributesException(null, null); - } - int separator = s.indexOf("://"); // first occurence of :// - if ((separator == -1) || (separator == 0)) { - throw new InvalidTTURLAttributesException(null, null); // separator not - // found or right - // at the - // beginning! - } - String tpString = s.substring(0, separator); - TransferProtocol tp = null; - try { - tp = TransferProtocol.getTransferProtocol(tpString); - } catch (IllegalArgumentException e) { - log.warn("TTURL: Transfer protocol by {} is empty, but that's fine.", - tpString); - } - if ((separator + 3) > (s.length())) { - throw new InvalidTTURLAttributesException(tp, null); // separator found at - // the end! - } - String tfnString = s.substring(separator + 3, s.length()); - TFN tfn = null; - if (tfnString.startsWith("/")) { - try { - tfn = TFN.makeByPFN(PFN.make(tfnString)); - } catch (InvalidTFNAttributesException e) { - log.warn("TFN by {} is empty, but that's fine.", tfnString); - } catch (InvalidPFNAttributeException ex) { - log.error("Invalid PFN: {}", tfnString, ex); - } - } else { - try { - tfn = TFN.makeFromString(tfnString); - } catch (InvalidTFNAttributesException e) { - log.warn("TFN by {} is empty, but that's fine.", tfnString); - } - } - return TTURL.make(tp, tfn); - } - - /** - * Method that returns true if this object is an empty TTURL - */ - public boolean isEmpty() { - - return empty; - } - - /** - * Method that returns the TransferProtocol of this TURL. If it is an empty - * TTURL, then an empty TransferProtocol is returned. - */ - public TransferProtocol protocol() { - - if (empty) { - return TransferProtocol.EMPTY; - } - return tp; - } - - /** - * Method that returns the TFN of this TURL. If it is an empty TTURL, then an - * empty TFN is returned. - */ - public TFN tfn() { - - if (empty) { - return TFN.makeEmpty(); - } - return tfn; - } - - /** - * Encode TTURL for xmlrpc communication. 
- */ - public void encode(Map param, String name) { - - param.put(name, toString()); - } - - @Override - public String toString() { - - if (empty) { - return "Empty TTURL"; - } - return tp + "://" + tfn; - } - - @Override - public boolean equals(Object o) { - - if (o == this) { - return true; - } - if (!(o instanceof TTURL)) { - return false; - } - TTURL turlo = (TTURL) o; - if (empty && turlo.empty) { - return true; - } - return (!empty) && (!turlo.empty) && tp.equals(turlo.tp) - && tfn.equals(turlo.tfn); - } - - @Override - public int hashCode() { - - if (empty) { - return 0; - } - int hash = 17; - hash = 37 * hash + tp.hashCode(); - hash = 37 * hash + tfn.hashCode(); - return hash; - } + private static Logger log = LoggerFactory.getLogger(TTURL.class);; + private TransferProtocol tp; + private TFN tfn; + private boolean empty = true; // boolean true if this is an empty object + + public static final String PNAME_TURL = "turl"; + + private TTURL(TransferProtocol tp, TFN tfn, boolean empty) { + + this.tp = tp; + this.tfn = tfn; + this.empty = empty; + } + + /** Static method that returns an empty TTURL. */ + public static TTURL makeEmpty() { + + return new TTURL(TransferProtocol.EMPTY, TFN.makeEmpty(), true); + } + + /** + * Static method that requires the TransferProtocol and the TFN of this TURL: if any is null or + * empty, an InvalidTURLAttributesException is thrown. + */ + public static TTURL make(TransferProtocol tp, TFN tfn) throws InvalidTTURLAttributesException { + + if ((tp == null) || (tfn == null) || (tp == TransferProtocol.EMPTY) || (tfn.isEmpty())) { + throw new InvalidTTURLAttributesException(tp, tfn); + } + return new TTURL(tp, tfn, false); + } + + /** + * Static factory method that returns a TTURL from a String representation: if it is null or + * malformed then an InvalidTTURLAttributesException is thrown. + */ + public static TTURL makeFromString(String s) throws InvalidTTURLAttributesException { + + if (s == null) { + throw new InvalidTTURLAttributesException(null, null); + } + int separator = s.indexOf("://"); // first occurence of :// + if ((separator == -1) || (separator == 0)) { + throw new InvalidTTURLAttributesException(null, null); // separator not + // found or right + // at the + // beginning! + } + String tpString = s.substring(0, separator); + TransferProtocol tp = null; + try { + tp = TransferProtocol.getTransferProtocol(tpString); + } catch (IllegalArgumentException e) { + log.warn("TTURL: Transfer protocol by {} is empty, but that's fine.", tpString); + } + if ((separator + 3) > (s.length())) { + throw new InvalidTTURLAttributesException(tp, null); // separator found at + // the end! + } + String tfnString = s.substring(separator + 3, s.length()); + TFN tfn = null; + if (tfnString.startsWith("/")) { + try { + tfn = TFN.makeByPFN(PFN.make(tfnString)); + } catch (InvalidTFNAttributesException e) { + log.warn("TFN by {} is empty, but that's fine.", tfnString); + } catch (InvalidPFNAttributeException ex) { + log.error("Invalid PFN: {}", tfnString, ex); + } + } else { + try { + tfn = TFN.makeFromString(tfnString); + } catch (InvalidTFNAttributesException e) { + log.warn("TFN by {} is empty, but that's fine.", tfnString); + } + } + return TTURL.make(tp, tfn); + } + + /** Method that returns true if this object is an empty TTURL */ + public boolean isEmpty() { + + return empty; + } + + /** + * Method that returns the TransferProtocol of this TURL. If it is an empty TTURL, then an empty + * TransferProtocol is returned. 
+ */ + public TransferProtocol protocol() { + + if (empty) { + return TransferProtocol.EMPTY; + } + return tp; + } + + /** + * Method that returns the TFN of this TURL. If it is an empty TTURL, then an empty TFN is + * returned. + */ + public TFN tfn() { + + if (empty) { + return TFN.makeEmpty(); + } + return tfn; + } + + /** Encode TTURL for xmlrpc communication. */ + public void encode(Map param, String name) { + + param.put(name, toString()); + } + + @Override + public String toString() { + + if (empty) { + return "Empty TTURL"; + } + return tp + "://" + tfn; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (!(o instanceof TTURL)) { + return false; + } + TTURL turlo = (TTURL) o; + if (empty && turlo.empty) { + return true; + } + return (!empty) && (!turlo.empty) && tp.equals(turlo.tp) && tfn.equals(turlo.tfn); + } + + @Override + public int hashCode() { + + if (empty) { + return 0; + } + int hash = 17; + hash = 37 * hash + tp.hashCode(); + hash = 37 * hash + tfn.hashCode(); + return hash; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TTransferParameters.java b/src/main/java/it/grid/storm/srm/types/TTransferParameters.java index 6b93db3f..b5c7515b 100644 --- a/src/main/java/it/grid/storm/srm/types/TTransferParameters.java +++ b/src/main/java/it/grid/storm/srm/types/TTransferParameters.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TTransferParameters SRM type. - * + * * @author Alberto Forti * @author Cnaf -INFN Bologna * @date July, 2006 @@ -19,141 +18,134 @@ public class TTransferParameters implements Serializable { - private static final long serialVersionUID = 7309411351545907539L; - - public static final String PNAME_transferParameters = "transferParameters"; - - private TAccessPattern accessPattern = null; - private TConnectionType connectionType = null; - private String[] arrayOfClientNetworks; - private String[] arrayOfTransferProtocols; - - public TTransferParameters() { - - } - - public TTransferParameters(TAccessPattern accessPattern, - TConnectionType connectionType, String[] arrayOfClientNetworks, - String[] arrayOfTransferProtocols) { - - this.accessPattern = accessPattern; - this.connectionType = connectionType; - this.arrayOfClientNetworks = arrayOfClientNetworks; - this.arrayOfTransferProtocols = arrayOfTransferProtocols; - } - - /** - * Fills this class using the values found in a structure inside a Hashtable. - * The Hashtable may contain different structures inside, all are identifiend - * by a name. Used for communication with the FE. - * - * @param inputParam - * Hashtable to read. - * @param fieldName - * Name that identifies the TTransferParameters structure in the - * Hashtable. - * @return A new TTransferParameters instance. - */ - public static TTransferParameters decode(Map inputParam, String fieldName) { - - Map param; - param = (Map) inputParam.get(fieldName); - if (param == null) { - return null; - } - - return TTransferParameters.decode(param); - } - - /** - * Fills this class using a Hashtable structure. The Hashtable contains only - * the TTransferParameters fields. Used for communication with the FE. - * - * @param param - * Hashtable to read. 
- * @return A new TTransferParameters instance - */ - public static TTransferParameters decode(Map param) { - - String[] clientNetworks = null; - String[] transferProtocols = null; - String memberName; - List vector = null; - - TAccessPattern accPatt = TAccessPattern.decode(param, - TAccessPattern.PNAME_accessPattern); - - TConnectionType connType = TConnectionType.decode(param, - TConnectionType.PNAME_connectionType); - - memberName = new String("arrayOfClientNetworks"); - try { - vector = Arrays.asList((Object[]) param.get(memberName)); - } catch (NullPointerException e) { - } - - if (vector != null) { - int arraySize = vector.size(); - clientNetworks = new String[arraySize]; - for (int i = 0; i < arraySize; i++) { - clientNetworks[i] = (String) vector.get(i); - } - } - - memberName = new String("arrayOfTransferProtocols"); - try { - vector = Arrays.asList((Object[]) param.get(memberName)); - } catch (NullPointerException e) { - } - if (vector != null) { - int arraySize = vector.size(); - transferProtocols = new String[arraySize]; - for (int i = 0; i < arraySize; i++) { - transferProtocols[i] = (String) vector.get(i); - } - } - - return new TTransferParameters(accPatt, connType, clientNetworks, - transferProtocols); - } - - /** - * Returns the accessPattern value - * - * @return accessPattern is of type TAccessPattern - */ - public TAccessPattern getAccessPattern() { - - return accessPattern; - } - - /** - * Returns the connectionType value - * - * @return connectionType is of type TConnectionType - */ - public TConnectionType getConnectionType() { - - return connectionType; - } - - /** - * Returns the arrayOfClientNetworks field - * - * @return arrayOfClientNetworks is of type String[] - */ - public String[] getArrayOfClientNetworks() { - - return arrayOfClientNetworks; - } - - /** - * Returns the arrayOfTransferProtocols value - * - * @return arrayOfTransferProtocols is of type String[] - */ - public String[] getArrayOfTransferProtocols() { - - return arrayOfTransferProtocols; - } + private static final long serialVersionUID = 7309411351545907539L; + + public static final String PNAME_transferParameters = "transferParameters"; + + private TAccessPattern accessPattern = null; + private TConnectionType connectionType = null; + private String[] arrayOfClientNetworks; + private String[] arrayOfTransferProtocols; + + public TTransferParameters() {} + + public TTransferParameters( + TAccessPattern accessPattern, + TConnectionType connectionType, + String[] arrayOfClientNetworks, + String[] arrayOfTransferProtocols) { + + this.accessPattern = accessPattern; + this.connectionType = connectionType; + this.arrayOfClientNetworks = arrayOfClientNetworks; + this.arrayOfTransferProtocols = arrayOfTransferProtocols; + } + + /** + * Fills this class using the values found in a structure inside a Hashtable. The Hashtable may + * contain different structures inside, all are identifiend by a name. Used for communication with + * the FE. + * + * @param inputParam Hashtable to read. + * @param fieldName Name that identifies the TTransferParameters structure in the Hashtable. + * @return A new TTransferParameters instance. + */ + public static TTransferParameters decode(Map inputParam, String fieldName) { + + Map param; + param = (Map) inputParam.get(fieldName); + if (param == null) { + return null; + } + + return TTransferParameters.decode(param); + } + + /** + * Fills this class using a Hashtable structure. The Hashtable contains only the + * TTransferParameters fields. Used for communication with the FE. 
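+   * As an illustrative sketch (key names and values are examples only), an entry named
+   * "arrayOfTransferProtocols" holding an Object[] such as {"gsiftp", "https"} ends up in the
+   * String[] returned by getArrayOfTransferProtocols().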
+ * + * @param param Hashtable to read. + * @return A new TTransferParameters instance + */ + public static TTransferParameters decode(Map param) { + + String[] clientNetworks = null; + String[] transferProtocols = null; + String memberName; + List vector = null; + + TAccessPattern accPatt = TAccessPattern.decode(param, TAccessPattern.PNAME_accessPattern); + + TConnectionType connType = TConnectionType.decode(param, TConnectionType.PNAME_connectionType); + + memberName = new String("arrayOfClientNetworks"); + try { + vector = Arrays.asList((Object[]) param.get(memberName)); + } catch (NullPointerException e) { + } + + if (vector != null) { + int arraySize = vector.size(); + clientNetworks = new String[arraySize]; + for (int i = 0; i < arraySize; i++) { + clientNetworks[i] = (String) vector.get(i); + } + } + + memberName = new String("arrayOfTransferProtocols"); + try { + vector = Arrays.asList((Object[]) param.get(memberName)); + } catch (NullPointerException e) { + } + if (vector != null) { + int arraySize = vector.size(); + transferProtocols = new String[arraySize]; + for (int i = 0; i < arraySize; i++) { + transferProtocols[i] = (String) vector.get(i); + } + } + + return new TTransferParameters(accPatt, connType, clientNetworks, transferProtocols); + } + + /** + * Returns the accessPattern value + * + * @return accessPattern is of type TAccessPattern + */ + public TAccessPattern getAccessPattern() { + + return accessPattern; + } + + /** + * Returns the connectionType value + * + * @return connectionType is of type TConnectionType + */ + public TConnectionType getConnectionType() { + + return connectionType; + } + + /** + * Returns the arrayOfClientNetworks field + * + * @return arrayOfClientNetworks is of type String[] + */ + public String[] getArrayOfClientNetworks() { + + return arrayOfClientNetworks; + } + + /** + * Returns the arrayOfTransferProtocols value + * + * @return arrayOfTransferProtocols is of type String[] + */ + public String[] getArrayOfTransferProtocols() { + + return arrayOfTransferProtocols; + } } diff --git a/src/main/java/it/grid/storm/srm/types/TUserID.java b/src/main/java/it/grid/storm/srm/types/TUserID.java index c8bfdb10..71afa789 100644 --- a/src/main/java/it/grid/storm/srm/types/TUserID.java +++ b/src/main/java/it/grid/storm/srm/types/TUserID.java @@ -1,68 +1,63 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the TUserID managed by Srm. 
- * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - package it.grid.storm.srm.types; import java.io.Serializable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TUserID implements Serializable { - private static final long serialVersionUID = -7547071983406828938L; - private static final Logger log = LoggerFactory.getLogger(TUserID.class); - public static String PNAME_USERID = "userID"; - public static String PNAME_OWNER = "owner"; - - private String userID = new String(); + private static final long serialVersionUID = -7547071983406828938L; + private static final Logger log = LoggerFactory.getLogger(TUserID.class); + public static String PNAME_USERID = "userID"; + public static String PNAME_OWNER = "owner"; - // TO Complete with Exception if null string speified - public TUserID(String id) throws InvalidTUserIDAttributeException { + private String userID = new String(); - if ((id == null) || (id.length() == 0)) - throw new InvalidTUserIDAttributeException(id); - userID = id; - } + // TO Complete with Exception if null string speified + public TUserID(String id) throws InvalidTUserIDAttributeException { - public static TUserID makeEmpty() { + if ((id == null) || (id.length() == 0)) throw new InvalidTUserIDAttributeException(id); + userID = id; + } - try { + public static TUserID makeEmpty() { - return new TUserID("Unknown."); + try { - } catch (InvalidTUserIDAttributeException e) { + return new TUserID("Unknown."); - log.error("Invalid TUserID: {}", e.getMessage(), e); - } + } catch (InvalidTUserIDAttributeException e) { - return null; - } + log.error("Invalid TUserID: {}", e.getMessage(), e); + } - public String toString() { + return null; + } - return userID; - } + public String toString() { - public String getValue() { + return userID; + } - return userID; - } + public String getValue() { - public void encode(Map param, String name) { + return userID; + } - param.put(name, userID); - } + public void encode(Map param, String name) { + param.put(name, userID); + } } diff --git a/src/main/java/it/grid/storm/srm/types/TUserPermission.java b/src/main/java/it/grid/storm/srm/types/TUserPermission.java index 95bd94a5..849f5f99 100644 --- a/src/main/java/it/grid/storm/srm/types/TUserPermission.java +++ b/src/main/java/it/grid/storm/srm/types/TUserPermission.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.srm.types; @@ -9,65 +8,64 @@ /** * This class represents the TUserPermission in Srm request. 
- * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Avril, 2005 * @version 1.0 */ - public class TUserPermission { - private TUserID userID; - private TPermissionMode permissionMode; + private TUserID userID; + private TPermissionMode permissionMode; - public static String PNAME_OWNERPERMISSION = "ownerPermission"; + public static String PNAME_OWNERPERMISSION = "ownerPermission"; - public TUserPermission(TUserID userID, TPermissionMode permMode) { + public TUserPermission(TUserID userID, TPermissionMode permMode) { - this.userID = userID; - this.permissionMode = permMode; - } + this.userID = userID; + this.permissionMode = permMode; + } - public static TUserPermission makeEmpty() { + public static TUserPermission makeEmpty() { - return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.NONE); - } + return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.NONE); + } - public TUserID getUserID() { + public TUserID getUserID() { - return userID; - } + return userID; + } - public TPermissionMode getPermissionMode() { + public TPermissionMode getPermissionMode() { - return permissionMode; - } + return permissionMode; + } - public static TUserPermission makeDirectoryDefault() { + public static TUserPermission makeDirectoryDefault() { - return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.X); - } + return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.X); + } - public static TUserPermission makeFileDefault() { + public static TUserPermission makeFileDefault() { - return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.R); - } + return new TUserPermission(TUserID.makeEmpty(), TPermissionMode.R); + } - /** - * Encode method use to provide a represnetation of this object into a - * structures paramter for communication to FE component. - * - * @param param - * @param name - */ - public void encode(Map param, String name) { + /** + * Encode method use to provide a represnetation of this object into a structures paramter for + * communication to FE component. + * + * @param param + * @param name + */ + public void encode(Map param, String name) { - Map paramStructure = new HashMap(); - if ((userID != null) && (permissionMode != null)) { - userID.encode(paramStructure, TUserID.PNAME_USERID); - permissionMode.encode(paramStructure, TPermissionMode.PNAME_MODE); - param.put(name, paramStructure); - } - } + Map paramStructure = new HashMap(); + if ((userID != null) && (permissionMode != null)) { + userID.encode(paramStructure, TUserID.PNAME_USERID); + permissionMode.encode(paramStructure, TPermissionMode.PNAME_MODE); + param.put(name, paramStructure); + } + } } diff --git a/src/main/java/it/grid/storm/startup/Bootstrap.java b/src/main/java/it/grid/storm/startup/Bootstrap.java index d9133618..16615074 100644 --- a/src/main/java/it/grid/storm/startup/Bootstrap.java +++ b/src/main/java/it/grid/storm/startup/Bootstrap.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.startup; import ch.qos.logback.classic.LoggerContext; @@ -14,72 +11,66 @@ import it.grid.storm.authz.AuthzDirector; import it.grid.storm.authz.DirectorException; import it.grid.storm.info.SpaceInfoManager; - import java.io.File; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - */ +/** @author zappi */ public class Bootstrap { - - private static Logger log = LoggerFactory.getLogger(Bootstrap.class); - - - public static void configureLogging(String loggingConfigFilePath) { - - log.info("Configuring logging from {}", loggingConfigFilePath); - - File f = new File(loggingConfigFilePath); - - if (!f.exists() || !f.canRead()) { - - String message = String.format("Error loading logging configuration: " - + "'%s' does not exist or is not readable.",loggingConfigFilePath); - - log.error(message); - - throw new RuntimeException(message); - } - - LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory(); - JoranConfigurator configurator = new JoranConfigurator(); - - configurator.setContext(lc); - lc.reset(); - - try { - configurator.doConfigure(loggingConfigFilePath); - - } catch (JoranException e) { - - throw new RuntimeException(e); - - } finally { - - StatusPrinter.printInCaseOfErrorsOrWarnings(lc); - - } - } - - public static void initializePathAuthz(String pathAuthzDBFileName) - throws BootstrapException { - - try { - AuthzDirector.initializePathAuthz(pathAuthzDBFileName); - } catch (DirectorException e) { - - log.error("Unable to initialize the AuthzDirector: {}", - e.getMessage(), e); - - throw new BootstrapException("Unable to initialize the AuthzDirector",e); - } - } - - public static void initializeUsedSpace() { - - SpaceInfoManager.getInstance().initializeUsedSpace(); - } + + private static Logger log = LoggerFactory.getLogger(Bootstrap.class); + + public static void configureLogging(String loggingConfigFilePath) { + + log.info("Configuring logging from {}", loggingConfigFilePath); + + File f = new File(loggingConfigFilePath); + + if (!f.exists() || !f.canRead()) { + + String message = + String.format( + "Error loading logging configuration: " + "'%s' does not exist or is not readable.", + loggingConfigFilePath); + + log.error(message); + + throw new RuntimeException(message); + } + + LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory(); + JoranConfigurator configurator = new JoranConfigurator(); + + configurator.setContext(lc); + lc.reset(); + + try { + configurator.doConfigure(loggingConfigFilePath); + + } catch (JoranException e) { + + throw new RuntimeException(e); + + } finally { + + StatusPrinter.printInCaseOfErrorsOrWarnings(lc); + } + } + + public static void initializePathAuthz(String pathAuthzDBFileName) throws BootstrapException { + + try { + AuthzDirector.initializePathAuthz(pathAuthzDBFileName); + } catch (DirectorException e) { + + log.error("Unable to initialize the AuthzDirector: {}", e.getMessage(), e); + + throw new BootstrapException("Unable to initialize the AuthzDirector", e); + } + } + + public static void initializeUsedSpace() { + + SpaceInfoManager.getInstance().initializeUsedSpace(); + } } diff --git a/src/main/java/it/grid/storm/startup/BootstrapException.java b/src/main/java/it/grid/storm/startup/BootstrapException.java index dc4c993a..b6ffe7ed 100644 --- a/src/main/java/it/grid/storm/startup/BootstrapException.java +++ b/src/main/java/it/grid/storm/startup/BootstrapException.java @@ -1,30 +1,26 @@ /** - * Copyright (c) Istituto 
Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.startup; public class BootstrapException extends Exception { - private static final long serialVersionUID = -3495820491163614689L; + private static final long serialVersionUID = -3495820491163614689L; - public BootstrapException() { + public BootstrapException() {} - } + public BootstrapException(String message) { - public BootstrapException(String message) { + super(message); + } - super(message); - } + public BootstrapException(Throwable cause) { - public BootstrapException(Throwable cause) { + super(cause); + } - super(cause); - } - - public BootstrapException(String message, Throwable cause) { - - super(message, cause); - } + public BootstrapException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/synchcall/FileSystemUtility.java b/src/main/java/it/grid/storm/synchcall/FileSystemUtility.java index b9bbd24f..cc25127a 100644 --- a/src/main/java/it/grid/storm/synchcall/FileSystemUtility.java +++ b/src/main/java/it/grid/storm/synchcall/FileSystemUtility.java @@ -1,14 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall; import static it.grid.storm.filesystem.RandomWaitFilesystemAdapter.maybeWrapFilesystem; import static it.grid.storm.metrics.StormMetricRegistry.METRIC_REGISTRY; -import org.slf4j.Logger; - import it.grid.storm.filesystem.Filesystem; import it.grid.storm.filesystem.FilesystemIF; import it.grid.storm.filesystem.LocalFile; @@ -17,49 +14,46 @@ import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.model.VirtualFS; +import org.slf4j.Logger; public class FileSystemUtility { - private static Logger log = NamespaceDirector.getLogger(); + private static Logger log = NamespaceDirector.getLogger(); + + public static LocalFile getLocalFileByAbsolutePath(String absolutePath) + throws NamespaceException { + + LocalFile file = null; + VirtualFS vfs = null; + genericfs fsDriver = null; + FilesystemIF fs = null; + try { + vfs = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath(absolutePath); + } catch (NamespaceException ex) { + log.error("Unable to retrieve VFS by Absolute Path", ex); + } + if (vfs == null) { + throw new NamespaceException("No VFS found in StoRM for this file :'" + absolutePath + "'"); + } + + try { + fsDriver = (genericfs) (vfs.getFSDriver()).newInstance(); - public static LocalFile getLocalFileByAbsolutePath(String absolutePath) - throws NamespaceException { + FilesystemIF wrappedFs = new Filesystem(fsDriver); - LocalFile file = null; - VirtualFS vfs = null; - genericfs fsDriver = null; - FilesystemIF fs = null; - try { - vfs = NamespaceDirector.getNamespace().resolveVFSbyAbsolutePath( - absolutePath); - } catch (NamespaceException ex) { - log.error("Unable to retrieve VFS by Absolute Path", ex); - } - if (vfs == null) { - throw new NamespaceException("No VFS found in StoRM for this file :'" - + absolutePath + "'"); - } + wrappedFs = maybeWrapFilesystem(wrappedFs); - try { - fsDriver = (genericfs) (vfs.getFSDriver()).newInstance(); - - FilesystemIF wrappedFs = new Filesystem(fsDriver); - - wrappedFs = maybeWrapFilesystem(wrappedFs); - 
- fs = new MetricsFilesystemAdapter(wrappedFs, - METRIC_REGISTRY.getRegistry()); - - file = new LocalFile(absolutePath, fs); - } catch (NamespaceException ex1) { - log.error("Error while retrieving FS driver", ex1); - } catch (IllegalAccessException ex1) { - log.error("Error while using reflection in FS Driver", ex1); - } catch (InstantiationException ex1) { - log.error("Error while instancing new FS driver", ex1); - } + fs = new MetricsFilesystemAdapter(wrappedFs, METRIC_REGISTRY.getRegistry()); - return file; - } + file = new LocalFile(absolutePath, fs); + } catch (NamespaceException ex1) { + log.error("Error while retrieving FS driver", ex1); + } catch (IllegalAccessException ex1) { + log.error("Error while using reflection in FS Driver", ex1); + } catch (InstantiationException ex1) { + log.error("Error while instancing new FS driver", ex1); + } + return file; + } } diff --git a/src/main/java/it/grid/storm/synchcall/SimpleSynchcallDispatcher.java b/src/main/java/it/grid/storm/synchcall/SimpleSynchcallDispatcher.java index 569d8f3e..85814d35 100644 --- a/src/main/java/it/grid/storm/synchcall/SimpleSynchcallDispatcher.java +++ b/src/main/java/it/grid/storm/synchcall/SimpleSynchcallDispatcher.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall; import static it.grid.storm.metrics.StormMetricRegistry.METRIC_REGISTRY; import com.codahale.metrics.Timer; - import it.grid.storm.common.OperationType; import it.grid.storm.synchcall.command.Command; import it.grid.storm.synchcall.command.CommandFactory; @@ -17,33 +15,30 @@ /** * This class is part of the StoRM project. - * - * This class contains the logic to process the different synchcall request. - * This dispatcher simply execute a new request when it's just arrived. A more - * complex version can have thread pools and more complicated pattern. - * + * + *

This class contains the logic to process the different synchcall request. This dispatcher + * simply execute a new request when it's just arrived. A more complex version can have thread pools + * and more complicated pattern. + * * @author lucamag @date May 27, 2008 - * */ - public class SimpleSynchcallDispatcher implements SynchcallDispatcher { public static final String SYNCH_CALL_TIMER_NAME = "synch"; public OutputData processRequest(OperationType type, InputData inputData) - throws IllegalArgumentException, CommandException { + throws IllegalArgumentException, CommandException { - final Timer timer = METRIC_REGISTRY.getRegistry() - .timer(SYNCH_CALL_TIMER_NAME); + final Timer timer = METRIC_REGISTRY.getRegistry().timer(SYNCH_CALL_TIMER_NAME); // This provides metrics for all synch calls final Timer.Context synchContext = timer.time(); Command cmd = CommandFactory.getCommand(type); - - // This provides metrics for the specific synch call type + + // This provides metrics for the specific synch call type final Timer.Context context = type.getTimer().time(); - + try { return cmd.execute(inputData); } finally { @@ -51,5 +46,4 @@ public OutputData processRequest(OperationType type, InputData inputData) synchContext.stop(); } } - } diff --git a/src/main/java/it/grid/storm/synchcall/SynchcallDispatcher.java b/src/main/java/it/grid/storm/synchcall/SynchcallDispatcher.java index 1390d644..fb66eb3d 100644 --- a/src/main/java/it/grid/storm/synchcall/SynchcallDispatcher.java +++ b/src/main/java/it/grid/storm/synchcall/SynchcallDispatcher.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall; @@ -11,28 +10,24 @@ /** * This class is part of the StoRM project. - * - * This class contains the logic to process the different synchcall request. - * This dispatcher simply execute a new request when it's just arrived. A more - * complex version can have thread pools and more complicated pattern. - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * + * + *

This class contains the logic to process the different synchcall request. This dispatcher + * simply execute a new request when it's just arrived. A more complex version can have thread pools + * and more complicated pattern. + * + *
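+ * <p>An illustrative, non-normative call sequence (the input data is assumed to have been built
+ * elsewhere):
+ *
+ * <pre>{@code
+ * SynchcallDispatcher dispatcher = SynchcallDispatcherFactory.getDispatcher();
+ * OutputData outputData = dispatcher.processRequest(OperationType.PNG, inputData);
+ * }</pre>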

Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @date May 28, 2008 - * */ public interface SynchcallDispatcher { - /** - * @param type - * Type of the SRM request to execute. - * @param inputData - * InputDaata contining input information. - * @return outputData OutputData containing outputData. - * @throws IllegalArgumentException - */ - public abstract OutputData processRequest(OperationType type, - InputData inputData) throws IllegalArgumentException, CommandException; - -} \ No newline at end of file + /** + * @param type Type of the SRM request to execute. + * @param inputData InputDaata contining input information. + * @return outputData OutputData containing outputData. + * @throws IllegalArgumentException + */ + public abstract OutputData processRequest(OperationType type, InputData inputData) + throws IllegalArgumentException, CommandException; +} diff --git a/src/main/java/it/grid/storm/synchcall/SynchcallDispatcherFactory.java b/src/main/java/it/grid/storm/synchcall/SynchcallDispatcherFactory.java index fa42b475..a1d7a343 100644 --- a/src/main/java/it/grid/storm/synchcall/SynchcallDispatcherFactory.java +++ b/src/main/java/it/grid/storm/synchcall/SynchcallDispatcherFactory.java @@ -1,26 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall; /** * This class is part of the StoRM project. - * - * This class can choose the right dispatcher using configuration parameter etc. - * + * + *

This class can choose the right dispatcher using configuration parameter etc. + * * @author lucamag * @date May 27, 2008 - * */ public class SynchcallDispatcherFactory { - /** - * @return SynchcallDispatcher - */ + /** @return SynchcallDispatcher */ + public static SynchcallDispatcher getDispatcher() { - public static SynchcallDispatcher getDispatcher() { - - return new SimpleSynchcallDispatcher(); - } + return new SimpleSynchcallDispatcher(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/AbstractCommand.java b/src/main/java/it/grid/storm/synchcall/command/AbstractCommand.java index 892d3df2..052e1140 100644 --- a/src/main/java/it/grid/storm/synchcall/command/AbstractCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/AbstractCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; @@ -9,16 +8,15 @@ import it.grid.storm.synchcall.data.IdentityInputData; import it.grid.storm.synchcall.data.InputData; - public abstract class AbstractCommand implements Command { - - protected static Configuration config = Configuration.getInstance(); - public static GridUserInterface getUserFromInputData(InputData id){ - - if (id instanceof IdentityInputData){ - return ((IdentityInputData)id).getUser(); - } - return null; - } + protected static Configuration config = Configuration.getInstance(); + + public static GridUserInterface getUserFromInputData(InputData id) { + + if (id instanceof IdentityInputData) { + return ((IdentityInputData) id).getUser(); + } + return null; + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/Command.java b/src/main/java/it/grid/storm/synchcall/command/Command.java index 40247171..5a856bbc 100644 --- a/src/main/java/it/grid/storm/synchcall/command/Command.java +++ b/src/main/java/it/grid/storm/synchcall/command/Command.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; @@ -10,13 +9,9 @@ public interface Command { - /** - * - * @param inputData - * Contains information about input data for the request. - * @return OutputData Contains output data - */ - public OutputData execute(InputData inputData) - throws IllegalArgumentException, CommandException; - -} \ No newline at end of file + /** + * @param inputData Contains information about input data for the request. + * @return OutputData Contains output data + */ + public OutputData execute(InputData inputData) throws IllegalArgumentException, CommandException; +} diff --git a/src/main/java/it/grid/storm/synchcall/command/CommandFactory.java b/src/main/java/it/grid/storm/synchcall/command/CommandFactory.java index 095e107c..31bfae76 100644 --- a/src/main/java/it/grid/storm/synchcall/command/CommandFactory.java +++ b/src/main/java/it/grid/storm/synchcall/command/CommandFactory.java @@ -1,11 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.common.OperationType; import it.grid.storm.synchcall.command.datatransfer.AbortFilesCommand; import it.grid.storm.synchcall.command.datatransfer.AbortRequestCommand; @@ -26,72 +23,69 @@ import it.grid.storm.synchcall.command.space.GetSpaceTokensCommand; import it.grid.storm.synchcall.command.space.ReleaseSpaceCommand; import it.grid.storm.synchcall.command.space.ReserveSpaceCommand; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. - * + * * @author lucamag * @date May 27, 2008 */ - public class CommandFactory { - private static final Logger log = LoggerFactory - .getLogger(CommandFactory.class); - - public static Command getCommand(OperationType type) - throws IllegalArgumentException { - - switch (type) { - case RM: - return new RmCommand(); - case RMD: - return new RmdirCommand(); - case MKD: - return new MkdirCommand(); - case MV: - return new MvCommand(); - case LS: - return new LsCommand(); + private static final Logger log = LoggerFactory.getLogger(CommandFactory.class); - case PNG: - return new PingCommand(); + public static Command getCommand(OperationType type) throws IllegalArgumentException { - case GST: - return new GetSpaceTokensCommand(); - case GSM: - return new GetSpaceMetaDataCommand(); - case RESSP: - return new ReserveSpaceCommand(); - case RELSP: - return new ReleaseSpaceCommand(); + switch (type) { + case RM: + return new RmCommand(); + case RMD: + return new RmdirCommand(); + case MKD: + return new MkdirCommand(); + case MV: + return new MvCommand(); + case LS: + return new LsCommand(); - case PD: - return new PutDoneCommand(); - case RF: - return new ReleaseFilesCommand(); - case EFL: - return new ExtendFileLifeTimeCommand(); - case AF: - return new AbortFilesCommand(); - case AR: - return new AbortRequestCommand(); + case PNG: + return new PingCommand(); - case PTP: - return new PrepareToPutRequestCommand(); - case SPTP: - return new PrepareToPutRequestStatusCommand(); - case PTG: - return new PrepareToGetRequestCommand(); - case SPTG: - return new PrepareToGetRequestStatusCommand(); + case GST: + return new GetSpaceTokensCommand(); + case GSM: + return new GetSpaceMetaDataCommand(); + case RESSP: + return new ReserveSpaceCommand(); + case RELSP: + return new ReleaseSpaceCommand(); - default: - String msg = String.format("No command found for OperationType %s", type); - log.error(msg); - throw new IllegalArgumentException(msg); - } + case PD: + return new PutDoneCommand(); + case RF: + return new ReleaseFilesCommand(); + case EFL: + return new ExtendFileLifeTimeCommand(); + case AF: + return new AbortFilesCommand(); + case AR: + return new AbortRequestCommand(); - } + case PTP: + return new PrepareToPutRequestCommand(); + case SPTP: + return new PrepareToPutRequestStatusCommand(); + case PTG: + return new PrepareToGetRequestCommand(); + case SPTG: + return new PrepareToGetRequestStatusCommand(); + default: + String msg = String.format("No command found for OperationType %s", type); + log.error(msg); + throw new IllegalArgumentException(msg); + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/CommandHelper.java b/src/main/java/it/grid/storm/synchcall/command/CommandHelper.java index fe8efbb4..b7614a51 100644 --- a/src/main/java/it/grid/storm/synchcall/command/CommandHelper.java +++ 
b/src/main/java/it/grid/storm/synchcall/command/CommandHelper.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; @@ -12,159 +11,174 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.synchcall.data.DataHelper; import it.grid.storm.synchcall.data.InputData; - import java.util.HashMap; import java.util.List; import java.util.Map; - import org.slf4j.Logger; public class CommandHelper { - + public static final Map STATUS_MESSAGES; private static final String FAILED_STRING = "failed"; - static{ - + static { STATUS_MESSAGES = new HashMap(); STATUS_MESSAGES.put(TStatusCode.SRM_SUCCESS, "succesfully done"); STATUS_MESSAGES.put(TStatusCode.SRM_SPACE_AVAILABLE, "succesfully done"); STATUS_MESSAGES.put(TStatusCode.SRM_FILE_PINNED, "succesfully done"); STATUS_MESSAGES.put(TStatusCode.SRM_PARTIAL_SUCCESS, "partially succeded"); - } - private static String getStatusMessage(TStatusCode statusCode){ - + private static String getStatusMessage(TStatusCode statusCode) { + String result = STATUS_MESSAGES.get(statusCode); - if (result == null) - return FAILED_STRING; + if (result == null) return FAILED_STRING; return result; } - - public static TReturnStatus buildStatus(TStatusCode statusCode, - String explaination) throws IllegalArgumentException, IllegalStateException { - - if (statusCode == null) { - throw new IllegalArgumentException( - "Unable to build the status, null arguments: statusCode=" + statusCode); - } - if (explaination == null || explaination.isEmpty()) { - throw new IllegalArgumentException( - "Unable to build the status, null or empty argument: explaination=" - + explaination); - } - return new TReturnStatus(statusCode, explaination); - } - - public static TSURLReturnStatus buildStatus(TSURL surl, - TReturnStatus returnStatus) throws IllegalArgumentException, - IllegalStateException { - - if (surl == null || returnStatus == null) { - throw new IllegalArgumentException( - "Unable to build the status, null arguments: surl=" + surl - + " returnStatus=" + returnStatus); - } - return new TSURLReturnStatus(surl, returnStatus); - } - - public static void printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status) { - - log.info("{}: Request {} with: [status: {}]", - srmCommand, - getStatusMessage(status.getStatusCode()), - status); - } - - public static void printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData) { - - log.info("{}: user <{}> Request {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - getStatusMessage(status.getStatusCode()), - status); - - } - - public static void printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData, List surls) { - - log.info("{}: user <{}> Request for [SURL: {}] {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - surls, - getStatusMessage(status.getStatusCode()), - status); - } - - public static void printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData, TRequestToken token, ArrayOfSURLs surls) { - - log.info("{}: user <{}> Request for [token: {}] for [SURL: {}] {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - token, - surls, - getStatusMessage(status.getStatusCode()), - status); - } - - public static void 
printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData, TRequestToken token) { - - log.info("{}: user <{}> Request for [token: {}] {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - token, - getStatusMessage(status.getStatusCode()), - status); - - } - - - public static void printRequestOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData, TRequestToken token, - List surls) { - - log.info("{}: user<{}> Request for [token: {}] for [SURL: {}] {} with " - +" [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - token, - surls, - getStatusMessage(status.getStatusCode()), - status); - } - - public static void printSurlOutcome(String srmCommand, Logger log, - TReturnStatus status, InputData inputData, TSURL surl) { - - log.info("{}: user <{}> operation on [SURL: {}] {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - surl, - getStatusMessage(status.getStatusCode()), - status); - - } - - public static void printSurlOutcome(String srmCommand, - Logger log, - TReturnStatus status, - InputData inputData, - TRequestToken token, - TSURL surl) { - - log.info("{}: user <{}> operation for token [token:{}] on [SURL: {}] {} with: [status: {}]", - srmCommand, - DataHelper.getRequestor(inputData), - token, - surl, - getStatusMessage(status.getStatusCode()), - status); - } -} \ No newline at end of file + + public static TReturnStatus buildStatus(TStatusCode statusCode, String explaination) + throws IllegalArgumentException, IllegalStateException { + + if (statusCode == null) { + throw new IllegalArgumentException( + "Unable to build the status, null arguments: statusCode=" + statusCode); + } + if (explaination == null || explaination.isEmpty()) { + throw new IllegalArgumentException( + "Unable to build the status, null or empty argument: explaination=" + explaination); + } + return new TReturnStatus(statusCode, explaination); + } + + public static TSURLReturnStatus buildStatus(TSURL surl, TReturnStatus returnStatus) + throws IllegalArgumentException, IllegalStateException { + + if (surl == null || returnStatus == null) { + throw new IllegalArgumentException( + "Unable to build the status, null arguments: surl=" + + surl + + " returnStatus=" + + returnStatus); + } + return new TSURLReturnStatus(surl, returnStatus); + } + + public static void printRequestOutcome(String srmCommand, Logger log, TReturnStatus status) { + + log.info( + "{}: Request {} with: [status: {}]", + srmCommand, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printRequestOutcome( + String srmCommand, Logger log, TReturnStatus status, InputData inputData) { + + log.info( + "{}: user <{}> Request {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printRequestOutcome( + String srmCommand, + Logger log, + TReturnStatus status, + InputData inputData, + List surls) { + + log.info( + "{}: user <{}> Request for [SURL: {}] {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + surls, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printRequestOutcome( + String srmCommand, + Logger log, + TReturnStatus status, + InputData inputData, + TRequestToken token, + ArrayOfSURLs surls) { + + log.info( + "{}: user <{}> Request for [token: {}] for [SURL: {}] {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), 
+ token, + surls, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printRequestOutcome( + String srmCommand, + Logger log, + TReturnStatus status, + InputData inputData, + TRequestToken token) { + + log.info( + "{}: user <{}> Request for [token: {}] {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + token, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printRequestOutcome( + String srmCommand, + Logger log, + TReturnStatus status, + InputData inputData, + TRequestToken token, + List surls) { + + log.info( + "{}: user<{}> Request for [token: {}] for [SURL: {}] {} with " + " [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + token, + surls, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printSurlOutcome( + String srmCommand, Logger log, TReturnStatus status, InputData inputData, TSURL surl) { + + log.info( + "{}: user <{}> operation on [SURL: {}] {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + surl, + getStatusMessage(status.getStatusCode()), + status); + } + + public static void printSurlOutcome( + String srmCommand, + Logger log, + TReturnStatus status, + InputData inputData, + TRequestToken token, + TSURL surl) { + + log.info( + "{}: user <{}> operation for token [token:{}] on [SURL: {}] {} with: [status: {}]", + srmCommand, + DataHelper.getRequestor(inputData), + token, + surl, + getStatusMessage(status.getStatusCode()), + status); + } +} diff --git a/src/main/java/it/grid/storm/synchcall/command/DataTransferCommand.java b/src/main/java/it/grid/storm/synchcall/command/DataTransferCommand.java index b0705d0d..f6678656 100644 --- a/src/main/java/it/grid/storm/synchcall/command/DataTransferCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/DataTransferCommand.java @@ -1,10 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; - -public abstract class DataTransferCommand extends AbstractCommand { - -} +public abstract class DataTransferCommand extends AbstractCommand {} diff --git a/src/main/java/it/grid/storm/synchcall/command/DirectoryCommand.java b/src/main/java/it/grid/storm/synchcall/command/DirectoryCommand.java index 5184698c..58c38d44 100644 --- a/src/main/java/it/grid/storm/synchcall/command/DirectoryCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/DirectoryCommand.java @@ -1,10 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; - -public abstract class DirectoryCommand extends AbstractCommand { - -} +public abstract class DirectoryCommand extends AbstractCommand {} diff --git a/src/main/java/it/grid/storm/synchcall/command/DiscoveryCommand.java b/src/main/java/it/grid/storm/synchcall/command/DiscoveryCommand.java index 8be2900f..d667c97d 100644 --- a/src/main/java/it/grid/storm/synchcall/command/DiscoveryCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/DiscoveryCommand.java @@ -1,10 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; - -public abstract class DiscoveryCommand extends AbstractCommand { - -} +public abstract class DiscoveryCommand extends AbstractCommand {} diff --git a/src/main/java/it/grid/storm/synchcall/command/SpaceCommand.java b/src/main/java/it/grid/storm/synchcall/command/SpaceCommand.java index db93a43b..f3269340 100644 --- a/src/main/java/it/grid/storm/synchcall/command/SpaceCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/SpaceCommand.java @@ -1,10 +1,6 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command; - -public abstract class SpaceCommand extends AbstractCommand { - -} +public abstract class SpaceCommand extends AbstractCommand {} diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortExecutorInterface.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortExecutorInterface.java index 412db86f..64b16786 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortExecutorInterface.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortExecutorInterface.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the Abort Executor Interface for the SRM request Abort* - * + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 @@ -17,6 +16,5 @@ public interface AbortExecutorInterface { - public AbortGeneralOutputData doIt(AbortInputData inputData); - + public AbortGeneralOutputData doIt(AbortInputData inputData); } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortFilesCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortFilesCommand.java index e85e987c..77689534 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortFilesCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortFilesCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** * @author Magnoni Luca @@ -10,6 +9,10 @@ */ package it.grid.storm.synchcall.command.datatransfer; +import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; +import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; + import it.grid.storm.asynch.AdvancedPicker; import it.grid.storm.authz.AuthzException; import it.grid.storm.catalogs.RequestSummaryCatalog; @@ -33,11 +36,6 @@ import it.grid.storm.synchcall.data.datatransfer.AbortInputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - -import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; -import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,8 +43,7 @@ public class AbortFilesCommand extends DataTransferCommand implements Command { private static final String SRM_COMMAND = "srmAbortFiles"; - private static final Logger log = LoggerFactory - .getLogger(AbortFilesCommand.class); + private static final Logger log = LoggerFactory.getLogger(AbortFilesCommand.class); private AdvancedPicker advancedPicker; private AbortExecutorInterface executor; @@ -64,61 +61,62 @@ public OutputData execute(InputData data) { AbortFilesCommand.log.debug("Started AbortRequest function."); if (inputData == null - || inputData.getRequestToken() == null - || (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) && inputData - .getArrayOfSURLs() == null)) { + || inputData.getRequestToken() == null + || (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) + && inputData.getArrayOfSURLs() == null)) { log.debug("srmAbortFiles: Invalid input parameter specified"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Missing mandatory parameters"); + globalStatus = + new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Missing mandatory parameters"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.error("srmAbortFiles: <> Request for [token:] [SURL:] failed with " - + "[status: {}]", globalStatus); + log.error( + "srmAbortFiles: <> Request for [token:] [SURL:] failed with " + "[status: {}]", + globalStatus); return outputData; } - /********************************** Start to manage the request ***********************************/ + /** + * ******************************** Start to manage the request + * ********************************** + */ /** * We can identify 3 different phases of execution: - * - * 1) Look for the request into the pending DB table, in such case the - * request is still in SRM_QUEUED status and the AbortRequest can be - * satisfied simply removing the request from the pending table, updating - * the request status to SRM_ABORTED and copying it into the appropriate + * + *

1) Look for the request into the pending DB table, in such case the request is still in + * SRM_QUEUED status and the AbortRequest can be satisfied simply removing the request from the + * pending table, updating the request status to SRM_ABORTED and copying it into the appropriate * table. - * - * 2) If we are not in the first case, look for the request into the - * scheduler internal structures. If the request is found and removed, the - * request status into the appropriate table should be updated to - * SRM_ABORTED. - * - * 3) In this case the request to abort is under execution. The behaviour is - * different depending on the request type. For the SrmPrepareToPut and - * SrmPrepareToGet, we decide to wait until the ending of execution, and - * then perform a rollback and mark the request as SRM_ABORTED. In case of - * SrmCopy, we need to stop the Copy execution so the dedicated + * + *

2) If we are not in the first case, look for the request into the scheduler internal + * structures. If the request is found and removed, the request status into the appropriate + * table should be updated to SRM_ABORTED. + * + *

3) In this case the request to abort is under execution. The behaviour is different + * depending on the request type. For the SrmPrepareToPut and SrmPrepareToGet, we decide to wait + * until the ending of execution, and then perform a rollback and mark the request as + * SRM_ABORTED. In case of SrmCopy, we need to stop the Copy execution so the dedicated * AbortExecutor invoke an appropriate abort method. - * */ - TRequestToken requestToken = inputData.getRequestToken(); ArrayOfSURLs surlArray = inputData.getArrayOfSURLs(); AbortFilesCommand.log.debug("srmAbortFiles: requestToken={}", requestToken); - /****************************** PHASE (1) LOOKING INTO PENDING DB AND ADVANCED PICKER ***************************/ + /** + * **************************** PHASE (1) LOOKING INTO PENDING DB AND ADVANCED PICKER + * ************************** + */ /** - * Note: If a global request if found to be be in SRM_QUEUED status in the - * SummaryCatalog it means that both the global status and each chunk are - * still in SRM_QUEUED. There is not the possibility of partial execution, - * to abort it is sufficient transit both global status and each chunk in - * SRM_ABORTED. + * Note: If a global request if found to be be in SRM_QUEUED status in the SummaryCatalog it + * means that both the global status and each chunk are still in SRM_QUEUED. There is not the + * possibility of partial execution, to abort it is sufficient transit both global status and + * each chunk in SRM_ABORTED. */ if (inputData.getType().equals(AbortInputData.AbortType.ABORT_REQUEST)) { @@ -126,9 +124,8 @@ public OutputData execute(InputData data) { GridUserInterface user = getUserFromInputData(inputData); - SURLStatusManager manager = SURLStatusManagerFactory - .newSURLStatusManager(); - + SURLStatusManager manager = SURLStatusManagerFactory.newSURLStatusManager(); + boolean hasErrors = false; try { @@ -137,8 +134,7 @@ public OutputData execute(InputData data) { } catch (UnknownTokenException e) { hasErrors = true; - log.info("Unable to update surls status on token {}: {}", requestToken, - e.getMessage(), e); + log.info("Unable to update surls status on token {}: {}", requestToken, e.getMessage(), e); globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, "Invalid request token"); } catch (ExpiredTokenException e) { @@ -146,34 +142,37 @@ public OutputData execute(InputData data) { hasErrors = true; log.info("Expired token: {}. 
{}", requestToken, e.getMessage(), e); globalStatus = new TReturnStatus(SRM_REQUEST_TIMED_OUT, "Request expired"); - + } catch (AuthzException e) { hasErrors = true; log.info("Authorization error: {}", e.getMessage()); globalStatus = new TReturnStatus(SRM_AUTHORIZATION_FAILURE, e.getMessage()); - + } catch (IllegalArgumentException e) { - + hasErrors = true; log.info("Invalid request error: {}", e.getMessage()); globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, e.getMessage()); - + } finally { - + if (hasErrors) { - + outputData.setArrayOfFileStatuses(null); outputData.setReturnStatus(globalStatus); - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken()); return outputData; } } - RequestSummaryCatalog.getInstance().updateFromPreviousGlobalStatus( - requestToken, TStatusCode.SRM_REQUEST_QUEUED, TStatusCode.SRM_ABORTED, - "User aborted request!"); + RequestSummaryCatalog.getInstance() + .updateFromPreviousGlobalStatus( + requestToken, + TStatusCode.SRM_REQUEST_QUEUED, + TStatusCode.SRM_ABORTED, + "User aborted request!"); res = false; } else if (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES)) { @@ -188,8 +187,7 @@ public OutputData execute(InputData data) { arrayOfTSURLReturnStatus = new ArrayOfTSURLReturnStatus(); - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "Abort sucessfully completed."); + globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, "Abort sucessfully completed."); outputData.setReturnStatus(globalStatus); if (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES)) { @@ -197,38 +195,41 @@ public OutputData execute(InputData data) { TSURLReturnStatus surlRetStatus = new TSURLReturnStatus(); surlRetStatus.setSurl(surlArray.getTSURL(i)); - surlRetStatus.setStatus(new TReturnStatus(TStatusCode.SRM_SUCCESS, - "File request aborted.")); + surlRetStatus.setStatus( + new TReturnStatus(TStatusCode.SRM_SUCCESS, "File request aborted.")); - CommandHelper.printSurlOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken(), surlArray.getTSURL(i)); + CommandHelper.printSurlOutcome( + SRM_COMMAND, + log, + globalStatus, + inputData, + inputData.getRequestToken(), + surlArray.getTSURL(i)); arrayOfTSURLReturnStatus.addTSurlReturnStatus(surlRetStatus); } - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken(), surlArray); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken(), surlArray); } else { outputData.setArrayOfFileStatuses(null); - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken()); } return outputData; } - /******** Phase 1.B Look in the AdvancedPicker ************/ + /** ****** Phase 1.B Look in the AdvancedPicker *********** */ /** - * Note: There is the possibility that the global request status is changed - * in SRM_IN_PROGESS but each chunk is not really yet executed, (each chunk - * status is still in SRM_QUEUED). The only component able to manage this - * situation is the advanced picker. There is not the possibility of partial - * execution, to abort it is sufficient ask to advancePicker to transit both - * global status and each chunk in SRM_ABORTED. 
+ * Note: There is the possibility that the global request status is changed in SRM_IN_PROGESS + * but each chunk is not really yet executed, (each chunk status is still in SRM_QUEUED). The + * only component able to manage this situation is the advanced picker. There is not the + * possibility of partial execution, to abort it is sufficient ask to advancePicker to transit + * both global status and each chunk in SRM_ABORTED. */ - if (inputData.getType().equals(AbortInputData.AbortType.ABORT_REQUEST)) { log.debug("Phase (1.B) AbortRequest: SurlArray Not specified."); advancedPicker.abortRequest(inputData.getRequestToken()); @@ -245,41 +246,40 @@ public OutputData execute(InputData data) { arrayOfTSURLReturnStatus = new ArrayOfTSURLReturnStatus(); - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "Abort sucessfully completed."); + globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, "Abort sucessfully completed."); outputData.setReturnStatus(globalStatus); if (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES)) { for (int i = 0; i < surlArray.size(); i++) { TSURLReturnStatus surlRetStatus = new TSURLReturnStatus(); surlRetStatus.setSurl(surlArray.getTSURL(i)); - surlRetStatus.setStatus(new TReturnStatus(TStatusCode.SRM_SUCCESS, - "File request aborted.")); + surlRetStatus.setStatus( + new TReturnStatus(TStatusCode.SRM_SUCCESS, "File request aborted.")); - CommandHelper.printSurlOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken(), surlArray.getTSURL(i)); + CommandHelper.printSurlOutcome( + SRM_COMMAND, + log, + globalStatus, + inputData, + inputData.getRequestToken(), + surlArray.getTSURL(i)); arrayOfTSURLReturnStatus.addTSurlReturnStatus(surlRetStatus); } - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken(), surlArray); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken(), surlArray); } else { outputData.setArrayOfFileStatuses(null); - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, - inputData, inputData.getRequestToken()); - + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken()); } return outputData; } - - - TRequestType rtype = RequestSummaryCatalog.getInstance().typeOf( - requestToken); - + TRequestType rtype = RequestSummaryCatalog.getInstance().typeOf(requestToken); if (rtype == TRequestType.PREPARE_TO_GET) { executor = new PtGAbortExecutor(); @@ -291,11 +291,13 @@ public OutputData execute(InputData data) { } else { log.debug("srmAbortFiles : Invalid input parameter specified"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token. Abort only works for PtG and PtP."); + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, + "Invalid request token. 
Abort only works for PtG and PtP."); - CommandHelper.printRequestOutcome(SRM_COMMAND, log, globalStatus, inputData, - inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, globalStatus, inputData, inputData.getRequestToken()); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); @@ -303,5 +305,4 @@ public OutputData execute(InputData data) { } } } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortRequestCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortRequestCommand.java index dc82e87a..2bde693e 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortRequestCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/AbortRequestCommand.java @@ -1,9 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; +import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; +import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; +import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_QUEUED; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; +import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; + import it.grid.storm.asynch.AdvancedPicker; import it.grid.storm.authz.AuthzException; import it.grid.storm.catalogs.RequestSummaryCatalog; @@ -23,42 +29,25 @@ import it.grid.storm.synchcall.data.datatransfer.AbortRequestOutputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - -import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; -import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; -import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_QUEUED; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; -import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * - * - * Authors: - * + * + *

Authors: + * * @author lucamag luca.magnoniATcnaf.infn.it - * * @date = Oct 10, 2008 - * */ - public class AbortRequestCommand extends DataTransferCommand implements Command { - private static final Logger log = LoggerFactory - .getLogger(AbortRequestCommand.class); + private static final Logger log = LoggerFactory.getLogger(AbortRequestCommand.class); private AdvancedPicker advancedPicker = null; private AbortExecutorInterface executor = null; - public AbortRequestCommand() { - - } + public AbortRequestCommand() {} public OutputData execute(InputData data) { @@ -71,16 +60,16 @@ public OutputData execute(InputData data) { AbortRequestCommand.log.debug("Started AbortRequest function."); - if (inputData == null || inputData.getRequestToken() == null - || (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES))) { + if (inputData == null + || inputData.getRequestToken() == null + || (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES))) { log.debug("SrmAbortRequest: Invalid input parameter specified"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Missing mandatory parameters"); + globalStatus = + new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Missing mandatory parameters"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log - .error( + log.error( "srmAbortRequest: <> Request for [token:] [SURL:] failed with [status: {}]", globalStatus); return outputData; @@ -91,12 +80,10 @@ public OutputData execute(InputData data) { if (inputData.getType().equals(AbortInputData.AbortType.ABORT_REQUEST)) { - AbortRequestCommand.log - .debug("Phase (1.A) AbortRequest: SurlArray Not specified."); + AbortRequestCommand.log.debug("Phase (1.A) AbortRequest: SurlArray Not specified."); GridUserInterface user = getUserFromInputData(inputData); - SURLStatusManager manager = SURLStatusManagerFactory - .newSURLStatusManager(); + SURLStatusManager manager = SURLStatusManagerFactory.newSURLStatusManager(); boolean hasErrors = false; try { @@ -125,26 +112,29 @@ public OutputData execute(InputData data) { globalStatus = new TReturnStatus(SRM_AUTHORIZATION_FAILURE, e.getMessage()); } catch (IllegalArgumentException e) { - + hasErrors = true; log.info("Invalid request error: {}", e.getMessage()); globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, e.getMessage()); - + } finally { - + if (hasErrors) { - + outputData.setArrayOfFileStatuses(null); outputData.setReturnStatus(globalStatus); - CommandHelper.printRequestOutcome("srmAbortRequest", log, globalStatus, - inputData, inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + "srmAbortRequest", log, globalStatus, inputData, inputData.getRequestToken()); return outputData; } } RequestSummaryCatalog.getInstance() - .updateFromPreviousGlobalStatus(inputData.getRequestToken(), SRM_REQUEST_QUEUED, - SRM_ABORTED, "User aborted request!"); + .updateFromPreviousGlobalStatus( + inputData.getRequestToken(), + SRM_REQUEST_QUEUED, + SRM_ABORTED, + "User aborted request!"); res = false; @@ -173,16 +163,14 @@ public OutputData execute(InputData data) { log.debug("Phase (1.B) AbortRequest: Token not found."); } - - TRequestType rtype = RequestSummaryCatalog.getInstance().typeOf( - requestToken); + TRequestType rtype = RequestSummaryCatalog.getInstance().typeOf(requestToken); if (rtype == TRequestType.EMPTY) { globalStatus = new TReturnStatus(SRM_SUCCESS, "Request aborted succesfully"); - CommandHelper.printRequestOutcome("srmAbortRequest", log, globalStatus, - inputData, 
inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + "srmAbortRequest", log, globalStatus, inputData, inputData.getRequestToken()); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); @@ -197,16 +185,16 @@ public OutputData execute(InputData data) { } else { log.debug("SrmAbortRequest : Invalid input parameter specified"); - globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, - "Invalid request token. Abort only works for PtG and PtP."); + globalStatus = + new TReturnStatus( + SRM_INVALID_REQUEST, "Invalid request token. Abort only works for PtG and PtP."); - CommandHelper.printRequestOutcome("srmAbortRequest", log, globalStatus, - inputData, inputData.getRequestToken()); + CommandHelper.printRequestOutcome( + "srmAbortRequest", log, globalStatus, inputData, inputData.getRequestToken()); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); return outputData; } } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/CommandException.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/CommandException.java index 9c281d79..376e3786 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/CommandException.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/CommandException.java @@ -1,37 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class CommandException extends Exception { - /** - * - */ - private static final long serialVersionUID = -2644088500951303729L; - - public CommandException() { - - } + /** */ + private static final long serialVersionUID = -2644088500951303729L; - public CommandException(String message) { + public CommandException() {} - super(message); - } + public CommandException(String message) { - public CommandException(Throwable cause) { + super(message); + } - super(cause); - } + public CommandException(Throwable cause) { - public CommandException(String message, Throwable cause) { + super(cause); + } - super(message, cause); - } + public CommandException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/ExtendFileLifeTimeCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/ExtendFileLifeTimeCommand.java index 99f9c11f..8a051ed9 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/ExtendFileLifeTimeCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/ExtendFileLifeTimeCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -36,586 +35,630 @@ import it.grid.storm.synchcall.data.datatransfer.IdentityExtendFileLifeTimeInputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - import java.util.Calendar; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * Authors: - * + * + *

Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it * @author Alberto Forti * @date = Oct 10, 2008 */ -public class ExtendFileLifeTimeCommand extends DataTransferCommand implements - Command { - - private static final Logger log = LoggerFactory - .getLogger(ExtendFileLifeTimeCommand.class); - private static final String SRM_COMMAND = "srmExtendFileLifeTime"; - - public ExtendFileLifeTimeCommand() { - - }; - - /** - * Executes an srmExtendFileLifeTime(). - * - * @param inputData - * ExtendFileLifeTimeInputData - * @return ExtendFileLifeTimeOutputData - */ - - public OutputData execute(InputData data) { - - final String funcName = "ExtendFileLifeTime: "; - ExtendFileLifeTimeOutputData outputData = new ExtendFileLifeTimeOutputData(); - IdentityExtendFileLifeTimeInputData inputData; - if (data instanceof IdentityInputData) { - inputData = (IdentityExtendFileLifeTimeInputData) data; - } else { - outputData.setReturnStatus(CommandHelper.buildStatus( - TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" - + SRM_COMMAND)); - outputData.setArrayOfFileStatuses(null); - printRequestOutcome(outputData.getReturnStatus(), - (ExtendFileLifeTimeInputData) data); - return outputData; - } - - TReturnStatus globalStatus = null; - - ExtendFileLifeTimeCommand.log.debug(funcName + "Started."); - - /****************************** Check for malformed request ******************************/ - if (inputData.getArrayOfSURLs() == null) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Missing mandatory parameter 'arrayOfSURLs'"); - } else if (inputData.getArrayOfSURLs().size() < 1) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Parameter 'arrayOfSURLs': invalid size"); - } else if (!(inputData.getNewPinLifetime().isEmpty()) - && !(inputData.getNewFileLifetime().isEmpty()) - && (inputData.getRequestToken() != null)) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Cannot update both FileLifetime and PinLifetime"); - } else if (inputData.getNewPinLifetime().isEmpty() - && !(inputData.getNewFileLifetime().isEmpty()) - && (inputData.getRequestToken() != null)) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Do not specify the request token to update the FileLifetime"); - } else if (!(inputData.getNewPinLifetime().isEmpty()) - && !(inputData.getNewFileLifetime().isEmpty()) - && (inputData.getRequestToken() == null)) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Attempt to extend PinLifetime without request token"); - } else if (!(inputData.getNewPinLifetime().isEmpty()) - && inputData.getNewFileLifetime().isEmpty() - && (inputData.getRequestToken() == null)) { - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Attempt to extend PinLifetime without request token"); - } - - if (globalStatus != null) { - ExtendFileLifeTimeCommand.log.debug(funcName - + globalStatus.getExplanation()); - outputData.setReturnStatus(globalStatus); - outputData.setArrayOfFileStatuses(null); - printRequestOutcome(outputData.getReturnStatus(), inputData); - return outputData; - } - - /********************** Check user authentication and authorization ******************************/ - GridUserInterface user = inputData.getUser(); - if (user == null) { - ExtendFileLifeTimeCommand.log.debug(funcName + "The user field is NULL"); - outputData.setReturnStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHENTICATION_FAILURE, - "Unable to get user credential!")); - 
printRequestOutcome(outputData.getReturnStatus(), inputData); - outputData.setArrayOfFileStatuses(null); - return outputData; - } - - /********************************** Start to manage the request ***********************************/ - ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatus = new ArrayOfTSURLLifetimeReturnStatus(); - - if ((inputData.getRequestToken() == null) - && (inputData.getNewPinLifetime().isEmpty())) { - log.debug(funcName + "Extending SURL lifetime..."); - globalStatus = manageExtendSURLLifetime(inputData.getNewFileLifetime(), - inputData.getArrayOfSURLs(), user, arrayOfFileStatus, - inputData.getRequestToken()); - } else { - log.debug(funcName + "Extending PIN lifetime..."); - try { - globalStatus = manageExtendPinLifetime(inputData.getRequestToken(), - inputData.getNewPinLifetime(), inputData.getArrayOfSURLs(), user, - arrayOfFileStatus); - } catch (IllegalArgumentException e) { - log.error(funcName + "Unexpected IllegalArgumentException: " - + e.getMessage()); - globalStatus = CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, "Request Failed, retry."); - outputData.setReturnStatus(globalStatus); - outputData.setArrayOfFileStatuses(null); - printRequestOutcome(outputData.getReturnStatus(), inputData); - return outputData; - } - } - - outputData.setReturnStatus(globalStatus); - outputData.setArrayOfFileStatuses(arrayOfFileStatus); - printRequestOutcome(outputData.getReturnStatus(), inputData); - log.debug(funcName + "Finished."); - - return outputData; - } - - /** - * Extend the lifetime of a SURL. The parameter details is filled by this - * method and contains file level information on the execution of the request. - * - * @param newLifetime - * TLifeTimeInSeconds. - * @param arrayOfSURLS - * ArrayOfSURLs. - * @param guser - * VomsGridUser. - * @param arrayOfFileLifetimeStatus - * . ArrayOfTSURLLifetimeReturnStatus The returned file level - * information. - * @return TReturnStatus. The request status. - */ - private TReturnStatus manageExtendSURLLifetime( - TLifeTimeInSeconds newLifetime, ArrayOfSURLs arrayOfSURLS, - GridUserInterface guser, ArrayOfTSURLLifetimeReturnStatus details, - TRequestToken requestToken) { - - if (details == null) { - ExtendFileLifeTimeCommand.log - .debug("Function manageExtendSURLLifetime, class ExtendFileLifeTimeExecutor: parameter details is NULL"); - } - NamespaceInterface namespace = NamespaceDirector.getNamespace(); - VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance(); - boolean requestSuccess = true; - boolean requestFailure = true; - - // For each requested SURL, try to extend its lifetime. 
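Note (reviewer aside, not part of the patch): the two flags initialised above (requestSuccess and requestFailure both start as true) drive the global status computed after the per-SURL loop; requestFailure survives only if every SURL failed, requestSuccess only if every SURL succeeded. A minimal sketch of that aggregation using the TReturnStatus/TStatusCode API visible in this file; the helper name is illustrative:

  // Sketch only: fold the per-SURL outcomes into the request-level status.
  private static TReturnStatus aggregateOutcome(boolean requestFailure, boolean requestSuccess) {
    if (requestFailure) {
      return new TReturnStatus(TStatusCode.SRM_FAILURE, "All file requests are failed");
    }
    if (requestSuccess) {
      return new TReturnStatus(TStatusCode.SRM_SUCCESS,
          "All file requests are successfully completed");
    }
    return new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, "Details are on the file statuses");
  }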
- for (int i = 0; i < arrayOfSURLS.size(); i++) { - TSURL surl = arrayOfSURLS.getTSURL(i); - StoRI stori = null; - TStatusCode fileStatusCode; - String fileStatusExplanation; - try { - try { - stori = namespace.resolveStoRIbySURL(surl, guser); - } catch (IllegalArgumentException e) { - ExtendFileLifeTimeCommand.log.error( - "Unable to build StoRI by SURL and user", e); - fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; - fileStatusExplanation = "Unable to build StoRI by SURL and user"; - } catch (UnapprochableSurlException e) { - log.info("Unable to build a stori for surl " + surl + " for user " - + guser + " UnapprochableSurlException: " + e.getMessage()); - fileStatusCode = TStatusCode.SRM_AUTHORIZATION_FAILURE; - fileStatusExplanation = e.getMessage(); - } catch (NamespaceException e) { - log.info("Unable to build a stori for surl " + surl + " for user " - + guser + " NamespaceException: " + e.getMessage()); - fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; - fileStatusExplanation = e.getMessage(); - } catch (InvalidSURLException e) { - log.info("Unable to build a stori for surl " + surl + " for user " - + guser + " InvalidSURLException: " + e.getMessage()); - fileStatusCode = TStatusCode.SRM_INVALID_PATH; - fileStatusExplanation = e.getMessage(); - } - if (stori != null) { - LocalFile localFile = stori.getLocalFile(); - if (localFile.exists()) { - ExtendFileLifeTimeCommand.log.debug(stori.getPFN().toString()); - List volatileInfo = catalog.volatileInfoOn(stori.getPFN()); - if (volatileInfo.isEmpty()) { - fileStatusCode = TStatusCode.SRM_SUCCESS; - fileStatusExplanation = "Nothing to do, SURL is permanent"; - newLifetime = TLifeTimeInSeconds.makeInfinite(); - requestFailure = false; - } else if (volatileInfo.size() > 2) { - fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; - fileStatusExplanation = "Found more than one entry.... that's a BUG."; - // For lifetimes infinite means also unknown - newLifetime = TLifeTimeInSeconds.makeInfinite(); - requestSuccess = false; - } else if (isStoRISURLBusy(stori)) { - fileStatusCode = TStatusCode.SRM_FILE_BUSY; - fileStatusExplanation = "File status is SRM_SPACE_AVAILABLE. SURL lifetime cannot be extend (try with PIN lifetime)"; - // For lifetimes infinite means also unknown - newLifetime = TLifeTimeInSeconds.makeInfinite(); - requestSuccess = false; - } else { // Ok, extend the lifetime of the SURL - // Update the DB with the new lifetime - catalog.trackVolatile(stori.getPFN(), - (Calendar) volatileInfo.get(0), newLifetime); - // TODO: return the correct lifetime, i.e. the one which is - // written to the DB. - // TLifeTimeInSeconds writtenLifetime = (TLifeTimeInSeconds) - // volatileInfo.get(1); - - fileStatusCode = TStatusCode.SRM_SUCCESS; - fileStatusExplanation = "Lifetime extended"; - requestFailure = false; - } - } else { // Requested SURL does not exists in the filesystem - fileStatusCode = TStatusCode.SRM_INVALID_PATH; - fileStatusExplanation = "File does not exist"; - requestSuccess = false; - } - - // Set the file level information to be returned. 
- TReturnStatus fileStatus = new TReturnStatus(fileStatusCode, - fileStatusExplanation); - if (fileStatus.getStatusCode().equals(TStatusCode.SRM_SUCCESS)) { - ExtendFileLifeTimeCommand.log.info("srmExtendFileLifeTime: <" - + guser + "> Request for [token:" + requestToken + "] for [SURL:" - + surl + "] with [lifetime:" + newLifetime - + " ] successfully done with: [status:" + fileStatus + "]"); - } else { - ExtendFileLifeTimeCommand.log.error("srmExtendFileLifeTime: <" - + guser + "> Request for [token:" + requestToken + "] for [SURL:" - + surl + "] with [lifetime:" + newLifetime - + "] failed with: [status:" + fileStatus + "]"); - } - TSURLLifetimeReturnStatus lifetimeReturnStatus = new TSURLLifetimeReturnStatus( - surl, fileStatus, newLifetime, null); - details.addTSurlReturnStatus(lifetimeReturnStatus); - } - } catch (InvalidTSURLLifetimeReturnStatusAttributeException e3) { - ExtendFileLifeTimeCommand.log - .debug("Thrown InvalidTSURLLifetimeReturnStatusAttributeException"); - } - } - TReturnStatus globalStatus = null; - // Set global status - if (requestFailure) { - globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, - "All file requests are failed"); - } else if (requestSuccess) { - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "All file requests are successfully completed"); - } else { - globalStatus = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Details are on the file statuses"); - } - return globalStatus; - } - - /** - * Returns true if the status of the SURL of the received StoRI is - * SRM_SPACE_AVAILABLE, false otherwise. This method queries the DB, therefore - * pay attention to possible performance issues. - * - * @return boolean - */ - private boolean isStoRISURLBusy(StoRI element) { - - SURLStatusManager checker = SURLStatusManagerFactory - .newSURLStatusManager(); - - return checker.isSURLBusy(element.getSURL()); - } - - /** - * Extend the PIN lifetime of a SURL. The parameter details is filled by this - * method and contains file level information on the execution of the request. - * - * @param requestToken - * TRequestToken. - * @param newPINLifetime - * TLifeTimeInSeconds. - * @param arrayOfSURLS - * ArrayOfSURLs. - * @param guser - * VomsGridUser. - * @param details - * ArrayOfTSURLLifetimeReturnStatus. - * @return TReturnStatus. The request status. 
- * @throws UnknownTokenException - * @throws IllegalArgumentException - */ - private TReturnStatus manageExtendPinLifetime(TRequestToken requestToken, - TLifeTimeInSeconds newPINLifetime, ArrayOfSURLs arrayOfSURLS, - GridUserInterface guser, ArrayOfTSURLLifetimeReturnStatus details) - throws IllegalArgumentException { - - if (details == null) { - ExtendFileLifeTimeCommand.log - .debug("Function manageExtendSURLLifetime, class ExtendFileLifeTimeExecutor: parameter details is NULL"); - } - TReturnStatus globalStatus = null; - List requestSURLsList; - try { - requestSURLsList = getListOfSURLsInTheRequest(guser, requestToken); - } catch (UnknownTokenException e4) { - return CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token"); - } catch (ExpiredTokenException e) { - return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, - "Request expired"); - } catch (AuthzException e) { - return CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - e.getMessage()); - } - if (requestSURLsList.isEmpty()) { - return CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token"); - } - // Once we have the list of SURLs belonging to the request, we must check - // that the SURLs given by the user are consistent, that the resulting - // lifetime could be lower than the one requested (and for this we must read - // the Volatile table of the DB), that the SURLs are not released, aborted, - // expired or suspended and so on... therefore the purpose of all that stuff - // is to return the right information. I mean, no PIN lifetime is - // effectively extend, in StoRM the TURL corresponds to the SURL. - boolean requestSuccess = true; - boolean requestFailure = true; - TLifeTimeInSeconds PINLifetime; - TLifeTimeInSeconds dbLifetime = null; - for (int i = 0; i < arrayOfSURLS.size(); i++) { - TSURL surl = arrayOfSURLS.getTSURL(i); - TStatusCode statusOfTheSURL = null; - TStatusCode fileStatusCode; - String fileStatusExplanation; - boolean surlFound = false; - // Check if the current SURL belongs to the request token - for (int j = 0; j < requestSURLsList.size(); j++) { - SURLData surlData = (SURLData) requestSURLsList.get(j); - if (surl.equals(surlData.surl)) { - statusOfTheSURL = surlData.statusCode; - requestSURLsList.remove(j); - surlFound = true; - break; - } - } - try { - if (surlFound) { - ExtendFileLifeTimeCommand.log.debug("Found SURL: " - + surl.getSURLString() + " (status: " + statusOfTheSURL.toString() - + ")"); - NamespaceInterface namespace = NamespaceDirector.getNamespace(); - StoRI stori = null; - try { - stori = namespace.resolveStoRIbySURL(surl, guser); - } catch (IllegalArgumentException e) { - log.error("Unable to build StoRI by SURL and user", e); - } catch (Exception e) { - log.info(String.format( - "Unable to build a stori for surl %s for user %s, %s: %s", surl, - guser, e.getClass().getCanonicalName(), e.getMessage())); - } - if (stori != null) { - LocalFile localFile = stori.getLocalFile(); - if (localFile.exists()) { - VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog - .getInstance(); - List volatileInfo = catalog.volatileInfoOn(stori.getPFN()); - - if ((statusOfTheSURL != TStatusCode.SRM_FILE_PINNED) - && (statusOfTheSURL != TStatusCode.SRM_SPACE_AVAILABLE) - && (statusOfTheSURL != TStatusCode.SRM_SUCCESS)) - { - fileStatusCode = TStatusCode.SRM_INVALID_REQUEST; - fileStatusExplanation = "No TURL available"; - PINLifetime = null; - requestSuccess = false; - } else if (volatileInfo.size() > 2) { - 
fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; - fileStatusExplanation = "Found more than one entry.... that's a BUG."; - // For lifetimes infinite means also unknown - PINLifetime = TLifeTimeInSeconds.makeInfinite(); - requestSuccess = false; - } else { // OK, extend the PIN lifetime. - // If the status is success the extension will not take place, - // only in case of empty parameter the current value are - // returned, otherwaise the request must - // fail! - - if ((statusOfTheSURL == TStatusCode.SRM_SUCCESS) - && (!newPINLifetime.isEmpty())) { - - fileStatusCode = TStatusCode.SRM_INVALID_REQUEST; - fileStatusExplanation = "No TURL available"; - PINLifetime = null; - requestSuccess = false; - - } else { - - fileStatusCode = TStatusCode.SRM_SUCCESS; - - if (volatileInfo.isEmpty()) { // SURL is permanent - dbLifetime = TLifeTimeInSeconds.makeInfinite(); - } else { - dbLifetime = (TLifeTimeInSeconds) volatileInfo.get(1); - } - if ((!dbLifetime.isInfinite()) - && (newPINLifetime.value() > dbLifetime.value())) { - PINLifetime = dbLifetime; - fileStatusExplanation = "The requested PIN lifetime is greater than the lifetime of the SURL." - + " PIN lifetime is now equal to the lifetime of the SURL."; - } else { - PINLifetime = newPINLifetime; - fileStatusExplanation = "Lifetime extended"; - } - ExtendFileLifeTimeCommand.log.debug("New PIN lifetime is: " - + PINLifetime.value() + "(SURL: " + surl.getSURLString() - + ")"); - // TODO: update the RequestSummaryCatalog with the new - // pinLifetime - // it is better to do it only once after the for loop - requestFailure = false; - } - } - } else { // file does not exist in the file system - fileStatusCode = TStatusCode.SRM_INVALID_PATH; - fileStatusExplanation = "Invalid path"; - PINLifetime = null; - requestSuccess = false; - - } - } else { - log.error("Unable to build StoRI by SURL and user"); - fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; - fileStatusExplanation = "Unable to build StoRI by SURL and user"; - // For lifetimes infinite means also unknown - PINLifetime = null; - requestSuccess = false; - } - } else { // SURL not found in the DB - ExtendFileLifeTimeCommand.log.debug("SURL: " + surl.getSURLString() - + " NOT FOUND!"); - fileStatusCode = TStatusCode.SRM_INVALID_PATH; - fileStatusExplanation = "SURL not found in the request"; - PINLifetime = null; - requestSuccess = false; - } - // Set the file level information to be returned. 
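Note (reviewer aside, not part of the patch): the branch above never grants a pin lifetime longer than the lifetime recorded for the SURL in the volatile catalog. A compact sketch of that capping rule, assuming only the value() and isInfinite() accessors of TLifeTimeInSeconds used in this file; the helper name is illustrative:

  // Sketch only: an infinite catalog lifetime means "no cap"; otherwise the
  // requested pin lifetime is clamped to the SURL lifetime.
  private static TLifeTimeInSeconds capPinLifetime(
      TLifeTimeInSeconds requested, TLifeTimeInSeconds surlLifetime) {
    if (!surlLifetime.isInfinite() && requested.value() > surlLifetime.value()) {
      return surlLifetime;
    }
    return requested;
  }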
- TReturnStatus fileStatus = new TReturnStatus(fileStatusCode, - fileStatusExplanation); - if (fileStatus.getStatusCode().equals(TStatusCode.SRM_SUCCESS)) { - ExtendFileLifeTimeCommand.log.info("srmExtendFileLifeTime: <" + guser - + "> Request for [token:" + requestToken + "] for [SURL:" + surl - + "] with [pinlifetime: " + newPINLifetime - + "] successfully done with: [status:" + fileStatus.toString() - + "]"); - } else { - ExtendFileLifeTimeCommand.log.error("srmExtendFileLifeTime: <" - + guser + "> Request for [token:" + requestToken + "] for [SURL:" - + surl + "] with [pinlifetime: " + newPINLifetime - + "] failed with: [status:" + fileStatus.toString() + "]"); - } - - TSURLLifetimeReturnStatus lifetimeReturnStatus = new TSURLLifetimeReturnStatus( - surl, fileStatus, dbLifetime, PINLifetime); - details.addTSurlReturnStatus(lifetimeReturnStatus); - } catch (InvalidTSURLLifetimeReturnStatusAttributeException e3) { - ExtendFileLifeTimeCommand.log - .debug("Thrown InvalidTSURLLifetimeReturnStatusAttributeException"); - } - } - - // Set global status - if (requestFailure) { - globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, - "All file requests are failed"); - } else if (requestSuccess) { - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "All file requests are successfully completed"); - } else { - globalStatus = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Details are on the file statuses"); - } - return globalStatus; - } - - /** - * Returns the list of SURLs and statuses (a List of SURLData) belonging to - * the request identified by the requestToken. - * - * @param requestToken - * TRequestToken - * @return List - * @throws UnknownTokenException - * @throws IllegalArgumentException - * @throws ExpiredTokenException - */ - private List getListOfSURLsInTheRequest(GridUserInterface user, - TRequestToken requestToken) - throws IllegalArgumentException, UnknownTokenException, - ExpiredTokenException { - - List listOfSURLsInfo = new LinkedList(); - - SURLStatusManager checker = SURLStatusManagerFactory - .newSURLStatusManager(); - - Map surlStatusMap = - checker.getSURLStatuses(user, requestToken); - - if (!(surlStatusMap == null || surlStatusMap.isEmpty())) { - for (Entry surlStatus : surlStatusMap.entrySet()) { - listOfSURLsInfo.add(new SURLData(surlStatus.getKey(), surlStatus - .getValue().getStatusCode())); - } - } - return listOfSURLsInfo; - } - - private void printRequestOutcome(TReturnStatus status, - ExtendFileLifeTimeInputData inputData) { - - if (inputData != null) { - if (inputData.getArrayOfSURLs() != null) { - if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, - inputData, inputData.getRequestToken(), inputData.getArrayOfSURLs() - .asStringList()); - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, - inputData, inputData.getArrayOfSURLs().asStringList()); - } - - } else { - if (inputData.getRequestToken() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, - inputData, inputData.getRequestToken()); - } else { - CommandHelper - .printRequestOutcome(SRM_COMMAND, log, status, inputData); - } - } - - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); - } - } - - private class SURLData { - - public TSURL surl; - public TStatusCode statusCode; - - public SURLData(TSURL surl, TStatusCode statusCode) { - - this.surl = surl; - this.statusCode = statusCode; - } - } +public class ExtendFileLifeTimeCommand extends DataTransferCommand implements 
Command { + + private static final Logger log = LoggerFactory.getLogger(ExtendFileLifeTimeCommand.class); + private static final String SRM_COMMAND = "srmExtendFileLifeTime"; + + public ExtendFileLifeTimeCommand() {}; + + /** + * Executes an srmExtendFileLifeTime(). + * + * @param inputData ExtendFileLifeTimeInputData + * @return ExtendFileLifeTimeOutputData + */ + public OutputData execute(InputData data) { + + final String funcName = "ExtendFileLifeTime: "; + ExtendFileLifeTimeOutputData outputData = new ExtendFileLifeTimeOutputData(); + IdentityExtendFileLifeTimeInputData inputData; + if (data instanceof IdentityInputData) { + inputData = (IdentityExtendFileLifeTimeInputData) data; + } else { + outputData.setReturnStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" + SRM_COMMAND)); + outputData.setArrayOfFileStatuses(null); + printRequestOutcome(outputData.getReturnStatus(), (ExtendFileLifeTimeInputData) data); + return outputData; + } + + TReturnStatus globalStatus = null; + + ExtendFileLifeTimeCommand.log.debug(funcName + "Started."); + + /** **************************** Check for malformed request ***************************** */ + if (inputData.getArrayOfSURLs() == null) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, "Missing mandatory parameter 'arrayOfSURLs'"); + } else if (inputData.getArrayOfSURLs().size() < 1) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, "Parameter 'arrayOfSURLs': invalid size"); + } else if (!(inputData.getNewPinLifetime().isEmpty()) + && !(inputData.getNewFileLifetime().isEmpty()) + && (inputData.getRequestToken() != null)) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, "Cannot update both FileLifetime and PinLifetime"); + } else if (inputData.getNewPinLifetime().isEmpty() + && !(inputData.getNewFileLifetime().isEmpty()) + && (inputData.getRequestToken() != null)) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, + "Do not specify the request token to update the FileLifetime"); + } else if (!(inputData.getNewPinLifetime().isEmpty()) + && !(inputData.getNewFileLifetime().isEmpty()) + && (inputData.getRequestToken() == null)) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, + "Attempt to extend PinLifetime without request token"); + } else if (!(inputData.getNewPinLifetime().isEmpty()) + && inputData.getNewFileLifetime().isEmpty() + && (inputData.getRequestToken() == null)) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, + "Attempt to extend PinLifetime without request token"); + } + + if (globalStatus != null) { + ExtendFileLifeTimeCommand.log.debug(funcName + globalStatus.getExplanation()); + outputData.setReturnStatus(globalStatus); + outputData.setArrayOfFileStatuses(null); + printRequestOutcome(outputData.getReturnStatus(), inputData); + return outputData; + } + + /** + * ******************** Check user authentication and authorization + * ***************************** + */ + GridUserInterface user = inputData.getUser(); + if (user == null) { + ExtendFileLifeTimeCommand.log.debug(funcName + "The user field is NULL"); + outputData.setReturnStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_AUTHENTICATION_FAILURE, "Unable to get user credential!")); + printRequestOutcome(outputData.getReturnStatus(), inputData); + outputData.setArrayOfFileStatuses(null); + return outputData; + } + + /** + * ******************************** Start to manage the 
request + * ********************************** + */ + ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatus = new ArrayOfTSURLLifetimeReturnStatus(); + + if ((inputData.getRequestToken() == null) && (inputData.getNewPinLifetime().isEmpty())) { + log.debug(funcName + "Extending SURL lifetime..."); + globalStatus = + manageExtendSURLLifetime( + inputData.getNewFileLifetime(), + inputData.getArrayOfSURLs(), + user, + arrayOfFileStatus, + inputData.getRequestToken()); + } else { + log.debug(funcName + "Extending PIN lifetime..."); + try { + globalStatus = + manageExtendPinLifetime( + inputData.getRequestToken(), + inputData.getNewPinLifetime(), + inputData.getArrayOfSURLs(), + user, + arrayOfFileStatus); + } catch (IllegalArgumentException e) { + log.error(funcName + "Unexpected IllegalArgumentException: " + e.getMessage()); + globalStatus = + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "Request Failed, retry."); + outputData.setReturnStatus(globalStatus); + outputData.setArrayOfFileStatuses(null); + printRequestOutcome(outputData.getReturnStatus(), inputData); + return outputData; + } + } + + outputData.setReturnStatus(globalStatus); + outputData.setArrayOfFileStatuses(arrayOfFileStatus); + printRequestOutcome(outputData.getReturnStatus(), inputData); + log.debug(funcName + "Finished."); + + return outputData; + } + + /** + * Extend the lifetime of a SURL. The parameter details is filled by this method and contains file + * level information on the execution of the request. + * + * @param newLifetime TLifeTimeInSeconds. + * @param arrayOfSURLS ArrayOfSURLs. + * @param guser VomsGridUser. + * @param arrayOfFileLifetimeStatus . ArrayOfTSURLLifetimeReturnStatus The returned file level + * information. + * @return TReturnStatus. The request status. + */ + private TReturnStatus manageExtendSURLLifetime( + TLifeTimeInSeconds newLifetime, + ArrayOfSURLs arrayOfSURLS, + GridUserInterface guser, + ArrayOfTSURLLifetimeReturnStatus details, + TRequestToken requestToken) { + + if (details == null) { + ExtendFileLifeTimeCommand.log.debug( + "Function manageExtendSURLLifetime, class ExtendFileLifeTimeExecutor: parameter details is NULL"); + } + NamespaceInterface namespace = NamespaceDirector.getNamespace(); + VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance(); + boolean requestSuccess = true; + boolean requestFailure = true; + + // For each requested SURL, try to extend its lifetime. 
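+ // Both flags start out true: each SURL handled successfully clears requestFailure, while each + // SURL that fails clears requestSuccess, so the global status computed after the loop can + // distinguish all-failed, all-successful and partial outcomes.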
+ for (int i = 0; i < arrayOfSURLS.size(); i++) { + TSURL surl = arrayOfSURLS.getTSURL(i); + StoRI stori = null; + TStatusCode fileStatusCode; + String fileStatusExplanation; + try { + try { + stori = namespace.resolveStoRIbySURL(surl, guser); + } catch (IllegalArgumentException e) { + ExtendFileLifeTimeCommand.log.error("Unable to build StoRI by SURL and user", e); + fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; + fileStatusExplanation = "Unable to build StoRI by SURL and user"; + } catch (UnapprochableSurlException e) { + log.info( + "Unable to build a stori for surl " + + surl + + " for user " + + guser + + " UnapprochableSurlException: " + + e.getMessage()); + fileStatusCode = TStatusCode.SRM_AUTHORIZATION_FAILURE; + fileStatusExplanation = e.getMessage(); + } catch (NamespaceException e) { + log.info( + "Unable to build a stori for surl " + + surl + + " for user " + + guser + + " NamespaceException: " + + e.getMessage()); + fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; + fileStatusExplanation = e.getMessage(); + } catch (InvalidSURLException e) { + log.info( + "Unable to build a stori for surl " + + surl + + " for user " + + guser + + " InvalidSURLException: " + + e.getMessage()); + fileStatusCode = TStatusCode.SRM_INVALID_PATH; + fileStatusExplanation = e.getMessage(); + } + if (stori != null) { + LocalFile localFile = stori.getLocalFile(); + if (localFile.exists()) { + ExtendFileLifeTimeCommand.log.debug(stori.getPFN().toString()); + List volatileInfo = catalog.volatileInfoOn(stori.getPFN()); + if (volatileInfo.isEmpty()) { + fileStatusCode = TStatusCode.SRM_SUCCESS; + fileStatusExplanation = "Nothing to do, SURL is permanent"; + newLifetime = TLifeTimeInSeconds.makeInfinite(); + requestFailure = false; + } else if (volatileInfo.size() > 2) { + fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; + fileStatusExplanation = "Found more than one entry... that's a BUG."; + // For lifetimes, infinite also means unknown + newLifetime = TLifeTimeInSeconds.makeInfinite(); + requestSuccess = false; + } else if (isStoRISURLBusy(stori)) { + fileStatusCode = TStatusCode.SRM_FILE_BUSY; + fileStatusExplanation = + "File status is SRM_SPACE_AVAILABLE. SURL lifetime cannot be extended (try with PIN lifetime)"; + // For lifetimes, infinite also means unknown + newLifetime = TLifeTimeInSeconds.makeInfinite(); + requestSuccess = false; + } else { // Ok, extend the lifetime of the SURL + // Update the DB with the new lifetime + catalog.trackVolatile(stori.getPFN(), (Calendar) volatileInfo.get(0), newLifetime); + // TODO: return the correct lifetime, i.e. the one which is + // written to the DB. + // TLifeTimeInSeconds writtenLifetime = (TLifeTimeInSeconds) + // volatileInfo.get(1); + + fileStatusCode = TStatusCode.SRM_SUCCESS; + fileStatusExplanation = "Lifetime extended"; + requestFailure = false; + } + } else { // Requested SURL does not exist in the filesystem + fileStatusCode = TStatusCode.SRM_INVALID_PATH; + fileStatusExplanation = "File does not exist"; + requestSuccess = false; + } + + // Set the file level information to be returned. 
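+ // Note that this point is reached only when the StoRI was resolved: if resolution failed in + // one of the catch clauses above, no per-SURL status is added for that SURL.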
+ TReturnStatus fileStatus = new TReturnStatus(fileStatusCode, fileStatusExplanation); + if (fileStatus.getStatusCode().equals(TStatusCode.SRM_SUCCESS)) { + ExtendFileLifeTimeCommand.log.info( + "srmExtendFileLifeTime: <" + + guser + + "> Request for [token:" + + requestToken + + "] for [SURL:" + + surl + + "] with [lifetime:" + + newLifetime + + " ] successfully done with: [status:" + + fileStatus + + "]"); + } else { + ExtendFileLifeTimeCommand.log.error( + "srmExtendFileLifeTime: <" + + guser + + "> Request for [token:" + + requestToken + + "] for [SURL:" + + surl + + "] with [lifetime:" + + newLifetime + + "] failed with: [status:" + + fileStatus + + "]"); + } + TSURLLifetimeReturnStatus lifetimeReturnStatus = + new TSURLLifetimeReturnStatus(surl, fileStatus, newLifetime, null); + details.addTSurlReturnStatus(lifetimeReturnStatus); + } + } catch (InvalidTSURLLifetimeReturnStatusAttributeException e3) { + ExtendFileLifeTimeCommand.log.debug( + "Thrown InvalidTSURLLifetimeReturnStatusAttributeException"); + } + } + TReturnStatus globalStatus = null; + // Set global status + if (requestFailure) { + globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, "All file requests are failed"); + } else if (requestSuccess) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_SUCCESS, "All file requests are successfully completed"); + } else { + globalStatus = + new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, "Details are on the file statuses"); + } + return globalStatus; + } + + /** + * Returns true if the status of the SURL of the received StoRI is SRM_SPACE_AVAILABLE, false + * otherwise. This method queries the DB, therefore pay attention to possible performance issues. + * + * @return boolean + */ + private boolean isStoRISURLBusy(StoRI element) { + + SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager(); + + return checker.isSURLBusy(element.getSURL()); + } + + /** + * Extend the PIN lifetime of a SURL. The parameter details is filled by this method and contains + * file level information on the execution of the request. + * + * @param requestToken TRequestToken. + * @param newPINLifetime TLifeTimeInSeconds. + * @param arrayOfSURLS ArrayOfSURLs. + * @param guser VomsGridUser. + * @param details ArrayOfTSURLLifetimeReturnStatus. + * @return TReturnStatus. The request status. 
* @throws UnknownTokenException + * @throws IllegalArgumentException + */ + private TReturnStatus manageExtendPinLifetime( + TRequestToken requestToken, + TLifeTimeInSeconds newPINLifetime, + ArrayOfSURLs arrayOfSURLS, + GridUserInterface guser, + ArrayOfTSURLLifetimeReturnStatus details) + throws IllegalArgumentException { + + if (details == null) { + ExtendFileLifeTimeCommand.log.debug( + "Function manageExtendPinLifetime, class ExtendFileLifeTimeExecutor: parameter details is NULL"); + } + TReturnStatus globalStatus = null; + List requestSURLsList; + try { + requestSURLsList = getListOfSURLsInTheRequest(guser, requestToken); + } catch (UnknownTokenException e4) { + return CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); + } catch (ExpiredTokenException e) { + return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, "Request expired"); + } catch (AuthzException e) { + return CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage()); + } + if (requestSURLsList.isEmpty()) { + return CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); + } + // Once we have the list of SURLs belonging to the request, we must check + // that the SURLs given by the user are consistent, that the resulting + // lifetime may be lower than the one requested (for this we must read the + // Volatile table of the DB), and that the SURLs are not released, aborted, + // expired or suspended. The purpose of all these checks is to return the + // right information: no PIN lifetime is effectively extended, since in + // StoRM the TURL corresponds to the SURL. + boolean requestSuccess = true; + boolean requestFailure = true; + TLifeTimeInSeconds PINLifetime; + TLifeTimeInSeconds dbLifetime = null; + for (int i = 0; i < arrayOfSURLS.size(); i++) { + TSURL surl = arrayOfSURLS.getTSURL(i); + TStatusCode statusOfTheSURL = null; + TStatusCode fileStatusCode; + String fileStatusExplanation; + boolean surlFound = false; + // Check if the current SURL belongs to the request token + for (int j = 0; j < requestSURLsList.size(); j++) { + SURLData surlData = (SURLData) requestSURLsList.get(j); + if (surl.equals(surlData.surl)) { + statusOfTheSURL = surlData.statusCode; + requestSURLsList.remove(j); + surlFound = true; + break; + } + } + try { + if (surlFound) { + ExtendFileLifeTimeCommand.log.debug( + "Found SURL: " + + surl.getSURLString() + + " (status: " + + statusOfTheSURL.toString() + + ")"); + NamespaceInterface namespace = NamespaceDirector.getNamespace(); + StoRI stori = null; + try { + stori = namespace.resolveStoRIbySURL(surl, guser); + } catch (IllegalArgumentException e) { + log.error("Unable to build StoRI by SURL and user", e); + } catch (Exception e) { + log.info( + String.format( + "Unable to build a stori for surl %s for user %s, %s: %s", + surl, guser, e.getClass().getCanonicalName(), e.getMessage())); + } + if (stori != null) { + LocalFile localFile = stori.getLocalFile(); + if (localFile.exists()) { + VolatileAndJiTCatalog catalog = VolatileAndJiTCatalog.getInstance(); + List volatileInfo = catalog.volatileInfoOn(stori.getPFN()); + + if ((statusOfTheSURL != TStatusCode.SRM_FILE_PINNED) + && (statusOfTheSURL != TStatusCode.SRM_SPACE_AVAILABLE) + && (statusOfTheSURL != TStatusCode.SRM_SUCCESS)) { + fileStatusCode = TStatusCode.SRM_INVALID_REQUEST; + fileStatusExplanation = "No TURL available"; + PINLifetime = null; + requestSuccess = false; + } else if (volatileInfo.size() > 2) { + 
fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; + fileStatusExplanation = "Found more than one entry... that's a BUG."; + // For lifetimes, infinite also means unknown + PINLifetime = TLifeTimeInSeconds.makeInfinite(); + requestSuccess = false; + } else { // OK, extend the PIN lifetime. + // If the status is success the extension does not take place: + // only if the new lifetime parameter is empty are the current + // values returned, otherwise the request must + // fail! + + if ((statusOfTheSURL == TStatusCode.SRM_SUCCESS) && (!newPINLifetime.isEmpty())) { + + fileStatusCode = TStatusCode.SRM_INVALID_REQUEST; + fileStatusExplanation = "No TURL available"; + PINLifetime = null; + requestSuccess = false; + + } else { + + fileStatusCode = TStatusCode.SRM_SUCCESS; + + if (volatileInfo.isEmpty()) { // SURL is permanent + dbLifetime = TLifeTimeInSeconds.makeInfinite(); + } else { + dbLifetime = (TLifeTimeInSeconds) volatileInfo.get(1); + } + if ((!dbLifetime.isInfinite()) && (newPINLifetime.value() > dbLifetime.value())) { + PINLifetime = dbLifetime; + fileStatusExplanation = + "The requested PIN lifetime is greater than the lifetime of the SURL." + + " PIN lifetime is now equal to the lifetime of the SURL."; + } else { + PINLifetime = newPINLifetime; + fileStatusExplanation = "Lifetime extended"; + } + ExtendFileLifeTimeCommand.log.debug( + "New PIN lifetime is: " + + PINLifetime.value() + + "(SURL: " + + surl.getSURLString() + + ")"); + // TODO: update the RequestSummaryCatalog with the new + // pinLifetime + // it is better to do it only once after the for loop + requestFailure = false; + } + } + } else { // file does not exist in the file system + fileStatusCode = TStatusCode.SRM_INVALID_PATH; + fileStatusExplanation = "Invalid path"; + PINLifetime = null; + requestSuccess = false; + } + } else { + log.error("Unable to build StoRI by SURL and user"); + fileStatusCode = TStatusCode.SRM_INTERNAL_ERROR; + fileStatusExplanation = "Unable to build StoRI by SURL and user"; + // For lifetimes, infinite also means unknown + PINLifetime = null; + requestSuccess = false; + } + } else { // SURL not found in the DB + ExtendFileLifeTimeCommand.log.debug("SURL: " + surl.getSURLString() + " NOT FOUND!"); + fileStatusCode = TStatusCode.SRM_INVALID_PATH; + fileStatusExplanation = "SURL not found in the request"; + PINLifetime = null; + requestSuccess = false; + } + // Set the file level information to be returned. 
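+ // Both the SURL lifetime (dbLifetime) and the granted PIN lifetime (PINLifetime) are reported + // in the per-SURL return status built below.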
+ TReturnStatus fileStatus = new TReturnStatus(fileStatusCode, fileStatusExplanation); + if (fileStatus.getStatusCode().equals(TStatusCode.SRM_SUCCESS)) { + ExtendFileLifeTimeCommand.log.info( + "srmExtendFileLifeTime: <" + + guser + + "> Request for [token:" + + requestToken + + "] for [SURL:" + + surl + + "] with [pinlifetime: " + + newPINLifetime + + "] successfully done with: [status:" + + fileStatus.toString() + + "]"); + } else { + ExtendFileLifeTimeCommand.log.error( + "srmExtendFileLifeTime: <" + + guser + + "> Request for [token:" + + requestToken + + "] for [SURL:" + + surl + + "] with [pinlifetime: " + + newPINLifetime + + "] failed with: [status:" + + fileStatus.toString() + + "]"); + } + + TSURLLifetimeReturnStatus lifetimeReturnStatus = + new TSURLLifetimeReturnStatus(surl, fileStatus, dbLifetime, PINLifetime); + details.addTSurlReturnStatus(lifetimeReturnStatus); + } catch (InvalidTSURLLifetimeReturnStatusAttributeException e3) { + ExtendFileLifeTimeCommand.log.debug( + "Thrown InvalidTSURLLifetimeReturnStatusAttributeException"); + } + } + + // Set global status + if (requestFailure) { + globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, "All file requests are failed"); + } else if (requestSuccess) { + globalStatus = + new TReturnStatus( + TStatusCode.SRM_SUCCESS, "All file requests are successfully completed"); + } else { + globalStatus = + new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, "Details are on the file statuses"); + } + return globalStatus; + } + + /** + * Returns the list of SURLs and statuses (a List of SURLData) belonging to the request identified + * by the requestToken. + * + * @param requestToken TRequestToken + * @return List + * @throws UnknownTokenException + * @throws IllegalArgumentException + * @throws ExpiredTokenException + */ + private List getListOfSURLsInTheRequest( + GridUserInterface user, TRequestToken requestToken) + throws IllegalArgumentException, UnknownTokenException, ExpiredTokenException { + + List listOfSURLsInfo = new LinkedList(); + + SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager(); + + Map surlStatusMap = checker.getSURLStatuses(user, requestToken); + + if (!(surlStatusMap == null || surlStatusMap.isEmpty())) { + for (Entry surlStatus : surlStatusMap.entrySet()) { + listOfSURLsInfo.add( + new SURLData(surlStatus.getKey(), surlStatus.getValue().getStatusCode())); + } + } + return listOfSURLsInfo; + } + + private void printRequestOutcome(TReturnStatus status, ExtendFileLifeTimeInputData inputData) { + + if (inputData != null) { + if (inputData.getArrayOfSURLs() != null) { + if (inputData.getRequestToken() != null) { + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + status, + inputData, + inputData.getRequestToken(), + inputData.getArrayOfSURLs().asStringList()); + } else { + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, status, inputData, inputData.getArrayOfSURLs().asStringList()); + } + + } else { + if (inputData.getRequestToken() != null) { + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, status, inputData, inputData.getRequestToken()); + } else { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); + } + } + + } else { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); + } + } + + private class SURLData { + + public TSURL surl; + public TStatusCode statusCode; + + public SURLData(TSURL surl, TStatusCode statusCode) { + + this.surl = surl; + this.statusCode = statusCode; + } + } } diff --git 
a/src/main/java/it/grid/storm/synchcall/command/datatransfer/FileTransferRequestStatusCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/FileTransferRequestStatusCommand.java index 86ef9e6c..07adef19 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/FileTransferRequestStatusCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/FileTransferRequestStatusCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -9,7 +8,6 @@ import it.grid.storm.catalogs.RequestSummaryData; import it.grid.storm.catalogs.surl.SURLStatusManager; import it.grid.storm.catalogs.surl.SURLStatusManagerFactory; -import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TRequestType; @@ -24,28 +22,23 @@ import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferOutputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestFilesInputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestInputData; - import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public abstract class FileTransferRequestStatusCommand extends - DataTransferCommand implements Command { +public abstract class FileTransferRequestStatusCommand extends DataTransferCommand + implements Command { protected abstract String getSrmCommand(); protected abstract TRequestType getRequestType(); - private static final Logger log = LoggerFactory - .getLogger(FileTransferRequestStatusCommand.class); + private static final Logger log = LoggerFactory.getLogger(FileTransferRequestStatusCommand.class); - public FileTransferRequestStatusCommand() { - - }; + public FileTransferRequestStatusCommand() {}; public TRequestToken getTokenFromInputData(InputData inputData) { @@ -55,21 +48,20 @@ public TRequestToken getTokenFromInputData(InputData inputData) { public boolean inputDataHasSURLArray(InputData inputData) { return (inputData instanceof ManageFileTransferRequestFilesInputData) - || (inputData instanceof ManageFileTransferFilesInputData); + || (inputData instanceof ManageFileTransferFilesInputData); } public boolean validInputData(InputData inputData) { return (inputData instanceof ManageFileTransferRequestFilesInputData) - || (inputData instanceof ManageFileTransferFilesInputData) - || (inputData instanceof ManageFileTransferRequestInputData); + || (inputData instanceof ManageFileTransferFilesInputData) + || (inputData instanceof ManageFileTransferRequestInputData); } public List getSURLListFromInputData(InputData inputData) { if (inputDataHasSURLArray(inputData)) { - return ((ManageFileTransferFilesInputData) inputData).getArrayOfSURLs() - .getArrayList(); + return ((ManageFileTransferFilesInputData) inputData).getArrayOfSURLs().getArrayList(); } return null; } @@ -77,16 +69,15 @@ public List getSURLListFromInputData(InputData inputData) { private List toStringList(List surls) { List ls = new ArrayList(); - for (TSURL s : surls) - ls.add(s.getSURLString()); + for (TSURL s : surls) ls.add(s.getSURLString()); return ls; } protected ManageFileTransferOutputData 
handleExpiredRequestToken(InputData id) { - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Expired request token")); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, "Expired request token")); printRequestOutcome(outputData.getReturnStatus(), id); @@ -95,9 +86,9 @@ protected ManageFileTransferOutputData handleExpiredRequestToken(InputData id) { protected ManageFileTransferOutputData handleInvalidRequestToken(InputData id) { - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token")); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token")); printRequestOutcome(outputData.getReturnStatus(), id); return outputData; @@ -117,14 +108,14 @@ protected boolean tokenHasExpired(InputData inputData) { return (token.hasExpirationDate() && token.isExpired()); } - protected ManageFileTransferOutputData handleInternalError(InputData id, - Throwable t) { + protected ManageFileTransferOutputData handleInternalError(InputData id, Throwable t) { log.warn(t.getMessage(), t); - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, - "Internal error: " + t.getMessage())); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus( + TStatusCode.SRM_FAILURE, "Internal error: " + t.getMessage())); printRequestOutcome(outputData.getReturnStatus(), id); @@ -133,14 +124,13 @@ protected ManageFileTransferOutputData handleInternalError(InputData id, @Override public ManageFileTransferOutputData execute(InputData inputData) - throws IllegalArgumentException, CommandException { + throws IllegalArgumentException, CommandException { log.debug(getSrmCommand() + "Started."); if (!validInputData(inputData)) { throw new IllegalArgumentException( - "Unable to execute the task. Wrong input argument type: " - + inputData.getClass()); + "Unable to execute the task. Wrong input argument type: " + inputData.getClass()); } Map surlStatuses = null; @@ -156,8 +146,11 @@ public ManageFileTransferOutputData execute(InputData inputData) try { - surlStatuses = checker.getSURLStatuses(getUserFromInputData(inputData), - getTokenFromInputData(inputData), getSURLListFromInputData(inputData)); + surlStatuses = + checker.getSURLStatuses( + getUserFromInputData(inputData), + getTokenFromInputData(inputData), + getSURLListFromInputData(inputData)); } catch (IllegalArgumentException e) { return handleInternalError(inputData, e); @@ -171,21 +164,22 @@ public ManageFileTransferOutputData execute(InputData inputData) log.info("No SURLs found in the DB. 
Request failed"); TReturnStatus returnStatus; if (inputData instanceof ManageFileTransferRequestFilesInputData) { - returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token, no match with provided surls"); + returnStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, + "Invalid request token, no match with provided surls"); } else { if (inputData instanceof ManageFileTransferRequestInputData) { - returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); + returnStatus = + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); } else { if (inputData instanceof ManageFileTransferFilesInputData) { - returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_REQUEST, - "None of the specified SURLs was found"); + returnStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "None of the specified SURLs was found"); } else { - throw new IllegalStateException("Unexpected InputData received: " - + inputData.getClass()); + throw new IllegalStateException( + "Unexpected InputData received: " + inputData.getClass()); } } } @@ -195,13 +189,14 @@ public ManageFileTransferOutputData execute(InputData inputData) return new ManageFileTransferOutputData(returnStatus); } - ArrayOfTSURLReturnStatus surlReturnStatuses = encodeSURLReturnStatuses( - surlStatuses, getSURLListFromInputData(inputData)); + ArrayOfTSURLReturnStatus surlReturnStatuses = + encodeSURLReturnStatuses(surlStatuses, getSURLListFromInputData(inputData)); TReturnStatus requestStatus; if (inputData instanceof ManageFileTransferRequestInputData) { - RequestSummaryData data = RequestSummaryCatalog.getInstance().find( - ((ManageFileTransferRequestInputData) inputData).getRequestToken()); + RequestSummaryData data = + RequestSummaryCatalog.getInstance() + .find(((ManageFileTransferRequestInputData) inputData).getRequestToken()); if (data != null) { requestStatus = data.getStatus(); } else { @@ -215,44 +210,37 @@ public ManageFileTransferOutputData execute(InputData inputData) return new ManageFileTransferOutputData(requestStatus, surlReturnStatuses); } - private ManageFileTransferOutputData handleAuthzError(InputData inputData, - AuthzException e) { + private ManageFileTransferOutputData handleAuthzError(InputData inputData, AuthzException e) { log.warn(e.getMessage(), e); - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - e.getMessage())); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); printRequestOutcome(outputData.getReturnStatus(), inputData); return outputData; - } protected abstract TReturnStatus computeRequestStatus( - ArrayOfTSURLReturnStatus arrayOfFileStatuses); + ArrayOfTSURLReturnStatus arrayOfFileStatuses); - protected ArrayOfTSURLReturnStatus encodeSURLReturnStatuses( - Map statuses) { + protected ArrayOfTSURLReturnStatus encodeSURLReturnStatuses(Map statuses) { - ArrayOfTSURLReturnStatus retStatuses = new ArrayOfTSURLReturnStatus( - statuses.size()); + ArrayOfTSURLReturnStatus retStatuses = new ArrayOfTSURLReturnStatus(statuses.size()); for (Entry rs : statuses.entrySet()) { - retStatuses.addTSurlReturnStatus(CommandHelper.buildStatus(rs.getKey(), - rs.getValue())); + retStatuses.addTSurlReturnStatus(CommandHelper.buildStatus(rs.getKey(), rs.getValue())); } 
return retStatuses; - } protected ArrayOfTSURLReturnStatus encodeSURLReturnStatuses( - Map statuses, List surls) { + Map statuses, List surls) { - if (surls == null || surls.isEmpty()) - return encodeSURLReturnStatuses(statuses); + if (surls == null || surls.isEmpty()) return encodeSURLReturnStatuses(statuses); ArrayOfTSURLReturnStatus retStatuses = new ArrayOfTSURLReturnStatus(); @@ -260,8 +248,7 @@ protected ArrayOfTSURLReturnStatus encodeSURLReturnStatuses( TReturnStatus status = statuses.get(s); if (status == null) { - status = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, - "Invalid SURL path."); + status = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, "Invalid SURL path."); } retStatuses.addTSurlReturnStatus(CommandHelper.buildStatus(s, status)); @@ -276,12 +263,10 @@ protected void printRequestOutcome(TReturnStatus status, InputData id) { List surls = getSURLListFromInputData(id); if (surls == null) { - CommandHelper - .printRequestOutcome(getSrmCommand(), log, status, id, token); + CommandHelper.printRequestOutcome(getSrmCommand(), log, status, id, token); } else { - CommandHelper.printRequestOutcome(getSrmCommand(), log, status, id, - token, toStringList(surls)); + CommandHelper.printRequestOutcome( + getSrmCommand(), log, status, id, token, toStringList(surls)); } } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestCommand.java index 38f8fbd8..371493e0 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestCommand.java @@ -1,11 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.asynch.BuilderException; import it.grid.storm.asynch.PtG; import it.grid.storm.asynch.PtGBuilder; @@ -15,49 +12,49 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.FileTransferInputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToGetOutputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PrepareToGetRequestCommand implements Command { - private static final Logger log = LoggerFactory - .getLogger(PrepareToGetRequestCommand.class); + private static final Logger log = LoggerFactory.getLogger(PrepareToGetRequestCommand.class); - @Override - public OutputData execute(InputData inputData) throws CommandException { + @Override + public OutputData execute(InputData inputData) throws CommandException { - if (!(inputData instanceof FileTransferInputData)) { - log.error("Unable to convert from InputData. Wrong InputData type: \'" - + inputData.getClass().getName() + "\'"); - throw new IllegalArgumentException( - "Unable to convert from InputData. Wrong InputData type: \'" - + inputData.getClass().getName() + "\'"); - } - PtG request; - try { - request = PtGBuilder.build((FileTransferInputData) inputData); - } catch (BuilderException e) { - log - .error("Unable to build PtG request from the InputData. 
BuilderException: " - + e.getMessage()); - throw new CommandException( - "Unable to build PtG request from the InputData"); - } - ChunkTask ptgTask = new ChunkTask(request); - ptgTask.run(); - try { - return new PrepareToGetOutputData(request.getRequestData().getSURL(), - request.getRequestData().getTransferURL(), request.getRequestData() - .getStatus(), request.getRequestData().getGeneratedRequestToken(), - request.getRequestData().getFileSize(), request.getRequestData() - .getPinLifeTime()); - } catch (IllegalArgumentException e) { - log - .error("Unable to create PrepareToPutOutputData. IllegalArgumentException: " - + e.getMessage()); - throw new CommandException("Unable to create PrepareToPutOutputData"); - } - } + if (!(inputData instanceof FileTransferInputData)) { + log.error( + "Unable to convert from InputData. Wrong InputData type: \'" + + inputData.getClass().getName() + + "\'"); + throw new IllegalArgumentException( + "Unable to convert from InputData. Wrong InputData type: \'" + + inputData.getClass().getName() + + "\'"); + } + PtG request; + try { + request = PtGBuilder.build((FileTransferInputData) inputData); + } catch (BuilderException e) { + log.error( + "Unable to build PtG request from the InputData. BuilderException: " + e.getMessage()); + throw new CommandException("Unable to build PtG request from the InputData"); + } + ChunkTask ptgTask = new ChunkTask(request); + ptgTask.run(); + try { + return new PrepareToGetOutputData( + request.getRequestData().getSURL(), + request.getRequestData().getTransferURL(), + request.getRequestData().getStatus(), + request.getRequestData().getGeneratedRequestToken(), + request.getRequestData().getFileSize(), + request.getRequestData().getPinLifeTime()); + } catch (IllegalArgumentException e) { + log.error( + "Unable to create PrepareToPutOutputData. IllegalArgumentException: " + e.getMessage()); + throw new CommandException("Unable to create PrepareToPutOutputData"); + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestStatusCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestStatusCommand.java index 22818db3..3d3dc8e1 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestStatusCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToGetRequestStatusCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -12,81 +11,77 @@ import it.grid.storm.synchcall.command.Command; import it.grid.storm.synchcall.command.CommandHelper; -public class PrepareToGetRequestStatusCommand extends - FileTransferRequestStatusCommand implements Command { +public class PrepareToGetRequestStatusCommand extends FileTransferRequestStatusCommand + implements Command { - private static final String SRM_COMMAND = "srmStatusOfGetRequest"; + private static final String SRM_COMMAND = "srmStatusOfGetRequest"; - public PrepareToGetRequestStatusCommand() { + public PrepareToGetRequestStatusCommand() {} - } + @Override + protected TReturnStatus computeRequestStatus(ArrayOfTSURLReturnStatus arrayOfFileStatuses) { - @Override - protected TReturnStatus computeRequestStatus( - ArrayOfTSURLReturnStatus arrayOfFileStatuses) { + boolean atLeastOneSuccessOrPinned = false; + boolean atLeastOneQueued = false; + boolean atLeastOneInProgress = false; + boolean atLeastOneAborted = false; + boolean atLeastOneFailed = false; + for (TSURLReturnStatus surlStatus : arrayOfFileStatuses.getArray()) { + switch (surlStatus.getStatus().getStatusCode()) { + case SRM_SUCCESS: + atLeastOneSuccessOrPinned = true; + break; + case SRM_FILE_PINNED: + atLeastOneSuccessOrPinned = true; + break; + case SRM_REQUEST_QUEUED: + atLeastOneQueued = true; + break; + case SRM_REQUEST_INPROGRESS: + atLeastOneInProgress = true; + case SRM_RELEASED: + break; + case SRM_ABORTED: + atLeastOneAborted = true; + break; + default: + atLeastOneFailed = true; + break; + } + } + if (atLeastOneSuccessOrPinned + && !(atLeastOneQueued || atLeastOneInProgress || atLeastOneAborted || atLeastOneFailed)) { + return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, "Request executed successfully"); + } + if (((atLeastOneSuccessOrPinned || atLeastOneAborted || atLeastOneFailed) && atLeastOneQueued) + || atLeastOneInProgress) { + return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_INPROGRESS, "Request in progress"); + } + if (atLeastOneSuccessOrPinned + && atLeastOneFailed + && !(atLeastOneQueued || atLeastOneInProgress)) { + return CommandHelper.buildStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, "Partial success, some surls are failed"); + } + if (atLeastOneAborted + && !(atLeastOneQueued + || atLeastOneInProgress + || atLeastOneSuccessOrPinned + || atLeastOneFailed)) { + return CommandHelper.buildStatus(TStatusCode.SRM_ABORTED, "Request aborted"); + } + return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "Request failed"); + } - boolean atLeastOneSuccessOrPinned = false; - boolean atLeastOneQueued = false; - boolean atLeastOneInProgress = false; - boolean atLeastOneAborted = false; - boolean atLeastOneFailed = false; - for (TSURLReturnStatus surlStatus : arrayOfFileStatuses.getArray()) { - switch (surlStatus.getStatus().getStatusCode()) { - case SRM_SUCCESS: - atLeastOneSuccessOrPinned = true; - break; - case SRM_FILE_PINNED: - atLeastOneSuccessOrPinned = true; - break; - case SRM_REQUEST_QUEUED: - atLeastOneQueued = true; - break; - case SRM_REQUEST_INPROGRESS: - atLeastOneInProgress = true; - case SRM_RELEASED: - break; - case SRM_ABORTED: - atLeastOneAborted = true; - break; - default: - atLeastOneFailed = true; - break; - } - } - if (atLeastOneSuccessOrPinned - && !(atLeastOneQueued || atLeastOneInProgress || atLeastOneAborted || atLeastOneFailed)) { - return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, - "Request executed successfully"); - } - if 
(((atLeastOneSuccessOrPinned || atLeastOneAborted || atLeastOneFailed) && atLeastOneQueued) - || atLeastOneInProgress) { - return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_INPROGRESS, - "Request in progress"); - } - if (atLeastOneSuccessOrPinned && atLeastOneFailed - && !(atLeastOneQueued || atLeastOneInProgress)) { - return CommandHelper.buildStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Partial success, some surls are failed"); - } - if (atLeastOneAborted - && !(atLeastOneQueued || atLeastOneInProgress - || atLeastOneSuccessOrPinned || atLeastOneFailed)) { - return CommandHelper.buildStatus(TStatusCode.SRM_ABORTED, - "Request aborted"); - } - return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "Request failed"); - } + @Override + protected String getSrmCommand() { - @Override - protected String getSrmCommand() { + return SRM_COMMAND; + } - return SRM_COMMAND; - } - - @Override - protected TRequestType getRequestType() { - - return TRequestType.PREPARE_TO_GET; - }; + @Override + protected TRequestType getRequestType() { + return TRequestType.PREPARE_TO_GET; + }; } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestCommand.java index 6af2ec55..49cbf021 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestCommand.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.asynch.BuilderException; import it.grid.storm.asynch.PtP; import it.grid.storm.asynch.PtPBuilder; @@ -16,49 +12,47 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToPutOutputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PrepareToPutRequestCommand implements Command { - private static final Logger log = LoggerFactory - .getLogger(PrepareToPutRequestCommand.class); - - @Override - public OutputData execute(InputData inputData) - throws IllegalArgumentException, CommandException { + private static final Logger log = LoggerFactory.getLogger(PrepareToPutRequestCommand.class); - if (!(inputData instanceof PrepareToPutInputData)) { - log.error("Unable to convert from InputData. Wrong InputData type: \'" - + inputData.getClass().getName() + "\'"); - throw new IllegalArgumentException( - "Unable to convert from InputData. Wrong InputData type: \'" - + inputData.getClass().getName() + "\'"); - } - PtP request; - try { - request = PtPBuilder.build((PrepareToPutInputData) inputData); - } catch (BuilderException e) { - log - .error("Unable to build PtP request from the InputData. 
BuilderException: " - + e.getMessage()); - throw new CommandException( - "Unable to build PtP request from the InputData"); - } - ChunkTask ptpTask = new ChunkTask(request); - ptpTask.run(); - try { - return new PrepareToPutOutputData(request.getRequestData().getSURL(), - request.getRequestData().getTransferURL(), request.getRequestData() - .getStatus(), request.getRequestData().getGeneratedRequestToken()); - } catch (IllegalArgumentException e) { - log - .error("Unable to create PrepareToPutOutputData. IllegalArgumentException: " - + e.getMessage()); - throw new CommandException("Unable to create PrepareToPutOutputData"); - } - } + @Override + public OutputData execute(InputData inputData) throws IllegalArgumentException, CommandException { + if (!(inputData instanceof PrepareToPutInputData)) { + log.error( + "Unable to convert from InputData. Wrong InputData type: \'" + + inputData.getClass().getName() + + "\'"); + throw new IllegalArgumentException( + "Unable to convert from InputData. Wrong InputData type: \'" + + inputData.getClass().getName() + + "\'"); + } + PtP request; + try { + request = PtPBuilder.build((PrepareToPutInputData) inputData); + } catch (BuilderException e) { + log.error( + "Unable to build PtP request from the InputData. BuilderException: " + e.getMessage()); + throw new CommandException("Unable to build PtP request from the InputData"); + } + ChunkTask ptpTask = new ChunkTask(request); + ptpTask.run(); + try { + return new PrepareToPutOutputData( + request.getRequestData().getSURL(), + request.getRequestData().getTransferURL(), + request.getRequestData().getStatus(), + request.getRequestData().getGeneratedRequestToken()); + } catch (IllegalArgumentException e) { + log.error( + "Unable to create PrepareToPutOutputData. IllegalArgumentException: " + e.getMessage()); + throw new CommandException("Unable to create PrepareToPutOutputData"); + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestStatusCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestStatusCommand.java index 939ff63a..65a18a25 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestStatusCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PrepareToPutRequestStatusCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -12,81 +11,78 @@ import it.grid.storm.synchcall.command.Command; import it.grid.storm.synchcall.command.CommandHelper; -public class PrepareToPutRequestStatusCommand extends - FileTransferRequestStatusCommand implements Command { +public class PrepareToPutRequestStatusCommand extends FileTransferRequestStatusCommand + implements Command { - private static final String SRM_COMMAND = "srmStatusOfPutRequest"; + private static final String SRM_COMMAND = "srmStatusOfPutRequest"; - public PrepareToPutRequestStatusCommand() { + public PrepareToPutRequestStatusCommand() {} - } + @Override + protected String getSrmCommand() { - @Override - protected String getSrmCommand() { + return SRM_COMMAND; + } - return SRM_COMMAND; - } + @Override + protected TRequestType getRequestType() { - @Override - protected TRequestType getRequestType() { + return TRequestType.PREPARE_TO_PUT; + }; - return TRequestType.PREPARE_TO_PUT; - }; - - @Override - protected TReturnStatus computeRequestStatus( - ArrayOfTSURLReturnStatus arrayOfFileStatuses) { - - boolean atLeastOneSuccessOrSpaceAvailable = false; - boolean atLeastOneQueued = false; - boolean atLeastOneInProgress = false; - boolean atLeastOneAborted = false; - boolean atLeastOneFailed = false; - for (TSURLReturnStatus surlStatus : arrayOfFileStatuses.getArray()) { - switch (surlStatus.getStatus().getStatusCode()) { - case SRM_SUCCESS: - atLeastOneSuccessOrSpaceAvailable = true; - break; - case SRM_SPACE_AVAILABLE: - atLeastOneSuccessOrSpaceAvailable = true; - break; - case SRM_REQUEST_QUEUED: - atLeastOneQueued = true; - break; - case SRM_REQUEST_INPROGRESS: - atLeastOneInProgress = true; - case SRM_RELEASED: - break; - case SRM_ABORTED: - atLeastOneAborted = true; - break; - default: - atLeastOneFailed = true; - break; - } - } - if (atLeastOneSuccessOrSpaceAvailable - && !(atLeastOneQueued || atLeastOneInProgress || atLeastOneAborted || atLeastOneFailed)) { - return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, - "Request executed successfully"); - } - if (((atLeastOneSuccessOrSpaceAvailable || atLeastOneAborted || atLeastOneFailed) && atLeastOneQueued) - || atLeastOneInProgress) { - return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_INPROGRESS, - "Request in progress"); - } - if (atLeastOneSuccessOrSpaceAvailable && atLeastOneFailed - && !(atLeastOneQueued || atLeastOneInProgress)) { - return CommandHelper.buildStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Partial success, some surls are failed"); - } - if (atLeastOneAborted - && !(atLeastOneQueued || atLeastOneInProgress - || atLeastOneSuccessOrSpaceAvailable || atLeastOneFailed)) { - return CommandHelper.buildStatus(TStatusCode.SRM_ABORTED, - "Request aborted"); - } - return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "Request failed"); - } + @Override + protected TReturnStatus computeRequestStatus(ArrayOfTSURLReturnStatus arrayOfFileStatuses) { + boolean atLeastOneSuccessOrSpaceAvailable = false; + boolean atLeastOneQueued = false; + boolean atLeastOneInProgress = false; + boolean atLeastOneAborted = false; + boolean atLeastOneFailed = false; + for (TSURLReturnStatus surlStatus : arrayOfFileStatuses.getArray()) { + switch (surlStatus.getStatus().getStatusCode()) { + case SRM_SUCCESS: + atLeastOneSuccessOrSpaceAvailable = true; + break; + case SRM_SPACE_AVAILABLE: + atLeastOneSuccessOrSpaceAvailable = true; + break; + case SRM_REQUEST_QUEUED: + atLeastOneQueued = true; + break; + case 
SRM_REQUEST_INPROGRESS: + atLeastOneInProgress = true; + case SRM_RELEASED: + break; + case SRM_ABORTED: + atLeastOneAborted = true; + break; + default: + atLeastOneFailed = true; + break; + } + } + if (atLeastOneSuccessOrSpaceAvailable + && !(atLeastOneQueued || atLeastOneInProgress || atLeastOneAborted || atLeastOneFailed)) { + return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, "Request executed successfully"); + } + if (((atLeastOneSuccessOrSpaceAvailable || atLeastOneAborted || atLeastOneFailed) + && atLeastOneQueued) + || atLeastOneInProgress) { + return CommandHelper.buildStatus(TStatusCode.SRM_REQUEST_INPROGRESS, "Request in progress"); + } + if (atLeastOneSuccessOrSpaceAvailable + && atLeastOneFailed + && !(atLeastOneQueued || atLeastOneInProgress)) { + return CommandHelper.buildStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, "Partial success, some surls are failed"); + } + if (atLeastOneAborted + && !(atLeastOneQueued + || atLeastOneInProgress + || atLeastOneSuccessOrSpaceAvailable + || atLeastOneFailed)) { + return CommandHelper.buildStatus(TStatusCode.SRM_ABORTED, "Request aborted"); + } + return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "Request failed"); + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtGAbortExecutor.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtGAbortExecutor.java index a5d57b41..5a400327 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtGAbortExecutor.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtGAbortExecutor.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This is the Abort executor for a PtG request. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Aug 2006 @@ -31,12 +30,10 @@ import it.grid.storm.synchcall.data.datatransfer.AbortInputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,12 +42,9 @@ public class PtGAbortExecutor implements AbortExecutorInterface { static Configuration config = Configuration.getInstance(); private static int maxLoopTimes = PtGAbortExecutor.config.getMaxLoop(); - private static final Logger log = LoggerFactory - .getLogger(PtGAbortExecutor.class); + private static final Logger log = LoggerFactory.getLogger(PtGAbortExecutor.class); - public PtGAbortExecutor() { - - }; + public PtGAbortExecutor() {}; public AbortGeneralOutputData doIt(AbortInputData inputData) { @@ -62,13 +56,11 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { TReturnStatus globalStatus = null; /** - * 0) Get all Chunk related to the specified request according with user - * specification (in case of AbortFiles). 1) Wait until a chunk goes in - * SRM_FILE_PINNED status.(or any other status different from SRM_QUEUED). - * 2) Rollback. For a PtG request the rollback only means to remove the acl - * eventually inserted into the Volatile-JiT catalog. + * 0) Get all Chunk related to the specified request according with user specification (in case + * of AbortFiles). 1) Wait until a chunk goes in SRM_FILE_PINNED status.(or any other status + * different from SRM_QUEUED). 2) Rollback. 
For a PtG request the rollback only means to remove + * the acl eventually inserted into the Volatile-JiT catalog. */ - Map surlStatusMap; SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager(); @@ -76,114 +68,123 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { GridUserInterface user = AbstractCommand.getUserFromInputData(inputData); try { - surlStatusMap = checker - .getSURLStatuses(user, inputData.getRequestToken()); + surlStatusMap = checker.getSURLStatuses(user, inputData.getRequestToken()); } catch (IllegalArgumentException e) { - log - .error("Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " - + e); - throw new IllegalStateException("Unexpected IllegalArgumentException: " - + e.getMessage()); + log.error( + "Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " + e); + throw new IllegalStateException("Unexpected IllegalArgumentException: " + e.getMessage()); } catch (UnknownTokenException e) { - PtGAbortExecutor.log - .debug("PtGAbortExecutor: Request - Invalid request token"); + PtGAbortExecutor.log.debug("PtGAbortExecutor: Request - Invalid request token"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token"); + globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - PtGAbortExecutor.log.info("srmAbortRequest: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] successfully done with [status: " - + globalStatus + "]"); + PtGAbortExecutor.log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] successfully done with [status: " + + globalStatus + + "]"); return outputData; } catch (ExpiredTokenException e) { - log.info("The request is expired: ExpiredTokenException: " - + e.getMessage()); + log.info("The request is expired: ExpiredTokenException: " + e.getMessage()); - globalStatus = new TReturnStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, - "Request expired"); + globalStatus = new TReturnStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, "Request expired"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() - + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } if (surlStatusMap.isEmpty()) { - PtGAbortExecutor.log - .debug("PtGAbortExecutor: Request - Invalid request token"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid request token"); + PtGAbortExecutor.log.debug("PtGAbortExecutor: Request - Invalid request token"); + globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Invalid request token"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - PtGAbortExecutor.log.info("srmAbortRequest: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] successfully done with [status: " - + globalStatus + "]"); + PtGAbortExecutor.log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" 
+ + inputData.getRequestToken() + + "] successfully done with [status: " + + globalStatus + + "]"); return outputData; } /** - * Get only the SURL requested in a AbortFile Request Define a new - * Collection to contains the "epurated" chunk, removing the ones not - * specified in input request + * Get only the SURL requested in a AbortFile Request Define a new Collection to contains the + * "epurated" chunk, removing the ones not specified in input request */ - if (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES)) { - PtGAbortExecutor.log - .debug("PtGAbortExecutor: Case of AbortFile request. Purge Chunks with SurlArray."); + PtGAbortExecutor.log.debug( + "PtGAbortExecutor: Case of AbortFile request. Purge Chunks with SurlArray."); /** - * Get the related Chunk for each SURL in the input SurlArray. If a Surl - * requested is not founf, the TSURLReturnStatus related is setted to - * SRM_INVALID_PATH + * Get the related Chunk for each SURL in the input SurlArray. If a Surl requested is not + * founf, the TSURLReturnStatus related is setted to SRM_INVALID_PATH */ - List surlList = extractSurlArray(inputData).getArrayList(); surlStatusMap.keySet().retainAll(surlList); if (!surlStatusMap.keySet().containsAll(surlList)) { for (TSURL surl : surlList) { if (!surlStatusMap.containsKey(surl)) { - log - .debug("PtGAbortExecutor: requested SURL NOT found, invalid file request"); + log.debug("PtGAbortExecutor: requested SURL NOT found, invalid file request"); TSURLReturnStatus surlReturnStatus = new TSURLReturnStatus(); surlReturnStatus.setSurl(surl); - surlReturnStatus.setStatus(new TReturnStatus( - TStatusCode.SRM_INVALID_PATH, - "SURL specified does not referes to this request token.")); - log.info("srmAbortFiles: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() - + "] for [SURL:" + surl + "] failed with [status: " - + surlReturnStatus.getStatus() + "]"); + surlReturnStatus.setStatus( + new TReturnStatus( + TStatusCode.SRM_INVALID_PATH, + "SURL specified does not referes to this request token.")); + log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for [SURL:" + + surl + + "] failed with [status: " + + surlReturnStatus.getStatus() + + "]"); arrayOfTSurlRetStatus.addTSurlReturnStatus(surlReturnStatus); } } } } - /********* Check here the new chunks container is not empty! ******/ + /** ******* Check here the new chunks container is not empty! 
***** */ if (surlStatusMap.isEmpty()) { - log - .debug("Abort Request - No surl specified associated to request token"); + log.debug("Abort Request - No surl specified associated to request token"); - globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, - "All surl specified does not referes to the request token."); + globalStatus = + new TReturnStatus( + TStatusCode.SRM_FAILURE, "All surl specified does not referes to the request token."); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() - + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } - /********* - * Phase 1 Wait until the request goes in a status different from - * REQUEST_QUEUED - ********/ - + /** + * ******* Phase 1 Wait until the request goes in a status different from REQUEST_QUEUED ****** + */ int chunkAborted = 0; // To avoid deadlock @@ -193,14 +194,12 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { int totalSize = surlStatusMap.size(); - while ((chunkAborted < totalSize) - && (counter < PtGAbortExecutor.maxLoopTimes)) { + while ((chunkAborted < totalSize) && (counter < PtGAbortExecutor.maxLoopTimes)) { // Increment loop times counter++; int numOfSurl = 0; - Iterator> iterator = surlStatusMap - .entrySet().iterator(); + Iterator> iterator = surlStatusMap.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry surlStatus = iterator.next(); numOfSurl++; @@ -210,18 +209,16 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { * and the rollback start. */ - if (!(surlStatus.getValue().getStatusCode() - .equals(TStatusCode.SRM_REQUEST_INPROGRESS))) { + if (!(surlStatus.getValue().getStatusCode().equals(TStatusCode.SRM_REQUEST_INPROGRESS))) { /* * If an EXECUTED CHUNK is found, then it is ABORTED. */ - PtGAbortExecutor.log - .debug("srmAbortRequest: PtGAbortExecutor: PtGChunk not in IN_PROGRESS state. Ready for ABORT."); + PtGAbortExecutor.log.debug( + "srmAbortRequest: PtGAbortExecutor: PtGChunk not in IN_PROGRESS state. Ready for ABORT."); TSURLReturnStatus surlReturnStatus = new TSURLReturnStatus(); - /******** Phase (3) of Abort *************/ - PtGAbortExecutor.log - .debug("srmAbortRequest: PtGAbortExecutor: start Phase(3)"); + /** ****** Phase (3) of Abort ************ */ + PtGAbortExecutor.log.debug("srmAbortRequest: PtGAbortExecutor: start Phase(3)"); /* * AvancePicker HACK! Due to a thread issue in the @@ -229,33 +226,37 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { * has already been aborted frome the picker. In such case, the * manageAuthorizedAbort is not needed */ - if ((surlStatus.getValue().getStatusCode() - .equals(TStatusCode.SRM_ABORTED))) { + if ((surlStatus.getValue().getStatusCode().equals(TStatusCode.SRM_ABORTED))) { // The AdvancedPicker have already aborted the chunk. 
- PtGAbortExecutor.log - .debug("PtGAbortExecutor: CHUNK already aborted!"); + PtGAbortExecutor.log.debug("PtGAbortExecutor: CHUNK already aborted!"); surlReturnStatus.setSurl(surlStatus.getKey()); - surlReturnStatus.setStatus(new TReturnStatus( - TStatusCode.SRM_SUCCESS, "File request successfully aborted.")); + surlReturnStatus.setStatus( + new TReturnStatus(TStatusCode.SRM_SUCCESS, "File request successfully aborted.")); } else { // Chunk not ABORTED. We have to work... PtGAbortExecutor.log.debug("PtPAbortExecutor: CHUNK to abort!"); try { - surlReturnStatus = manageAuthorizedAbort(user, - inputData.getRequestToken(), surlStatus.getKey(), - surlStatus.getValue()); + surlReturnStatus = + manageAuthorizedAbort( + user, + inputData.getRequestToken(), + surlStatus.getKey(), + surlStatus.getValue()); } catch (ExpiredTokenException e) { - log.info("The request is expired: ExpiredTokenException: " - + e.getMessage()); + log.info("The request is expired: ExpiredTokenException: " + e.getMessage()); - globalStatus = new TReturnStatus( - TStatusCode.SRM_REQUEST_TIMED_OUT, "Request expired"); + globalStatus = + new TReturnStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, "Request expired"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " - + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } } @@ -263,20 +264,36 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { // Remove this chunks from the other to abort. 
iterator.remove(); - if ((surlReturnStatus.getStatus().getStatusCode() - .equals(TStatusCode.SRM_SUCCESS))) { - PtGAbortExecutor.log.info("srmAbortFiles: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] for SURL " + numOfSurl - + " of " + totalSize + " [SURL:" + surlStatus.getKey() - + "] successfully done with [status: " - + surlReturnStatus.getStatus() + "]"); + if ((surlReturnStatus.getStatus().getStatusCode().equals(TStatusCode.SRM_SUCCESS))) { + PtGAbortExecutor.log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for SURL " + + numOfSurl + + " of " + + totalSize + + " [SURL:" + + surlStatus.getKey() + + "] successfully done with [status: " + + surlReturnStatus.getStatus() + + "]"); } else { - PtGAbortExecutor.log.info("srmAbortFiles: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] for SURL " + numOfSurl - + " of " + totalSize + " [SURL:" + surlStatus.getKey() - + "] failed with [status: " + surlReturnStatus.getStatus() + "]"); + PtGAbortExecutor.log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for SURL " + + numOfSurl + + " of " + + totalSize + + " [SURL:" + + surlStatus.getKey() + + "] failed with [status: " + + surlReturnStatus.getStatus() + + "]"); errorCount++; } // Add returnStatus @@ -301,44 +318,46 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { try { List surls = new ArrayList(surlStatusMap.keySet()); - surlStatusMap = checker.getSURLStatuses(user, - inputData.getRequestToken(), surls); + surlStatusMap = checker.getSURLStatuses(user, inputData.getRequestToken(), surls); } catch (IllegalArgumentException e) { - log - .error("Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " - + e); - throw new IllegalStateException( - "Unexpected IllegalArgumentException: " + e.getMessage()); + log.error( + "Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " + e); + throw new IllegalStateException("Unexpected IllegalArgumentException: " + e.getMessage()); } catch (UnknownTokenException e) { - log - .warn("PtGAbortExecutor: Request - Invalid request token, probably it is expired"); + log.warn("PtGAbortExecutor: Request - Invalid request token, probably it is expired"); - globalStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "Expired request token"); + globalStatus = + new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "Expired request token"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - PtGAbortExecutor.log.info("srmAbortRequest: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() - + "] successfully done with [status: " + globalStatus + "]"); + PtGAbortExecutor.log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] successfully done with [status: " + + globalStatus + + "]"); return outputData; } catch (ExpiredTokenException e) { - log.info("The request is expired: ExpiredTokenException: " - + e.getMessage()); + log.info("The request is expired: ExpiredTokenException: " + e.getMessage()); - globalStatus = new TReturnStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, - "Request expired"); + globalStatus = new TReturnStatus(TStatusCode.SRM_REQUEST_TIMED_OUT, "Request 
expired"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() - + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } log.debug("srmAbortRequest: PtGAbortExecutor: refresh done."); } - } PtGAbortExecutor.log.debug("PtGAbortExecutor: Cycles done."); @@ -347,13 +366,19 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { // The ABORT execution is interrupted to prevent a deadlock situation log.warn("Abort: Timeout exceeded."); - globalStatus = new TReturnStatus(TStatusCode.SRM_INTERNAL_ERROR, - "TimeOut for abort execution exceeded."); + globalStatus = + new TReturnStatus( + TStatusCode.SRM_INTERNAL_ERROR, "TimeOut for abort execution exceeded."); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); - log.warn("srmAbortRequest: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() - + "] failed with [status: " + globalStatus + "]"); + log.warn( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } @@ -362,61 +387,56 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { } else { if (errorCount > 0) { if (inputData.getType().equals(AbortInputData.AbortType.ABORT_REQUEST)) { - globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, - "Some chunks failed."); + globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, "Some chunks failed."); } else { - globalStatus = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Some chunks failed."); + globalStatus = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, "Some chunks failed."); } } else { - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "Abort request completed."); + globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, "Abort request completed."); if ((inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES))) { - TReturnStatus requestStatus = new TReturnStatus( - TStatusCode.SRM_ABORTED, "Request Aborted."); - RequestSummaryCatalog.getInstance().updateGlobalStatus( - inputData.getRequestToken(), requestStatus); + TReturnStatus requestStatus = + new TReturnStatus(TStatusCode.SRM_ABORTED, "Request Aborted."); + RequestSummaryCatalog.getInstance() + .updateGlobalStatus(inputData.getRequestToken(), requestStatus); } } } // Set output data outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); outputData.setReturnStatus(globalStatus); - PtGAbortExecutor.log.info("srmAbortRequest: <" - + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus - + "]"); + PtGAbortExecutor.log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; - } private ArrayOfSURLs extractSurlArray(AbortInputData inputData) { switch (inputData.getType()) { - case ABORT_REQUEST: - throw new IllegalStateException( - "Unable to get SurlArray from an ABORT_REQUEST input data"); - case ABORT_FILES: - return 
((AbortFilesInputData) inputData).getArrayOfSURLs(); - default: - throw new IllegalStateException("Received an unknown AbortType: " - + inputData.getType()); + case ABORT_REQUEST: + throw new IllegalStateException("Unable to get SurlArray from an ABORT_REQUEST input data"); + case ABORT_FILES: + return ((AbortFilesInputData) inputData).getArrayOfSURLs(); + default: + throw new IllegalStateException("Received an unknown AbortType: " + inputData.getType()); } } /** - * * Manage the roll back needed to execute an abort request. - * - * @param chunkData - * PtGChunkData + * + * @param chunkData PtGChunkData * @return returnStatus TSURLReturnStatus * @throws ExpiredTokenException */ - - private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, - TRequestToken token, TSURL surl, TReturnStatus status) - throws ExpiredTokenException { + private TSURLReturnStatus manageAuthorizedAbort( + GridUserInterface user, TRequestToken token, TSURL surl, TReturnStatus status) + throws ExpiredTokenException { TSURLReturnStatus surlReturnStatus = new TSURLReturnStatus(); surlReturnStatus.setSurl(surl); @@ -424,40 +444,36 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, SURLStatusManager manager = SURLStatusManagerFactory.newSURLStatusManager(); if (TStatusCode.SRM_FILE_PINNED.equals(status.getStatusCode()) - || TStatusCode.SRM_REQUEST_QUEUED.equals(status.getStatusCode())) { + || TStatusCode.SRM_REQUEST_QUEUED.equals(status.getStatusCode())) { try { manager.abortRequestForSURL(user, token, surl, "Request aborted."); } catch (IllegalArgumentException e) { - log - .error("Unexpected IllegalArgumentException during surl statuses update: " - + e); - throw new IllegalStateException("Unexpected IllegalArgumentException: " - + e.getMessage()); + log.error("Unexpected IllegalArgumentException during surl statuses update: " + e); + throw new IllegalStateException("Unexpected IllegalArgumentException: " + e.getMessage()); } catch (UnknownTokenException e) { - log - .error("Unexpected UnknownTokenException during surl statuses update: " - + e); - throw new IllegalStateException("Unexpected UnknownTokenException: " - + e.getMessage()); + log.error("Unexpected UnknownTokenException during surl statuses update: " + e); + throw new IllegalStateException("Unexpected UnknownTokenException: " + e.getMessage()); } - surlReturnStatus.setStatus(new TReturnStatus(TStatusCode.SRM_SUCCESS, - "File request successfully aborted.")); + surlReturnStatus.setStatus( + new TReturnStatus(TStatusCode.SRM_SUCCESS, "File request successfully aborted.")); } else { if (TStatusCode.SRM_FILE_LIFETIME_EXPIRED.equals(status.getStatusCode())) { - surlReturnStatus.setStatus(new TReturnStatus(TStatusCode.SRM_FAILURE, - "Request is in a final status. Abort not allowed.")); + surlReturnStatus.setStatus( + new TReturnStatus( + TStatusCode.SRM_FAILURE, "Request is in a final status. Abort not allowed.")); } else { if (TStatusCode.SRM_RELEASED.equals(status.getStatusCode())) { - surlReturnStatus.setStatus(new TReturnStatus(TStatusCode.SRM_FAILURE, - "Request is in a final status. Abort not allowed.")); + surlReturnStatus.setStatus( + new TReturnStatus( + TStatusCode.SRM_FAILURE, "Request is in a final status. 
Abort not allowed.")); } else { - surlReturnStatus.setStatus(new TReturnStatus(TStatusCode.SRM_FAILURE, - "Abort request not executed.")); + surlReturnStatus.setStatus( + new TReturnStatus(TStatusCode.SRM_FAILURE, "Abort request not executed.")); } } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtPAbortExecutor.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtPAbortExecutor.java index 7769663e..ba21a1be 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtPAbortExecutor.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PtPAbortExecutor.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This is the Abort executor for a PtP request. - * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Aug 2006 @@ -12,6 +11,20 @@ */ package it.grid.storm.synchcall.command.datatransfer; +import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; +import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; +import static it.grid.storm.srm.types.TStatusCode.SRM_FAILURE; +import static it.grid.storm.srm.types.TStatusCode.SRM_INTERNAL_ERROR; +import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_PATH; +import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; +import static it.grid.storm.srm.types.TStatusCode.SRM_PARTIAL_SUCCESS; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_INPROGRESS; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_QUEUED; +import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; +import static it.grid.storm.srm.types.TStatusCode.SRM_SPACE_AVAILABLE; +import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; + +import com.google.common.collect.Lists; import it.grid.storm.authz.AuthzException; import it.grid.storm.catalogs.PtPChunkCatalog; import it.grid.storm.catalogs.PtPPersistentChunkData; @@ -45,30 +58,13 @@ import it.grid.storm.synchcall.data.datatransfer.AbortInputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; - -import static it.grid.storm.srm.types.TStatusCode.SRM_ABORTED; -import static it.grid.storm.srm.types.TStatusCode.SRM_AUTHORIZATION_FAILURE; -import static it.grid.storm.srm.types.TStatusCode.SRM_FAILURE; -import static it.grid.storm.srm.types.TStatusCode.SRM_INTERNAL_ERROR; -import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_PATH; -import static it.grid.storm.srm.types.TStatusCode.SRM_INVALID_REQUEST; -import static it.grid.storm.srm.types.TStatusCode.SRM_PARTIAL_SUCCESS; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_INPROGRESS; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_QUEUED; -import static it.grid.storm.srm.types.TStatusCode.SRM_REQUEST_TIMED_OUT; -import static it.grid.storm.srm.types.TStatusCode.SRM_SPACE_AVAILABLE; -import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; - import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; - public class PtPAbortExecutor implements AbortExecutorInterface { private static final Logger log = LoggerFactory.getLogger(PtPAbortExecutor.class); @@ -113,8 +109,8 @@ public 
AbortGeneralOutputData doIt(AbortInputData inputData) { } catch (IllegalArgumentException e) { - log - .error("Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " + e); + log.error( + "Unexpected IllegalArgumentException during SurlStatusManager.getSurlsStatus: " + e); throw new IllegalStateException("Unexpected IllegalArgumentException: " + e.getMessage()); } catch (UnknownTokenException e) { @@ -122,8 +118,14 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, "Invalid request token"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } catch (ExpiredTokenException e) { log.info("The request is expired: ExpiredTokenException: " + e.getMessage()); @@ -131,8 +133,14 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { globalStatus = new TReturnStatus(SRM_REQUEST_TIMED_OUT, "Request expired"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } @@ -142,8 +150,14 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { globalStatus = new TReturnStatus(SRM_INVALID_REQUEST, "Invalid request token"); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } @@ -151,7 +165,6 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { * Get only the SURL requested in a AbortFile Request Define a new Collection to contains the * "epurated" chunk, removing the ones not specified in input request */ - if (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES)) { log.debug( "srmAbortFiles: PtPAbortExecutor: Case of AbortFile request. Purge Chunks with SurlArray."); @@ -160,7 +173,6 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { * Get the related Chunk for each SURL in the input SurlArray. 
If a Surl requested is not * founf, the TSURLReturnStatus related is setted to SRM_INVALID_PATH */ - List surlList = extractSurlArray(inputData).getArrayList(); surlStatusMap.keySet().retainAll(surlList); if (!surlStatusMap.keySet().containsAll(surlList)) { @@ -170,34 +182,48 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { TSURLReturnStatus surlReturnStatus = new TSURLReturnStatus(); surlReturnStatus.setSurl(surl); - surlReturnStatus.setStatus(new TReturnStatus(SRM_INVALID_PATH, - "SURL specified does not referes to this request token.")); - log.info("srmAbortFiles: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() + "] for [SURL:" + surl - + "] failed with [status: " + surlReturnStatus.getStatus() + "]"); + surlReturnStatus.setStatus( + new TReturnStatus( + SRM_INVALID_PATH, "SURL specified does not referes to this request token.")); + log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for [SURL:" + + surl + + "] failed with [status: " + + surlReturnStatus.getStatus() + + "]"); arrayOfTSurlRetStatus.addTSurlReturnStatus(surlReturnStatus); } } } } - /********* Check here the new chunks container is not empty! ******/ + /** ******* Check here the new chunks container is not empty! ***** */ if (surlStatusMap.isEmpty()) { log.debug("Abort Request - No surl specified associated to request token"); - globalStatus = new TReturnStatus(SRM_FAILURE, - "All surl specified does not referes to the request token."); + globalStatus = + new TReturnStatus( + SRM_FAILURE, "All surl specified does not referes to the request token."); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); - log.info("srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + log.info( + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } - /********* - * Phase 1 Wait until the request goes in a status different from REQUEST_QUEUED - ********/ - + /** + * ******* Phase 1 Wait until the request goes in a status different from REQUEST_QUEUED ****** + */ int chunkAborted = 0; // To avoid deadlock @@ -229,7 +255,7 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { log.debug("PtPAbortExecutor: PtPChunk not in IN_PROGRESS state. Ready for Abort."); TSURLReturnStatus surlReturnStatus = new TSURLReturnStatus(); - /******** Phase (3) of Abort *************/ + /** ****** Phase (3) of Abort ************ */ log.debug("PtPAbortExecutor: start Phase(3)"); /* @@ -242,27 +268,52 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { log.debug("PtPAbortExecutor: CHUNK already aborted!"); surlReturnStatus.setSurl(surlStatus.getKey()); - surlReturnStatus - .setStatus(new TReturnStatus(SRM_SUCCESS, "File request successfully aborted.")); + surlReturnStatus.setStatus( + new TReturnStatus(SRM_SUCCESS, "File request successfully aborted.")); } else { log.debug("PtPAbortExecutor:CHUNK to abort."); // Chunk not ABORTED. We have to work... 
- surlReturnStatus = manageAuthorizedAbort(user, inputData.getRequestToken(), - surlStatus.getKey(), surlStatus.getValue(), inputData); + surlReturnStatus = + manageAuthorizedAbort( + user, + inputData.getRequestToken(), + surlStatus.getKey(), + surlStatus.getValue(), + inputData); } // Remove this chunks from the other to abort. iterator.remove(); if ((surlReturnStatus.getStatus().getStatusCode().equals(SRM_SUCCESS))) { - log.info("srmAbortFiles: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() + "] for SURL " + numOfSurl - + " of " + totalSize + " [SURL:" + surlStatus.getKey() - + "] successfully done with [status: " + surlReturnStatus.getStatus() + "]"); + log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for SURL " + + numOfSurl + + " of " + + totalSize + + " [SURL:" + + surlStatus.getKey() + + "] successfully done with [status: " + + surlReturnStatus.getStatus() + + "]"); } else { - log.info("srmAbortFiles: <" + DataHelper.getRequestor(inputData) - + "> Request for [token:" + inputData.getRequestToken() + "] for SURL " + numOfSurl - + " of " + totalSize + " [SURL:" + surlStatus.getKey() + "] failed with [status: " - + surlReturnStatus.getStatus() + "]"); + log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] for SURL " + + numOfSurl + + " of " + + totalSize + + " [SURL:" + + surlStatus.getKey() + + "] failed with [status: " + + surlReturnStatus.getStatus() + + "]"); errorCount++; } // Add returnStatus @@ -270,9 +321,7 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { // Increment number of chunk aborted chunkAborted++; - } - } // while chunk Collection if (chunkAborted < totalSize) { @@ -289,8 +338,11 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { try { List surls = Lists.newArrayList(surlStatusMap.keySet()); - surlStatusMap = checker.getSURLStatuses(AbstractCommand.getUserFromInputData(inputData), - inputData.getRequestToken(), surls); + surlStatusMap = + checker.getSURLStatuses( + AbstractCommand.getUserFromInputData(inputData), + inputData.getRequestToken(), + surls); } catch (AuthzException e) { log.error(e.getMessage(), e); @@ -313,8 +365,13 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); log.info( - "srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } catch (ExpiredTokenException e) { log.info("The request is expired: ExpiredTokenException: " + e.getMessage()); @@ -323,13 +380,17 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(null); log.info( - "srmAbortRequest: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "] failed with [status: " + globalStatus + "]"); + "srmAbortRequest: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "] failed with [status: " + + globalStatus + + "]"); return outputData; } 
log.debug("srmAbortRequest: PtPAbortExecutor: refresh done."); } - } // LoopTimes Exceeded? @@ -340,12 +401,18 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { globalStatus = new TReturnStatus(SRM_INTERNAL_ERROR, "TimeOut for abort execution exceeded."); outputData.setReturnStatus(globalStatus); outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); - log.error("srmAbortFiles: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "]" - + (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) - ? " for [SURL:" + extractSurlArray(inputData) + "]" - : "") - + " completed with [status: " + globalStatus + "]"); + log.error( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "]" + + (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) + ? " for [SURL:" + extractSurlArray(inputData) + "]" + : "") + + " completed with [status: " + + globalStatus + + "]"); return outputData; } @@ -365,20 +432,25 @@ public AbortGeneralOutputData doIt(AbortInputData inputData) { TReturnStatus requestStatus = new TReturnStatus(SRM_ABORTED, "Request Aborted."); RequestSummaryCatalog.getInstance() - .updateGlobalStatus(inputData.getRequestToken(), requestStatus); + .updateGlobalStatus(inputData.getRequestToken(), requestStatus); } } // Set output data outputData.setArrayOfFileStatuses(arrayOfTSurlRetStatus); outputData.setReturnStatus(globalStatus); - log.info("srmAbortFiles: <" + DataHelper.getRequestor(inputData) + "> Request for [token:" - + inputData.getRequestToken() + "]" - + (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) - ? " for [SURL:" + extractSurlArray(inputData) + "]" - : "") - + " completed with [status: " + globalStatus + "]"); + log.info( + "srmAbortFiles: <" + + DataHelper.getRequestor(inputData) + + "> Request for [token:" + + inputData.getRequestToken() + + "]" + + (inputData.getType().equals(AbortInputData.AbortType.ABORT_FILES) + ? " for [SURL:" + extractSurlArray(inputData) + "]" + : "") + + " completed with [status: " + + globalStatus + + "]"); return outputData; - } private ArrayOfSURLs extractSurlArray(AbortInputData inputData) { @@ -394,15 +466,17 @@ private ArrayOfSURLs extractSurlArray(AbortInputData inputData) { } /** - * * Manage the roll back needed to execute an abort request. - * + * * @param chunkData PtGChunkData * @return returnStatus TSURLReturnStatus */ - - private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequestToken token, - TSURL surl, TReturnStatus status, AbortInputData inputData) { + private TSURLReturnStatus manageAuthorizedAbort( + GridUserInterface user, + TRequestToken token, + TSURL surl, + TReturnStatus status, + AbortInputData inputData) { boolean failure = false; namespace = NamespaceDirector.getNamespace(); @@ -441,8 +515,12 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest } if (chunkData == null) { - throw new IllegalStateException("Unexpected condition. token " + token - + " stored on the db but no associated surl " + surl + " found"); + throw new IllegalStateException( + "Unexpected condition. token " + + token + + " stored on the db but no associated surl " + + surl + + " found"); } if (!(chunkData.getSpaceToken().isEmpty())) { TSpaceToken sToken = chunkData.getSpaceToken(); @@ -470,8 +548,9 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest * With fileSize specified. 
In this case the spaceFile have to be increased from the free * block of the removed put placeholder. */ - reserveSpaceStatus = rexec.updateReservation(chunkData.getSpaceToken(), - chunkData.expectedFileSize(), chunkData.getSURL()); + reserveSpaceStatus = + rexec.updateReservation( + chunkData.getSpaceToken(), chunkData.expectedFileSize(), chunkData.getSURL()); if (!reserveSpaceStatus.getStatusCode().equals(SRM_SUCCESS)) { // Something goes wrong failure = true; @@ -495,12 +574,14 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest } } catch (UnapprochableSurlException e) { if (inputData instanceof IdentityInputData) { - log.info(String.format( - "Unable to build a stori for surl %s for user %s UnapprochableSurlException: %s", - surl, DataHelper.getRequestor(inputData), e.getMessage())); + log.info( + String.format( + "Unable to build a stori for surl %s for user %s UnapprochableSurlException: %s", + surl, DataHelper.getRequestor(inputData), e.getMessage())); } else { log.info( - String.format("Unable to build a stori for surl %s UnapprochableSurlException: %s", + String.format( + "Unable to build a stori for surl %s UnapprochableSurlException: %s", surl, e.getMessage())); } @@ -517,12 +598,15 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest return surlReturnStatus; } catch (NamespaceException e) { if (inputData instanceof IdentityInputData) { - log.info(String.format( - "Unable to build a stori for surl %s for user %s NamespaceException: %s", surl, - DataHelper.getRequestor(inputData), e.getMessage())); + log.info( + String.format( + "Unable to build a stori for surl %s for user %s NamespaceException: %s", + surl, DataHelper.getRequestor(inputData), e.getMessage())); } else { - log.info(String.format("Unable to build a stori for surl %s NamespaceException: %s", surl, - e.getMessage())); + log.info( + String.format( + "Unable to build a stori for surl %s NamespaceException: %s", + surl, e.getMessage())); } manager.failRequestForSURL(user, token, surl, SRM_INTERNAL_ERROR, e.getMessage()); @@ -531,12 +615,15 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest return surlReturnStatus; } catch (InvalidSURLException e) { if (inputData instanceof IdentityInputData) { - log.info(String.format( - "Unable to build a stori for surl %s for user %s InvalidSURLException: %s", surl, - DataHelper.getRequestor(inputData), e.getMessage())); + log.info( + String.format( + "Unable to build a stori for surl %s for user %s InvalidSURLException: %s", + surl, DataHelper.getRequestor(inputData), e.getMessage())); } else { - log.info(String.format("Unable to build a stori for surl %s InvalidSURLException: %s", - surl, e.getMessage())); + log.info( + String.format( + "Unable to build a stori for surl %s InvalidSURLException: %s", + surl, e.getMessage())); } manager.failRequestForSURL(user, token, surl, SRM_INVALID_PATH, e.getMessage()); @@ -571,16 +658,17 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest res = manager.abortRequestForSURL(user, token, surl, "Request aborted."); if (res) { - surlReturnStatus - .setStatus(new TReturnStatus(SRM_SUCCESS, "File request successfully aborted.")); + surlReturnStatus.setStatus( + new TReturnStatus(SRM_SUCCESS, "File request successfully aborted.")); try { NamespaceDirector.getNamespace() - .resolveVFSbyLocalFile(fileToRemove) - .decreaseUsedSpace(sizeToRemove); + .resolveVFSbyLocalFile(fileToRemove) + .decreaseUsedSpace(sizeToRemove); } catch 
(NamespaceException e) { log.error(e.getMessage()); - surlReturnStatus.getStatus() - .extendExplaination("Unable to decrease used space: " + e.getMessage()); + surlReturnStatus + .getStatus() + .extendExplaination("Unable to decrease used space: " + e.getMessage()); } return surlReturnStatus; } else { @@ -588,7 +676,5 @@ private TSURLReturnStatus manageAuthorizedAbort(GridUserInterface user, TRequest surlReturnStatus.setStatus(new TReturnStatus(SRM_INTERNAL_ERROR, "File not removed.")); return surlReturnStatus; } - } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommand.java index b071f5fb..43782f2b 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -18,16 +17,8 @@ import static it.grid.storm.srm.types.TStatusCode.SRM_SUCCESS; import static it.grid.storm.synchcall.command.CommandHelper.buildStatus; -import java.util.Calendar; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.base.Preconditions; import com.google.common.collect.Lists; - import it.grid.storm.authz.AuthzException; import it.grid.storm.catalogs.VolatileAndJiTCatalog; import it.grid.storm.catalogs.surl.SURLStatusManager; @@ -57,11 +48,13 @@ import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestFilesInputData; import it.grid.storm.synchcall.surl.ExpiredTokenException; import it.grid.storm.synchcall.surl.UnknownTokenException; +import java.util.Calendar; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - */ - +/** This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. 
*/ public class PutDoneCommand extends DataTransferCommand implements Command { private static final Logger log = LoggerFactory.getLogger(PutDoneCommand.class); @@ -76,7 +69,8 @@ private ManageFileTransferRequestFilesInputData inputDataSanityCheck(InputData i try { checkNotNull(inputData, "PutDone: Invalid null input data"); - checkArgument(inputData instanceof ManageFileTransferRequestFilesInputData, + checkArgument( + inputData instanceof ManageFileTransferRequestFilesInputData, "PutDone: Invalid input data class"); data = (ManageFileTransferRequestFilesInputData) inputData; checkNotNull(data.getRequestToken(), "PutDone: Invalid null request token"); @@ -85,15 +79,17 @@ private ManageFileTransferRequestFilesInputData inputDataSanityCheck(InputData i } catch (NullPointerException | IllegalArgumentException e) { - log.error("PutDone: Invalid input parameters specified [{}: {}]", e.getClass().getName(), + log.error( + "PutDone: Invalid input parameters specified [{}: {}]", + e.getClass().getName(), e.getMessage()); throw new PutDoneCommandException(buildStatus(SRM_INVALID_REQUEST, e.getMessage()), e); } return data; } - - private TReturnStatus buildGlobalStatus(boolean atLeastOneSuccess, - boolean atLeastOneFailure, boolean atLeastOneAborted) { + + private TReturnStatus buildGlobalStatus( + boolean atLeastOneSuccess, boolean atLeastOneFailure, boolean atLeastOneAborted) { if (atLeastOneSuccess) { if (!atLeastOneFailure && !atLeastOneAborted) { @@ -105,7 +101,7 @@ private TReturnStatus buildGlobalStatus(boolean atLeastOneSuccess, if (atLeastOneFailure) { if (!atLeastOneAborted) { return buildStatus(SRM_FAILURE, "All file requests are failed"); - } + } return buildStatus(SRM_FAILURE, "Some file requests are failed, the others are aborted"); } @@ -118,32 +114,33 @@ private TReturnStatus buildGlobalStatus(boolean atLeastOneSuccess, return buildStatus(SRM_INTERNAL_ERROR, "Request Failed, no surl status recognized, retry."); } - private void markSURLsReadyForRead(TRequestToken requestToken, List spaceAvailableSURLs) throws PutDoneCommandException { - + private void markSURLsReadyForRead(TRequestToken requestToken, List spaceAvailableSURLs) + throws PutDoneCommandException { + if (spaceAvailableSURLs.isEmpty()) { log.debug("markSURLsReadyForRead: empty spaceAvailableSURLs"); return; } - + SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager(); try { - + checker.markSURLsReadyForRead(requestToken, spaceAvailableSURLs); } catch (IllegalArgumentException e) { - + log.error("PutDone: Unexpected IllegalArgumentException '{}'", e.getMessage()); - throw new PutDoneCommandException(CommandHelper.buildStatus(SRM_INTERNAL_ERROR, "Request Failed, retry."), e); + throw new PutDoneCommandException( + CommandHelper.buildStatus(SRM_INTERNAL_ERROR, "Request Failed, retry."), e); } } - + private ArrayOfTSURLReturnStatus loadSURLsStatuses( - ManageFileTransferRequestFilesInputData inputData) - throws PutDoneCommandException { - + ManageFileTransferRequestFilesInputData inputData) throws PutDoneCommandException { + TRequestToken requestToken = inputData.getRequestToken(); List listOfSURLs = inputData.getArrayOfSURLs().getArrayList(); - + ArrayOfTSURLReturnStatus surlsStatuses = null; try { @@ -156,87 +153,80 @@ private ArrayOfTSURLReturnStatus loadSURLsStatuses( } catch (IllegalArgumentException e) { - log.error("PutDone: Unexpected IllegalArgumentException: {}", - e.getMessage(), e); + log.error("PutDone: Unexpected IllegalArgumentException: {}", e.getMessage(), e); throw new 
PutDoneCommandException(buildStatus(SRM_INTERNAL_ERROR, "Request Failed, retry.")); } catch (RequestUnknownException e) { log.info( - "PutDone: Invalid request token and surl. RequestUnknownException: {}", - e.getMessage(), e); + "PutDone: Invalid request token and surl. RequestUnknownException: {}", + e.getMessage(), + e); throw new PutDoneCommandException( buildStatus(SRM_INVALID_REQUEST, "Invalid request token and surls")); } catch (UnknownTokenException e) { - log.info("PutDone: Invalid request token. UnknownTokenException: {}", - e.getMessage(), e); + log.info("PutDone: Invalid request token. UnknownTokenException: {}", e.getMessage(), e); throw new PutDoneCommandException(buildStatus(SRM_INVALID_REQUEST, "Invalid request token")); } catch (ExpiredTokenException e) { - log.info("PutDone: The request is expired: ExpiredTokenException: {}", - e.getMessage(), e); + log.info("PutDone: The request is expired: ExpiredTokenException: {}", e.getMessage(), e); throw new PutDoneCommandException(buildStatus(SRM_REQUEST_TIMED_OUT, "Request expired")); } return surlsStatuses; } - - + /** - * Implements the srmPutDone. Used to notify the SRM that the client completed - * a file transfer to the TransferURL in the allocated space (by a - * PrepareToPut). + * Implements the srmPutDone. Used to notify the SRM that the client completed a file transfer to + * the TransferURL in the allocated space (by a PrepareToPut). */ public OutputData execute(InputData absData) { log.debug("PutDone: Started."); - + TReturnStatus globalStatus = null; ArrayOfTSURLReturnStatus surlsStatuses = null; - + boolean atLeastOneSuccess = false; boolean atLeastOneFailure = false; boolean atLeastOneAborted = false; ManageFileTransferRequestFilesInputData inputData = null; try { - + inputData = inputDataSanityCheck(absData); - + } catch (PutDoneCommandException e) { printRequestOutcome(e.getReturnStatus()); return new ManageFileTransferOutputData(e.getReturnStatus()); } - GridUserInterface user = inputData instanceof IdentityInputData - ? ((IdentityInputData) inputData).getUser() : null; + GridUserInterface user = + inputData instanceof IdentityInputData ? 
((IdentityInputData) inputData).getUser() : null; TRequestToken requestToken = inputData.getRequestToken(); List spaceAvailableSURLs = Lists.newArrayList(); - + try { - + surlsStatuses = loadSURLsStatuses(inputData); - + } catch (PutDoneCommandException e) { - + printRequestOutcome(e.getReturnStatus(), inputData); - return new ManageFileTransferOutputData(e.getReturnStatus()); + return new ManageFileTransferOutputData(e.getReturnStatus()); } - - + for (TSURLReturnStatus surlStatus : surlsStatuses.getArray()) { - + TReturnStatus newStatus; TReturnStatus currentStatus = surlStatus.getStatus(); - - switch (currentStatus.getStatusCode()) { + switch (currentStatus.getStatusCode()) { case SRM_SPACE_AVAILABLE: - spaceAvailableSURLs.add(surlStatus.getSurl()); // DO PutDone try { @@ -252,19 +242,16 @@ public OutputData execute(InputData absData) { break; case SRM_SUCCESS: - newStatus = buildStatus(SRM_DUPLICATION_ERROR, "Duplication error"); atLeastOneFailure = true; break; case SRM_ABORTED: - newStatus = buildStatus(SRM_INVALID_PATH, "PtP status for this SURL is SRM_ABORTED"); atLeastOneAborted = true; break; default: - newStatus = buildStatus(SRM_FAILURE, "Check StatusOfPutRequest for more information"); atLeastOneFailure = true; break; @@ -272,24 +259,23 @@ public OutputData execute(InputData absData) { surlsStatuses.updateStatus(surlStatus, newStatus); } - + try { - + markSURLsReadyForRead(requestToken, spaceAvailableSURLs); } catch (PutDoneCommandException e) { - + printRequestOutcome(e.getReturnStatus(), inputData); - return new ManageFileTransferOutputData(e.getReturnStatus()); + return new ManageFileTransferOutputData(e.getReturnStatus()); } - + log.debug("PutDone: Computing final global status ..."); - globalStatus = buildGlobalStatus(atLeastOneSuccess, atLeastOneFailure, - atLeastOneAborted); - + globalStatus = buildGlobalStatus(atLeastOneSuccess, atLeastOneFailure, atLeastOneAborted); + log.debug("PutDone: Finished with status {}", globalStatus); printRequestOutcome(globalStatus, inputData); - + return new ManageFileTransferOutputData(globalStatus, surlsStatuses); } @@ -299,31 +285,36 @@ private static void printRequestOutcome(TReturnStatus status) { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); } - private static void printRequestOutcome(TReturnStatus status, ManageFileTransferRequestFilesInputData inputData) { + private static void printRequestOutcome( + TReturnStatus status, ManageFileTransferRequestFilesInputData inputData) { Preconditions.checkNotNull(inputData); Preconditions.checkNotNull(status); - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData, - inputData.getRequestToken(), inputData.getArrayOfSURLs().asStringList()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + status, + inputData, + inputData.getRequestToken(), + inputData.getArrayOfSURLs().asStringList()); } - private ArrayOfTSURLReturnStatus loadSURLsStatus(GridUserInterface user, - TRequestToken requestToken, List inputSURLs) - throws RequestUnknownException { + private ArrayOfTSURLReturnStatus loadSURLsStatus( + GridUserInterface user, TRequestToken requestToken, List inputSURLs) + throws RequestUnknownException { - ArrayOfTSURLReturnStatus returnStatuses = new ArrayOfTSURLReturnStatus( - inputSURLs.size()); + ArrayOfTSURLReturnStatus returnStatuses = new ArrayOfTSURLReturnStatus(inputSURLs.size()); SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager(); - Map surlsStatuses = checker.getSURLStatuses(user, - requestToken, inputSURLs); + Map 
surlsStatuses = + checker.getSURLStatuses(user, requestToken, inputSURLs); if (surlsStatuses.isEmpty()) { log.info("PutDone: No one of the requested surls found for the provided token"); throw new RequestUnknownException( - "No one of the requested surls found for the provided token"); + "No one of the requested surls found for the provided token"); } TReturnStatus status = null; @@ -336,8 +327,10 @@ private ArrayOfTSURLReturnStatus loadSURLsStatus(GridUserInterface user, status = surlsStatuses.get(surl); } else { log.debug("PutDone: SURL '{}' NOT found in the DB!", surl); - status = new TReturnStatus(SRM_INVALID_PATH, - "SURL does not refer to an existing file for the specified request token"); + status = + new TReturnStatus( + SRM_INVALID_PATH, + "SURL does not refer to an existing file for the specified request token"); } TSURLReturnStatus surlRetStatus = new TSURLReturnStatus(surl, status); returnStatuses.addTSurlReturnStatus(surlRetStatus); @@ -345,100 +338,101 @@ private ArrayOfTSURLReturnStatus loadSURLsStatus(GridUserInterface user, return returnStatuses; } - public static boolean executePutDone(TSURL surl) throws PutDoneCommandException { - return executePutDone(surl, null); + public static boolean executePutDone(TSURL surl) throws PutDoneCommandException { + return executePutDone(surl, null); + } + + public static boolean executePutDone(TSURL surl, GridUserInterface user) + throws PutDoneCommandException { + + Preconditions.checkNotNull(surl, "Null SURL received"); + + log.debug("Executing PutDone for SURL: {}", surl.getSURLString()); + + String userStr = user == null ? "Anonymous" : user.toString(); + StoRI stori = null; + + try { + + stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl, user); + + } catch (IllegalArgumentException e) { + + log.error( + String.format( + "User %s is unable to build a stori for surl %s, %s: %s", + userStr, surl, e.getClass().getName(), e.getMessage())); + throw new PutDoneCommandException(buildStatus(SRM_INTERNAL_ERROR, e.getMessage()), e); + + } catch (Exception e) { + + log.info( + String.format( + "User %s is unable to build a stori for surl %s, %s: %s", + userStr, surl, e.getClass().getName(), e.getMessage()), + e); + return false; } - public static boolean executePutDone(TSURL surl, GridUserInterface user) - throws PutDoneCommandException { - - Preconditions.checkNotNull(surl, "Null SURL received"); - - log.debug("Executing PutDone for SURL: {}", surl.getSURLString()); - - String userStr = user == null ? 
"Anonymous" : user.toString(); - StoRI stori = null; - - try { - - stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl, user); - - } catch (IllegalArgumentException e) { - - log.error( - String.format("User %s is unable to build a stori for surl %s, %s: %s", - userStr, surl, e.getClass().getName(), e.getMessage())); - throw new PutDoneCommandException(buildStatus(SRM_INTERNAL_ERROR, e.getMessage()), e); - - } catch (Exception e) { - - log.info( - String.format("User %s is unable to build a stori for surl %s, %s: %s", - userStr, surl, e.getClass().getName(), e.getMessage()), e); - return false; - - } - - // 1- if the SURL is volatile update the entry in the Volatile table - if (VolatileAndJiTCatalog.getInstance().exists(stori.getPFN())) { - try { - VolatileAndJiTCatalog.getInstance().setStartTime(stori.getPFN(), - Calendar.getInstance()); - } catch (Exception e) { - // impossible because of the "exists" check - } - } - - // 2- JiTs must me removed from the TURL - if (stori.hasJustInTimeACLs()) { - log.debug("PutDone: JiT case, removing ACEs on SURL: " + surl.toString()); - // Retrieve the PFN of the SURL parents - List storiParentsList = stori.getParents(); - List pfnParentsList = Lists.newArrayList(); - - for (StoRI parentStoRI : storiParentsList) { - pfnParentsList.add(parentStoRI.getPFN()); - } - LocalUser localUser = null; - try { - if (user != null) { - localUser = user.getLocalUser(); - } - } catch (CannotMapUserException e) { - log.warn( - "PutDone: Unable to get the local user for user {}. CannotMapUserException: {}", - user, e.getMessage(), e); - } - if (localUser != null) { - VolatileAndJiTCatalog.getInstance().expirePutJiTs(stori.getPFN(), - localUser); - } else { - VolatileAndJiTCatalog.getInstance().removeAllJiTsOn(stori.getPFN()); - } - } - - // 3- compute the checksum and store it in an extended attribute - LocalFile localFile = stori.getLocalFile(); - - VirtualFS vfs = null; - try { - vfs = NamespaceDirector.getNamespace().resolveVFSbyLocalFile(localFile); - } catch (NamespaceException e) { - log.error(e.getMessage(), e); - return false; - } - - // 4- Tape stuff management. - if (vfs.getStorageClassType().isTapeEnabled()) { - String fileAbosolutePath = localFile.getAbsolutePath(); - StormEA.removePinned(fileAbosolutePath); - StormEA.setPremigrate(fileAbosolutePath); - } - - // 5- Update UsedSpace into DB - vfs.increaseUsedSpace(localFile.getSize()); - - return true; - } + // 1- if the SURL is volatile update the entry in the Volatile table + if (VolatileAndJiTCatalog.getInstance().exists(stori.getPFN())) { + try { + VolatileAndJiTCatalog.getInstance().setStartTime(stori.getPFN(), Calendar.getInstance()); + } catch (Exception e) { + // impossible because of the "exists" check + } + } + // 2- JiTs must me removed from the TURL + if (stori.hasJustInTimeACLs()) { + log.debug("PutDone: JiT case, removing ACEs on SURL: " + surl.toString()); + // Retrieve the PFN of the SURL parents + List storiParentsList = stori.getParents(); + List pfnParentsList = Lists.newArrayList(); + + for (StoRI parentStoRI : storiParentsList) { + pfnParentsList.add(parentStoRI.getPFN()); + } + LocalUser localUser = null; + try { + if (user != null) { + localUser = user.getLocalUser(); + } + } catch (CannotMapUserException e) { + log.warn( + "PutDone: Unable to get the local user for user {}. 
CannotMapUserException: {}", + user, + e.getMessage(), + e); + } + if (localUser != null) { + VolatileAndJiTCatalog.getInstance().expirePutJiTs(stori.getPFN(), localUser); + } else { + VolatileAndJiTCatalog.getInstance().removeAllJiTsOn(stori.getPFN()); + } + } + + // 3- compute the checksum and store it in an extended attribute + LocalFile localFile = stori.getLocalFile(); + + VirtualFS vfs = null; + try { + vfs = NamespaceDirector.getNamespace().resolveVFSbyLocalFile(localFile); + } catch (NamespaceException e) { + log.error(e.getMessage(), e); + return false; + } + + // 4- Tape stuff management. + if (vfs.getStorageClassType().isTapeEnabled()) { + String fileAbosolutePath = localFile.getAbsolutePath(); + StormEA.removePinned(fileAbosolutePath); + StormEA.setPremigrate(fileAbosolutePath); + } + + // 5- Update UsedSpace into DB + vfs.increaseUsedSpace(localFile.getSize()); + + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommandException.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommandException.java index f288adfd..62e84fce 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommandException.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/PutDoneCommandException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -8,23 +7,24 @@ public class PutDoneCommandException extends Exception { - private static final long serialVersionUID = 1L; - - private TReturnStatus status; - - public PutDoneCommandException(TReturnStatus status) { - - super(String.format("%s: %s", status.getStatusCode().getValue(), status.getExplanation())); - this.status = status; - } - - public PutDoneCommandException(TReturnStatus status, Throwable cause) { - - super(String.format("%s: %s", status.getStatusCode().getValue(), status.getExplanation()), cause); - this.status = status; - } - - public TReturnStatus getReturnStatus() { - return status; - } - } \ No newline at end of file + private static final long serialVersionUID = 1L; + + private TReturnStatus status; + + public PutDoneCommandException(TReturnStatus status) { + + super(String.format("%s: %s", status.getStatusCode().getValue(), status.getExplanation())); + this.status = status; + } + + public PutDoneCommandException(TReturnStatus status, Throwable cause) { + + super( + String.format("%s: %s", status.getStatusCode().getValue(), status.getExplanation()), cause); + this.status = status; + } + + public TReturnStatus getReturnStatus() { + return status; + } +} diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/ReleaseFilesCommand.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/ReleaseFilesCommand.java index 1e60f221..7395e867 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/ReleaseFilesCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/ReleaseFilesCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; @@ -27,45 +26,34 @@ import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferOutputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestFilesInputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestInputData; - import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; import java.util.LinkedList; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * - * - * Authors: - * + * + *

Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it * @author Alberto Forti - * * @date = Oct 10, 2008 - * */ - public class ReleaseFilesCommand extends DataTransferCommand implements Command { - private static final Logger log = LoggerFactory - .getLogger(ReleaseFilesCommand.class); + private static final Logger log = LoggerFactory.getLogger(ReleaseFilesCommand.class); private static final String SRM_COMMAND = "srmReleaseFiles"; - private static final EnumSet PINNED_OR_SUCCESS = EnumSet.of( - TStatusCode.SRM_SUCCESS, TStatusCode.SRM_FILE_PINNED); + private static final EnumSet PINNED_OR_SUCCESS = + EnumSet.of(TStatusCode.SRM_SUCCESS, TStatusCode.SRM_FILE_PINNED); - public ReleaseFilesCommand() { - - } + public ReleaseFilesCommand() {} public TRequestToken getTokenFromInputData(InputData inputData) { @@ -79,8 +67,7 @@ public TRequestToken getTokenFromInputData(InputData inputData) { public List getSURLListFromInputData(InputData inputData) { if (inputDataHasSURLArray(inputData)) { - return ((ManageFileTransferFilesInputData) inputData).getArrayOfSURLs() - .getArrayList(); + return ((ManageFileTransferFilesInputData) inputData).getArrayOfSURLs().getArrayList(); } return null; } @@ -88,53 +75,50 @@ public List getSURLListFromInputData(InputData inputData) { private List toStringList(List surls) { List ls = new ArrayList(); - for (TSURL s : surls) - ls.add(s.getSURLString()); + for (TSURL s : surls) ls.add(s.getSURLString()); return ls; } public boolean validInputData(InputData inputData) { return (inputData instanceof ManageFileTransferRequestFilesInputData) - || (inputData instanceof ManageFileTransferFilesInputData) - || (inputData instanceof ManageFileTransferRequestInputData); + || (inputData instanceof ManageFileTransferFilesInputData) + || (inputData instanceof ManageFileTransferRequestInputData); } public boolean inputDataHasToken(InputData inputData) { return (inputData instanceof ManageFileTransferRequestFilesInputData) - || (inputData instanceof ManageFileTransferRequestInputData); + || (inputData instanceof ManageFileTransferRequestInputData); } public boolean inputDataHasSURLArray(InputData inputData) { return (inputData instanceof ManageFileTransferRequestFilesInputData) - || (inputData instanceof ManageFileTransferFilesInputData); + || (inputData instanceof ManageFileTransferFilesInputData); } public OutputData handleNullInputData(InputData inputData) { - log.error("ReleaseFiles: Invalid input parameters specified: inputData=" - + inputData); + log.error("ReleaseFiles: Invalid input parameters specified: inputData=" + inputData); - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, - "Empty request parametes")); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, "Empty request parametes")); logRequestOutcome(outputData.getReturnStatus(), inputData); return outputData; - } - public OutputData handleInvalidRequest(InputData in, - IllegalArgumentException e) { + public OutputData handleInvalidRequest(InputData in, IllegalArgumentException e) { log.warn(e.getMessage(), e); - ManageFileTransferOutputData outputData = new ManageFileTransferOutputData( - CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, - "Internal error: " + e.getMessage())); + ManageFileTransferOutputData outputData = + new ManageFileTransferOutputData( + CommandHelper.buildStatus( + TStatusCode.SRM_FAILURE, "Internal error: " + 
e.getMessage())); logRequestOutcome(outputData.getReturnStatus(), in); @@ -145,14 +129,14 @@ public OutputData handleNoSURLsFound(InputData in) { log.info("No SURLs found in the DB. Request failed"); - TReturnStatus returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_REQUEST, - "No SURLs found matching user, input request token or list of SURLs."); + TReturnStatus returnStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, + "No SURLs found matching user, input request token or list of SURLs."); logRequestOutcome(returnStatus, in); return new ManageFileTransferOutputData(returnStatus); - } private boolean isAnonymousRequest(InputData inputData) { @@ -161,9 +145,8 @@ private boolean isAnonymousRequest(InputData inputData) { } /** - * Does a ReleaseFiles. Used to release pins on the previously requested - * "copies" (or "state") of the SURL. This function normally follows a - * srmPrepareToGet or srmBringOnline functions. + * Does a ReleaseFiles. Used to release pins on the previously requested "copies" (or "state") of + * the SURL. This function normally follows a srmPrepareToGet or srmBringOnline functions. */ public OutputData execute(InputData inputData) { @@ -172,12 +155,11 @@ public OutputData execute(InputData inputData) { if (inputData == null) { return handleNullInputData(inputData); - } if (!validInputData(inputData)) { throw new IllegalArgumentException( - "Release files: invalid argument type: " + inputData.getClass()); + "Release files: invalid argument type: " + inputData.getClass()); } Map surlStatuses = null; @@ -195,14 +177,13 @@ public OutputData execute(InputData inputData) { try { if (token == null) { - surlStatuses = checker.getPinnedSURLsForUser(user, - getSURLListFromInputData(inputData)); + surlStatuses = checker.getPinnedSURLsForUser(user, getSURLListFromInputData(inputData)); } else { - surlStatuses = checker - .getSURLStatuses(user, getTokenFromInputData(inputData), - getSURLListFromInputData(inputData)); + surlStatuses = + checker.getSURLStatuses( + user, getTokenFromInputData(inputData), getSURLListFromInputData(inputData)); } } catch (AuthzException e) { @@ -210,21 +191,20 @@ public OutputData execute(InputData inputData) { } catch (IllegalArgumentException e) { return handleInvalidRequest(inputData, e); - } if (surlStatuses.isEmpty()) { return handleNoSURLsFound(inputData); } - ArrayOfTSURLReturnStatus surlReturnStatuses = prepareSurlsReturnStatus( - surlStatuses, getSURLListFromInputData(inputData)); + ArrayOfTSURLReturnStatus surlReturnStatuses = + prepareSurlsReturnStatus(surlStatuses, getSURLListFromInputData(inputData)); List surlToRelease = extractSurlToRelease(surlReturnStatuses); if (surlToRelease.isEmpty()) { - TReturnStatus returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_FAILURE, "No files released"); + TReturnStatus returnStatus = + CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "No files released"); logRequestOutcome(returnStatus, inputData); @@ -250,16 +230,16 @@ private OutputData handleAuthzError(InputData inputData, AuthzException e) { log.error(e.getMessage()); - TReturnStatus returnStatus = CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage()); + TReturnStatus returnStatus = + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage()); logRequestOutcome(returnStatus, inputData); return new ManageFileTransferOutputData(returnStatus); } - private TReturnStatus buildStatus(InputData inputData, - ArrayOfTSURLReturnStatus surlReturnStatuses) { + 
private TReturnStatus buildStatus( + InputData inputData, ArrayOfTSURLReturnStatus surlReturnStatuses) { boolean atLeastOneReleased = false; boolean atLeastOneFailure = false; @@ -268,37 +248,32 @@ private TReturnStatus buildStatus(InputData inputData, printSurlOutcome(returnStatus, inputData); - if (returnStatus.getStatus().getStatusCode() - .equals(TStatusCode.SRM_SUCCESS)) { + if (returnStatus.getStatus().getStatusCode().equals(TStatusCode.SRM_SUCCESS)) { atLeastOneReleased = true; } else { atLeastOneFailure = true; - } } if (atLeastOneReleased) { if (atLeastOneFailure) { - return CommandHelper.buildStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Check files status for details"); + return CommandHelper.buildStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, "Check files status for details"); } else { - return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, - "Files released"); + return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, "Files released"); } } else { - return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, - "No files released"); + return CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "No files released"); } } private ArrayOfTSURLReturnStatus prepareSurlsReturnStatus( - Map statuses, List surlsInRequest) { + Map statuses, List surlsInRequest) { - ArrayOfTSURLReturnStatus surlReturnStatuses = new ArrayOfTSURLReturnStatus( - statuses.size()); + ArrayOfTSURLReturnStatus surlReturnStatuses = new ArrayOfTSURLReturnStatus(statuses.size()); Collection surls; @@ -317,12 +292,10 @@ private ArrayOfTSURLReturnStatus prepareSurlsReturnStatus( returnStatus = prepareStatus(rs.getStatusCode()); } else { - returnStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, - "Invalid SURL"); + returnStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, "Invalid SURL"); } - surlReturnStatuses.addTSurlReturnStatus(CommandHelper.buildStatus(surl, - returnStatus)); + surlReturnStatuses.addTSurlReturnStatus(CommandHelper.buildStatus(surl, returnStatus)); } return surlReturnStatuses; @@ -334,19 +307,17 @@ private TReturnStatus prepareStatus(TStatusCode status) { return CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, "Released"); } - return CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, - "Not released because it is not pinned"); + return CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_PATH, "Not released because it is not pinned"); } - private List extractSurlToRelease( - ArrayOfTSURLReturnStatus surlReturnStatuses) { + private List extractSurlToRelease(ArrayOfTSURLReturnStatus surlReturnStatuses) { LinkedList surlToRelease = new LinkedList(); for (TSURLReturnStatus returnStatus : surlReturnStatuses.getArray()) { - if (TStatusCode.SRM_SUCCESS.equals(returnStatus.getStatus() - .getStatusCode())) { + if (TStatusCode.SRM_SUCCESS.equals(returnStatus.getStatus().getStatusCode())) { surlToRelease.add(returnStatus.getSurl()); } @@ -356,9 +327,8 @@ private List extractSurlToRelease( } /** - * Removes the Extended Attribute "pinned" from SURLs belonging to a - * filesystem with tape support. - * + * Removes the Extended Attribute "pinned" from SURLs belonging to a filesystem with tape support. 
+ * * @param surlToRelease */ private void removePinneExtendedAttribute(List surlToRelease) { @@ -373,9 +343,9 @@ private void removePinneExtendedAttribute(List surlToRelease) { } catch (Throwable e) { - log.warn(String.format( - "UNEXPECTED: Unable to build a stori for surl %s: %s", surl, - e.getMessage())); + log.warn( + String.format( + "UNEXPECTED: Unable to build a stori for surl %s: %s", surl, e.getMessage())); continue; } @@ -387,11 +357,10 @@ private void removePinneExtendedAttribute(List surlToRelease) { } } - private void printSurlOutcome(TSURLReturnStatus surlStatus, - InputData inputData) { + private void printSurlOutcome(TSURLReturnStatus surlStatus, InputData inputData) { - CommandHelper.printSurlOutcome(SRM_COMMAND, log, surlStatus.getStatus(), - inputData, surlStatus.getSurl()); + CommandHelper.printSurlOutcome( + SRM_COMMAND, log, surlStatus.getStatus(), inputData, surlStatus.getSurl()); } protected void logRequestOutcome(TReturnStatus status, InputData id) { @@ -402,8 +371,7 @@ protected void logRequestOutcome(TReturnStatus status, InputData id) { if (surls == null) { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, id, token); } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, id, token, - toStringList(surls)); + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, id, token, toStringList(surls)); } } } diff --git a/src/main/java/it/grid/storm/synchcall/command/datatransfer/RequestUnknownException.java b/src/main/java/it/grid/storm/synchcall/command/datatransfer/RequestUnknownException.java index 2ad16d15..7db52bee 100644 --- a/src/main/java/it/grid/storm/synchcall/command/datatransfer/RequestUnknownException.java +++ b/src/main/java/it/grid/storm/synchcall/command/datatransfer/RequestUnknownException.java @@ -1,33 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.datatransfer; public class RequestUnknownException extends Exception { - /** - * - */ - private static final long serialVersionUID = 2766075955119694140L; + /** */ + private static final long serialVersionUID = 2766075955119694140L; - public RequestUnknownException() { + public RequestUnknownException() {} - } + public RequestUnknownException(String message) { - public RequestUnknownException(String message) { + super(message); + } - super(message); - } + public RequestUnknownException(Throwable cause) { - public RequestUnknownException(Throwable cause) { + super(cause); + } - super(cause); - } - - public RequestUnknownException(String message, Throwable cause) { - - super(message, cause); - } + public RequestUnknownException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/directory/LsCommand.java b/src/main/java/it/grid/storm/synchcall/command/directory/LsCommand.java index b865920d..ff703da9 100644 --- a/src/main/java/it/grid/storm/synchcall/command/directory/LsCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/directory/LsCommand.java @@ -1,17 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.directory; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -import org.apache.commons.lang.mutable.MutableInt; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; import it.grid.storm.authz.path.model.SRMFileRequest; @@ -68,16 +59,21 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.directory.LSInputData; import it.grid.storm.synchcall.data.directory.LSOutputData; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import org.apache.commons.lang.mutable.MutableInt; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * Authors:
- *
+ *
+ *
Authors: + * * @author lucamag luca.magnoniATcnaf.infn.it * @date = Dec 3, 2008 */ - public class LsCommand extends DirectoryCommand implements Command { public static final Logger log = LoggerFactory.getLogger(LsCommand.class); @@ -95,7 +91,7 @@ public LsCommand() { /** * Method that provides LS functionality. - * + * * @param inputData LSInputData * @return LSOutputData */ @@ -110,11 +106,13 @@ public OutputData execute(InputData data) { outputData.setRequestToken(null); outputData.setDetails(null); - if (inputData == null || inputData.getSurlArray() == null + if (inputData == null + || inputData.getSurlArray() == null || inputData.getSurlArray().size() == 0) { log.debug("srmLs: Input parameters for srmLs request NOT found!"); - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Invalid input parameters specified"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "Invalid input parameters specified"); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -123,8 +121,9 @@ public OutputData execute(InputData data) { ArrayOfSURLs surlArray = inputData.getSurlArray(); if (inputData.getStorageTypeSpecified()) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_NOT_SUPPORTED, - "Filtering result by fileStorageType not supported."); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Filtering result by fileStorageType not supported."); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); outputData.setRequestToken(null); @@ -152,8 +151,9 @@ public OutputData execute(InputData data) { } else { numOfLevels = inputData.getNumOfLevels().intValue(); if (numOfLevels < 0) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Parameter 'numOfLevels' is negative"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "Parameter 'numOfLevels' is negative"); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -169,8 +169,9 @@ public OutputData execute(InputData data) { } else { count = inputData.getCount().intValue(); if (count < 0) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Parameter 'count' is less or equal zero"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "Parameter 'count' is less or equal zero"); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -188,8 +189,9 @@ public OutputData execute(InputData data) { } else { offset = inputData.getOffset().intValue(); if (offset < 0) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "Parameter 'offset' is negative"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "Parameter 'offset' is negative"); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -230,8 +232,11 @@ public OutputData execute(InputData data) { } catch (UnapprochableSurlException e) { failure = true; - log.info("Unable to build a stori for surl {} for user {}. {}", surl, - DataHelper.getRequestor(inputData), e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. 
{}", + surl, + DataHelper.getRequestor(inputData), + e.getMessage()); fileLevelStatusCode = TStatusCode.SRM_AUTHORIZATION_FAILURE; fileLevelExplanation = e.getMessage(); @@ -241,8 +246,11 @@ public OutputData execute(InputData data) { } catch (NamespaceException e) { failure = true; - log.info("Unable to build a stori for surl {} for user {}. {}", surl, - DataHelper.getRequestor(inputData), e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. {}", + surl, + DataHelper.getRequestor(inputData), + e.getMessage()); fileLevelStatusCode = TStatusCode.SRM_INTERNAL_ERROR; fileLevelExplanation = e.getMessage(); @@ -252,8 +260,11 @@ public OutputData execute(InputData data) { } catch (InvalidSURLException e) { failure = true; - log.info("Unable to build a stori for surl {} for user {}. {}", surl, - DataHelper.getRequestor(inputData), e.getMessage()); + log.info( + "Unable to build a stori for surl {} for user {}. {}", + surl, + DataHelper.getRequestor(inputData), + e.getMessage()); fileLevelStatusCode = TStatusCode.SRM_INVALID_PATH; fileLevelExplanation = e.getMessage(); @@ -297,23 +308,24 @@ public OutputData execute(InputData data) { failure = true; fileLevelStatusCode = TStatusCode.SRM_INTERNAL_ERROR; fileLevelExplanation = e.getMessage(); - printRequestOutcome(CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), - inputData); + printRequestOutcome( + CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), inputData); } } else { log.debug("srmLs: SURL not specified as input parameter!"); failure = true; fileLevelStatusCode = TStatusCode.SRM_INVALID_PATH; fileLevelExplanation = "Invalid path"; - printRequestOutcome(CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), - inputData); + printRequestOutcome( + CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), inputData); } if (!failure) { AuthzDecision lsAuthz; if (inputData instanceof IdentityInputData) { - lsAuthz = AuthzDirector.getPathAuthz() - .authorize(((IdentityInputData) inputData).getUser(), SRMFileRequest.LS, stori); + lsAuthz = + AuthzDirector.getPathAuthz() + .authorize(((IdentityInputData) inputData).getUser(), SRMFileRequest.LS, stori); } else { lsAuthz = AuthzDirector.getPathAuthz().authorizeAnonymous(SRMFileRequest.LS, stori.getStFN()); @@ -321,24 +333,36 @@ public OutputData execute(InputData data) { if (lsAuthz.equals(AuthzDecision.PERMIT)) { - log.debug("srmLs: Ls authorized for user [{}] and PFN = [{}]", - DataHelper.getRequestor(inputData), stori.getPFN()); - - errorCount += manageAuthorizedLS(inputData, stori, details, allLevelRecursive, - numOfLevels, fullDetailedList, errorCount, maxEntries, offset, - numberOfReturnedEntries, 0, numberOfIterations); + log.debug( + "srmLs: Ls authorized for user [{}] and PFN = [{}]", + DataHelper.getRequestor(inputData), + stori.getPFN()); + + errorCount += + manageAuthorizedLS( + inputData, + stori, + details, + allLevelRecursive, + numOfLevels, + fullDetailedList, + errorCount, + maxEntries, + offset, + numberOfReturnedEntries, + 0, + numberOfIterations); } else { fileLevelStatusCode = TStatusCode.SRM_AUTHORIZATION_FAILURE; fileLevelExplanation = "User does not have valid permissions"; - printRequestOutcome(CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), - inputData); + printRequestOutcome( + CommandHelper.buildStatus(fileLevelStatusCode, fileLevelExplanation), inputData); failure = true; } } - if (failure) { errorCount++; @@ -356,12 +380,12 @@ public OutputData 
execute(InputData data) { details.addTMetaDataPathDetail(elementDetail); } - } if (details.size() == 0) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_INVALID_REQUEST, - "The offset is grater than the number of results"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "The offset is grater than the number of results"); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -369,8 +393,10 @@ public OutputData execute(InputData data) { if (numberOfReturnedEntries.intValue() >= maxEntries) { if (maxEntries < count) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_TOO_MANY_RESULTS, - "Max returned entries is: " + DirectoryCommand.config.getLSMaxNumberOfEntry()); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_TOO_MANY_RESULTS, + "Max returned entries is: " + DirectoryCommand.config.getLSMaxNumberOfEntry()); printRequestOutcome(globalStatus, inputData); outputData.setStatus(globalStatus); return outputData; @@ -389,13 +415,15 @@ public OutputData execute(InputData data) { } if (errorCount == 0) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, - warningMessage + "All requests successfully completed"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_SUCCESS, warningMessage + "All requests successfully completed"); printRequestOutcome(globalStatus, inputData); } else if (errorCount < surlArray.size()) { - globalStatus = CommandHelper.buildStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - warningMessage + "Check file statuses for details"); + globalStatus = + CommandHelper.buildStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, warningMessage + "Check file statuses for details"); printRequestOutcome(globalStatus, inputData); } else { @@ -412,8 +440,8 @@ private void printRequestOutcome(TReturnStatus status, LSInputData inputData) { if (inputData != null) { if (inputData.getSurlArray() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData, - inputData.getSurlArray().asStringList()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, status, inputData, inputData.getSurlArray().asStringList()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); } @@ -426,7 +454,7 @@ private void printRequestOutcome(TReturnStatus status, LSInputData inputData) { /** * Recursive function for visiting Directory an TMetaDataPath Creation. Returns the number of file * statuses different than SRM_SUCCESS. - * + * * @param guser * @param stori * @param rootArray @@ -442,15 +470,21 @@ private void printRequestOutcome(TReturnStatus status, LSInputData inputData) { * @param numberOfIterations * @return number of errors */ - - private int manageAuthorizedLS(LSInputData inputData, StoRI stori, - ArrayOfTMetaDataPathDetail rootArray, boolean allLevelRecursive, int numOfLevels, - boolean fullDetailedList, int errorCount, int count_maxEntries, int offset, - MutableInt numberOfResults, int currentLevel, MutableInt numberOfIterations) { - - /** - * @todo In this version the FileStorageType field is not managed even if it is specified. 
- */ + private int manageAuthorizedLS( + LSInputData inputData, + StoRI stori, + ArrayOfTMetaDataPathDetail rootArray, + boolean allLevelRecursive, + int numOfLevels, + boolean fullDetailedList, + int errorCount, + int count_maxEntries, + int offset, + MutableInt numberOfResults, + int currentLevel, + MutableInt numberOfIterations) { + + /** @todo In this version the FileStorageType field is not managed even if it is specified. */ // Check if max number of requests has been reached if (numberOfResults.intValue() >= count_maxEntries) { @@ -468,12 +502,11 @@ private int manageAuthorizedLS(LSInputData inputData, StoRI stori, * and return. Please note that for each level the same ArrayOfTMetaData is passed as parameter, * in order to collect results. this Array is referenced in the currentTMetaData element. */ - LocalFile localElement = stori.getLocalFile(); // Ls of the current element if (localElement.exists()) { // The local element exists in the underlying - // file system + // file system if (localElement.isDirectory()) { @@ -489,8 +522,11 @@ private int manageAuthorizedLS(LSInputData inputData, StoRI stori, try { fullDetail(inputData, stori, currentElementDetail); } catch (FSException e) { - log.error("srmLs: unable to get full details on stori {}. {}", - stori.getAbsolutePath(), e.getMessage(), e); + log.error( + "srmLs: unable to get full details on stori {}. {}", + stori.getAbsolutePath(), + e.getMessage(), + e); errorCount++; currentElementDetail.setStatus( new TReturnStatus(TStatusCode.SRM_FAILURE, "Unable to get full details")); @@ -524,9 +560,19 @@ private int manageAuthorizedLS(LSInputData inputData, StoRI stori, break; } - manageAuthorizedLS(inputData, item, currentMetaDataArray, allLevelRecursive, - numOfLevels, fullDetailedList, errorCount, count_maxEntries, offset, - numberOfResults, currentLevel + 1, numberOfIterations); + manageAuthorizedLS( + inputData, + item, + currentMetaDataArray, + allLevelRecursive, + numOfLevels, + fullDetailedList, + errorCount, + count_maxEntries, + offset, + numberOfResults, + currentLevel + 1, + numberOfIterations); } // for } @@ -540,8 +586,11 @@ private int manageAuthorizedLS(LSInputData inputData, StoRI stori, try { fullDetail(inputData, stori, currentElementDetail); } catch (FSException e) { - log.error("srmLs: unable to get full details on stori {}. {}", - stori.getAbsolutePath(), e.getMessage(), e); + log.error( + "srmLs: unable to get full details on stori {}. {}", + stori.getAbsolutePath(), + e.getMessage(), + e); errorCount++; currentElementDetail.setStatus( new TReturnStatus(TStatusCode.SRM_FAILURE, "Unable to get full details")); @@ -603,7 +652,7 @@ private List getFirstLevel(StoRI element) { /** * Set size and status of "localElement" into "elementDetail". - * + * * @param localElement LocalFile * @param elementDetail TMetaDataPathDetail */ @@ -656,7 +705,7 @@ private void populateDetailFromFS(StoRI element, TMetaDataPathDetail elementDeta /** * Returns true if the status of the SURL of the received StoRI is SRM_SPACE_AVAILABLE, false * otherwise. This method queries the DB, therefore pay attention to possible performance issues. 
- * + * * @return boolean */ private boolean isStoRISURLBusy(StoRI element) { @@ -666,8 +715,9 @@ private boolean isStoRISURLBusy(StoRI element) { return checker.isSURLBusy(element.getSURL()); } - private void fullDetail(LSInputData inputData, StoRI stori, - TMetaDataPathDetail currentElementDetail) throws FSException { + private void fullDetail( + LSInputData inputData, StoRI stori, TMetaDataPathDetail currentElementDetail) + throws FSException { if (inputData instanceof IdentityInputData) { fullDetail(stori, ((IdentityInputData) inputData).getUser(), currentElementDetail); @@ -693,10 +743,13 @@ private void fullDetail(StoRI element, GridUserInterface guser, TMetaDataPathDet permission = element.getLocalFile().getGroupPermission(guser.getLocalUser()); } if (permission != null) { - userPermission = new TUserPermission(new TUserID(guser.getLocalUser().getLocalUserName()), - TPermissionMode.getTPermissionMode(permission)); + userPermission = + new TUserPermission( + new TUserID(guser.getLocalUser().getLocalUserName()), + TPermissionMode.getTPermissionMode(permission)); groupPermission = - new TGroupPermission(new TGroupID(guser.getLocalUser().getLocalUserName()), + new TGroupPermission( + new TGroupID(guser.getLocalUser().getLocalUserName()), TPermissionMode.getTPermissionMode(permission)); otherPermission = TPermissionMode.getTPermissionMode(permission); } @@ -733,7 +786,7 @@ private void fullDetail(StoRI element, GridUserInterface guser, TMetaDataPathDet /** * Set full details into "elementDetail". Information details set by the function * populateDetailFromFS() are not considered. - * + * * @param element StoRI * @param localElement LocalFile * @param guser GridUserInterface @@ -778,8 +831,8 @@ private void fullDetail(StoRI element, TMetaDataPathDetail elementDetail) throws elementDetail.setLifeTimeAssigned(element.getFileLifeTime()); if (element.getFileStartTime() != null) { - elementDetail - .setLifetimeLeft(element.getFileLifeTime().timeLeft(element.getFileStartTime())); + elementDetail.setLifetimeLeft( + element.getFileLifeTime().timeLeft(element.getFileStartTime())); } else { elementDetail.setLifetimeLeft(TLifeTimeInSeconds.makeInfinite()); } @@ -801,12 +854,12 @@ private void fullDetail(StoRI element, TMetaDataPathDetail elementDetail) throws /** * populateDetailFromPersistence - * + * * @param element StoRI * @param elementDetail TMetaDataPathDetail */ - private void populateFileDetailsFromPersistence(StoRI element, - TMetaDataPathDetail elementDetail) { + private void populateFileDetailsFromPersistence( + StoRI element, TMetaDataPathDetail elementDetail) { // TFileStorageType boolean isVolatile = VolatileAndJiTCatalog.getInstance().exists(element.getPFN()); @@ -815,12 +868,11 @@ private void populateFileDetailsFromPersistence(StoRI element, } else { elementDetail.setTFileStorageType(TFileStorageType.PERMANENT); } - } /** * checkAnotherLevel - * + * * @param allLevelRecursive boolean * @param numOfLevels int * @param currentLevel int @@ -836,5 +888,4 @@ private boolean checkAnotherLevel(boolean allLevelRecursive, int numOfLevels, in } return result; } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/directory/MkdirCommand.java b/src/main/java/it/grid/storm/synchcall/command/directory/MkdirCommand.java index 32f31353..3569e1d6 100644 --- a/src/main/java/it/grid/storm/synchcall/command/directory/MkdirCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/directory/MkdirCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare 
(INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.directory; @@ -18,11 +17,7 @@ import static it.grid.storm.synchcall.command.directory.MkdirException.srmInvalidPath; import static java.lang.String.format; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; - import it.grid.storm.acl.AclManager; import it.grid.storm.acl.AclManagerFS; import it.grid.storm.authz.AuthzDecision; @@ -58,6 +53,8 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.directory.MkdirInputData; import it.grid.storm.synchcall.data.directory.MkdirOutputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class MkdirException extends SRMCommandException { @@ -89,15 +86,13 @@ public static MkdirException srmAuthorizationFailure(String message) { } } - /** * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and * ICTP/EGRID project - * + * * @author lucamag * @date May 27, 2008 */ - public class MkdirCommand extends DirectoryCommand implements Command { public static final Logger log = LoggerFactory.getLogger(MkdirCommand.class); @@ -117,7 +112,7 @@ public MkdirCommand() { /** * Method that provide SrmMkdir functionality. - * + * * @param inputData Contains information about input data for Mkdir request. * @return MkdirOutputData Contains output data */ @@ -140,8 +135,8 @@ private MkdirOutputData doMkdir(MkdirInputData data) { GridUserInterface user = getUser(data); StoRI stori = resolveStoRI(surl, user); checkUserAuthorization(stori, user); - log.debug("srmMkdir authorized for {} for directory = {}", userToString(user), - stori.getPFN()); + log.debug( + "srmMkdir authorized for {} for directory = {}", userToString(user), stori.getPFN()); returnStatus = createFolder(stori.getLocalFile()); if (returnStatus.isSRM_SUCCESS()) { log.debug("srmMkdir: updating used space info ..."); @@ -163,7 +158,8 @@ private TReturnStatus createFolder(LocalFile file) { log.debug("srmMkdir: Parent directory is {}.", parent); if (parent != null) { if (!parent.exists()) { - return new TReturnStatus(SRM_INVALID_PATH, + return new TReturnStatus( + SRM_INVALID_PATH, "Parent directory does not exists. Recursive directory creation Not Allowed"); } log.debug("srmMkdir: Parent directory {} exists.", parent); @@ -295,8 +291,9 @@ private void manageAcl(StoRI stori, GridUserInterface user) { } } - private void setAcl(GridUserInterface user, LocalFile file, boolean hasJiTACL, - FilesystemPermission permission) throws CannotMapUserException { + private void setAcl( + GridUserInterface user, LocalFile file, boolean hasJiTACL, FilesystemPermission permission) + throws CannotMapUserException { /* * Add Acces Control List (ACL) in directory created. 
ACL allow user to read-write-list the new @@ -329,7 +326,9 @@ private void manageDefaultACL(LocalFile dir, FilesystemPermission permission) return; } for (ACLEntry ace : dacl.getACL()) { - log.debug("Adding DefaultACL for the gid: {} with permission: {}", ace.getGroupID(), + log.debug( + "Adding DefaultACL for the gid: {} with permission: {}", + ace.getGroupID(), ace.getFilePermissionString()); LocalUser user = new LocalUser(ace.getGroupID(), ace.getGroupID()); @@ -347,7 +346,11 @@ private void printRequestOutcome(TReturnStatus status, MkdirInputData inputData) if (inputData != null) { if (inputData.getSurl() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData, + CommandHelper.printRequestOutcome( + SRM_COMMAND, + log, + status, + inputData, Lists.newArrayList(inputData.getSurl().toString())); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); diff --git a/src/main/java/it/grid/storm/synchcall/command/directory/MvCommand.java b/src/main/java/it/grid/storm/synchcall/command/directory/MvCommand.java index d4d07ffd..71d547be 100644 --- a/src/main/java/it/grid/storm/synchcall/command/directory/MvCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/directory/MvCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.directory; @@ -37,582 +36,572 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.directory.MvInputData; import it.grid.storm.synchcall.data.directory.MvOutputData; - import java.util.Arrays; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project This class implements the SrmMv - * Command. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project This class implements the SrmMv Command. + * * @author lucamag * @date May 28, 2008 */ - public class MvCommand extends DirectoryCommand implements Command { public static final Logger log = LoggerFactory.getLogger(MvCommand.class); - private static final String SRM_COMMAND = "SrmMv"; - private final NamespaceInterface namespace; - - public MvCommand() { - - namespace = NamespaceDirector.getNamespace(); - - } - - /** - * Method that provide SrmMv functionality. - * - * @param inputData - * Contains information about input data for Mv request. - * @return outputData Contains output data - */ - public OutputData execute(InputData data) { - - log.debug("srmMv: Start execution."); - MvOutputData outputData = new MvOutputData(); - MvInputData inputData = (MvInputData) data; - - /** - * Validate MvInputData. The check is done at this level to separate - * internal StoRM logic from xmlrpc specific operation. 
- */ - - if ((inputData == null) || (inputData.getFromSURL() == null) - || (inputData.getToSURL() == null)) { - outputData.setStatus(CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, - "Invalid parameter specified.")); - log.warn("srmMv: Request failed with [status: {}]", - outputData.getStatus()); - - return outputData; - } - - TSURL fromSURL = inputData.getFromSURL(); - - if (fromSURL.isEmpty()) { - log.warn("srmMv: unable to perform the operation, empty fromSurl"); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, "Invalid fromSURL specified!")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - TSURL toSURL = inputData.getToSURL(); - - if (toSURL.isEmpty()) { - log.error("srmMv: unable to perform the operation, empty toSurl"); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, "Invalid toSURL specified!")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - StoRI fromStori = null; - try { - if (inputData instanceof IdentityInputData) { - try { - fromStori = namespace.resolveStoRIbySURL(fromSURL, - ((IdentityInputData) inputData).getUser()); - } catch (UnapprochableSurlException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (NamespaceException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidSURLException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - } - } else { - try { - fromStori = namespace.resolveStoRIbySURL(fromSURL); - } catch (UnapprochableSurlException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (NamespaceException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidSURLException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - } - } catch (IllegalArgumentException e) { - log.warn("srmMv: Unable to build StoRI by SURL: {}. 
{}", fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_REQUEST, "Unable to build StoRI by SURL")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - StoRI toStori = null;; - try { - if (inputData instanceof IdentityInputData) { - try { - toStori = namespace.resolveStoRIbySURL(toSURL, - ((IdentityInputData) inputData).getUser()); - } catch (UnapprochableSurlException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (NamespaceException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidSURLException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - fromSURL, - DataHelper.getRequestor(inputData), - e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - } else { - try { - toStori = namespace.resolveStoRIbySURL(toSURL); - } catch (UnapprochableSurlException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (NamespaceException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidSURLException e) { - log.info("srmMv: Unable to build a stori for surl {}. {}",fromSURL - ,e.getMessage()); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - } - } catch (IllegalArgumentException e) { - log.error("srmMv: Unable to build StoRI by SURL: {}. 
{}", fromSURL - ,e.getMessage(),e); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, - "Unable to build StoRI by destination SURL")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - TSpaceToken token = new SpaceHelper().getTokenFromStoRI(log, fromStori); - SpaceAuthzInterface spaceAuth = AuthzDirector.getSpaceAuthz(token); - - boolean isSpaceAuthorized; - if (inputData instanceof IdentityInputData) { - isSpaceAuthorized = spaceAuth.authorize( - ((IdentityInputData) inputData).getUser(), SRMSpaceRequest.MV); - } else { - isSpaceAuthorized = spaceAuth.authorizeAnonymous(SRMSpaceRequest.MV); - } - if (!isSpaceAuthorized) { - log.debug("srmMv: User not authorized to perform srmMv on SA: {}", token); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, - ": User not authorized to perform srmMv on SA: " + token)); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - if (fromStori.getLocalFile().getPath() - .compareTo(toStori.getLocalFile().getPath()) == 0) { - outputData.setStatus(CommandHelper.buildStatus(TStatusCode.SRM_SUCCESS, - "Source SURL and target SURL are the same file.")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - if (toStori.getLocalFile().exists()) { - if (toStori.getLocalFile().isDirectory()) { - try { - toStori = buildDestinationStoryForFolder(toSURL, fromStori, data); - } catch (IllegalArgumentException e) { - log.debug("srmMv : Unable to build StoRI for SURL {}. {}", - toSURL, e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, "Unable to build StoRI by SURL")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (UnapprochableSurlException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - toSURL, DataHelper.getRequestor(inputData),e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidTSURLAttributesException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - toSURL, DataHelper.getRequestor(inputData),e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, "Invalid toSURL specified!")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (NamespaceException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. {}", - toSURL, DataHelper.getRequestor(inputData),e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } catch (InvalidSURLException e) { - log.info("srmMv: Unable to build a stori for surl {} for user {}. 
{}", - toSURL, DataHelper.getRequestor(inputData),e.getMessage()); - - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_INVALID_PATH, e.getMessage())); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - } else { - log.debug("srmMv : destination SURL {} already exists.", toSURL); - outputData.setStatus(CommandHelper - .buildStatus(TStatusCode.SRM_DUPLICATION_ERROR, - "destination SURL already exists!")); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - } - - AuthzDecision sourceDecision; - if (inputData instanceof IdentityInputData) { - sourceDecision = AuthzDirector.getPathAuthz().authorize( - ((IdentityInputData) inputData).getUser(), SRMFileRequest.MV_source, - fromStori, toStori); - } else { - sourceDecision = AuthzDirector.getPathAuthz().authorizeAnonymous( - SRMFileRequest.MV_source, fromStori, toStori); - } - AuthzDecision destinationDecision; - if (inputData instanceof IdentityInputData) { - destinationDecision = AuthzDirector.getPathAuthz().authorize( - ((IdentityInputData) inputData).getUser(), SRMFileRequest.MV_dest, - fromStori, toStori); - } else { - destinationDecision = AuthzDirector.getPathAuthz().authorizeAnonymous( - SRMFileRequest.MV_dest, fromStori, toStori); - } - TReturnStatus returnStatus; - if ((sourceDecision.equals(AuthzDecision.PERMIT)) - && (destinationDecision.equals(AuthzDecision.PERMIT))) { - - log.debug("SrmMv: Mv authorized for user {}. Source: {}. Target: {}", - DataHelper.getRequestor(inputData), - fromStori.getPFN(), - toStori.getPFN()); - - returnStatus = manageAuthorizedMV(fromStori, toStori.getLocalFile()); - if (returnStatus.isSRM_SUCCESS()) { - LocalUser user = null; - if (inputData instanceof IdentityInputData) { - try { - user = ((IdentityInputData) inputData).getUser().getLocalUser(); - } catch (CannotMapUserException e) { - log - .warn("srmMv: user mapping error {}", e.getMessage()); - - if (log.isDebugEnabled()){ - log.error(e.getMessage(),e); - } - - returnStatus - .extendExplaination("unable to set user acls on the destination file"); - } - } - if (user != null) { - setAcl(fromStori, toStori, user); - } else { - setAcl(fromStori, toStori); - } - } else { - log.warn("srmMv: <{}> Request for [fromSURL={}; toSURL={}] failed with [status: {}]", - DataHelper.getRequestor(inputData), - fromSURL, - toSURL, - returnStatus); - } - } else { - - String errorMsg = "Authorization error"; - - if (sourceDecision.equals(AuthzDecision.PERMIT)) { - errorMsg = - "User is not authorized to create and/or write the destination file"; - } else { - if (destinationDecision.equals(AuthzDecision.PERMIT)) { - errorMsg = - "User is not authorized to read and/or delete the source file"; - } else { - errorMsg = - "User is neither authorized to read and/or delete the source file " - + "nor to create and/or write the destination file"; - } - } - - returnStatus = - CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - errorMsg); - } - outputData.setStatus(returnStatus); - printRequestOutcome(outputData.getStatus(), inputData); - return outputData; - } - - private StoRI buildDestinationStoryForFolder(TSURL toSURL, StoRI fromStori, - InputData inputData) throws IllegalArgumentException, - InvalidTSURLAttributesException, UnapprochableSurlException, - NamespaceException, InvalidSURLException { - - StoRI toStori; - String toSURLString = toSURL.getSURLString(); - if (!(toSURLString.endsWith("/"))) { - toSURLString += "/"; - } - toSURLString += fromStori.getFilename(); - 
log.debug("srmMv: New toSURL: {}", toSURLString); - if (inputData instanceof IdentityInputData) { - toStori = namespace.resolveStoRIbySURL( - TSURL.makeFromStringValidate(toSURLString), - ((IdentityInputData) inputData).getUser()); - } else { - toStori = namespace.resolveStoRIbySURL(TSURL - .makeFromStringValidate(toSURLString)); - } - return toStori; - } - - private void setAcl(StoRI oldFileStoRI, StoRI newFileStoRI) { - - try { - AclManagerFS.getInstance().moveHttpsPermissions( - oldFileStoRI.getLocalFile(), newFileStoRI.getLocalFile()); - } catch (IllegalArgumentException e) { - log - .error("Unable to move permissions from the old to the new file.{}", - e.getMessage(), e); - } - } - - private void setAcl(StoRI oldFileStoRI, StoRI newFileStoRI, - LocalUser localUser) { - - setAcl(oldFileStoRI, newFileStoRI); - if (newFileStoRI.hasJustInTimeACLs()) { - // JiT - try { - AclManagerFS.getInstance().grantHttpsUserPermission( - newFileStoRI.getLocalFile(), localUser, - FilesystemPermission.ReadWrite); - } catch (IllegalArgumentException e) { - log - .error("Unable to grant user read and write permission on file. {}", - e.getMessage(), - e); - } - } else { - // AoT - try { - AclManagerFS.getInstance().grantHttpsGroupPermission( - newFileStoRI.getLocalFile(), localUser, - FilesystemPermission.ReadWrite); - } catch (IllegalArgumentException e) { - log - .error("Unable to grant group read and write permission on file. {}" - ,e.getMessage(),e); - } - } - } - - /** - * Split PFN , recursive creation is not supported, as reported at page 16 of - * Srm v2.1 spec. - * - * @param user - * VomsGridUser - * @param LocalFile - * fromFile - * @param LocalFile - * toFile - * @return TReturnStatus - */ - private TReturnStatus manageAuthorizedMV(StoRI fromStori, LocalFile toFile) { - - boolean creationDone; - - String explanation = ""; - TStatusCode statusCode = TStatusCode.EMPTY; - - LocalFile fromFile = fromStori.getLocalFile(); - LocalFile toParent = toFile.getParentFile(); - - /* - * Controllare che File sorgente esiste Esiste directory destinazione(che - * esista e sia directory) Non esiste file deestinazione - */ - - boolean sourceExists = false; - boolean targetDirExists = false; - boolean targetFileExists = false; - - if (fromFile != null) { - sourceExists = fromFile.exists(); - } - - if (toParent != null) { - targetDirExists = toParent.exists() && toParent.isDirectory(); - } - - if (toFile != null) { - targetFileExists = toFile.exists(); - } - - if (sourceExists && targetDirExists && !targetFileExists) { - - SURLStatusManager checker = SURLStatusManagerFactory - .newSURLStatusManager(); - - if(checker.isSURLBusy(fromStori.getSURL())){ - log - .debug("srmMv request failure: fromSURL is busy."); - explanation = "There is an active SrmPrepareToPut on from SURL."; - return CommandHelper - .buildStatus(TStatusCode.SRM_FILE_BUSY, explanation); - } - - /** - * Check if there is an active SrmPrepareToGet on the source SURL. In that - * case SrmMv() fails with SRM_FILE_BUSY. - */ - - if (checker.isSURLPinned(fromStori.getSURL())){ - log - .debug("SrmMv: requests fails because the source SURL is being used from other requests."); - explanation = "There is an active SrmPrepareToGet on from SURL"; - return CommandHelper - .buildStatus(TStatusCode.SRM_FILE_BUSY, explanation); - } - - /** - * Perform the SrmMv() operation. 
- */ - creationDone = fromFile.renameTo(toFile.getPath()); - - if (creationDone) { - log.debug("SrmMv: Request success!"); - explanation = "SURL moved with success"; - statusCode = TStatusCode.SRM_SUCCESS; - } else { - log.debug("SrmMv: Requests fails because the path is invalid."); - explanation = "Invalid path"; - statusCode = TStatusCode.SRM_INVALID_PATH; - } - - } else { - if (!sourceExists) { // and it is a file - log - .debug("SrmMv: request fails because the source SURL does not exists!"); - explanation = "Source SURL does not exists!"; - statusCode = TStatusCode.SRM_INVALID_PATH; - } else { - if (!targetDirExists) { - log - .debug("SrmMv: request fails because the target directory does not exitts."); - explanation = "Target directory does not exits!"; - statusCode = TStatusCode.SRM_INVALID_PATH; - } else { - if (targetFileExists) { - log.debug("SrmMv: request fails because the target SURL exists."); - explanation = "Target SURL exists!"; - statusCode = TStatusCode.SRM_DUPLICATION_ERROR; - } else { - log.debug("SrmMv request failure! That is a BUG!"); - explanation = "That is a bug!"; - statusCode = TStatusCode.SRM_INTERNAL_ERROR; - } - } - } - } - - return CommandHelper.buildStatus(statusCode, explanation); - } - - private void printRequestOutcome(TReturnStatus status, MvInputData inputData) { - - if (inputData != null) { - if (inputData.getFromSURL() != null && inputData.getToSURL() != null) { - CommandHelper.printRequestOutcome( - SRM_COMMAND, - log, - status, - inputData, - Arrays.asList(new String[] { inputData.getFromSURL().toString(), - inputData.getFromSURL().toString() })); - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); - } - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); - } - } - + private static final String SRM_COMMAND = "SrmMv"; + private final NamespaceInterface namespace; + + public MvCommand() { + + namespace = NamespaceDirector.getNamespace(); + } + + /** + * Method that provide SrmMv functionality. + * + * @param inputData Contains information about input data for Mv request. + * @return outputData Contains output data + */ + public OutputData execute(InputData data) { + + log.debug("srmMv: Start execution."); + MvOutputData outputData = new MvOutputData(); + MvInputData inputData = (MvInputData) data; + + /** + * Validate MvInputData. The check is done at this level to separate internal StoRM logic from + * xmlrpc specific operation. 
+ */ + if ((inputData == null) + || (inputData.getFromSURL() == null) + || (inputData.getToSURL() == null)) { + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_FAILURE, "Invalid parameter specified.")); + log.warn("srmMv: Request failed with [status: {}]", outputData.getStatus()); + + return outputData; + } + + TSURL fromSURL = inputData.getFromSURL(); + + if (fromSURL.isEmpty()) { + log.warn("srmMv: unable to perform the operation, empty fromSurl"); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, "Invalid fromSURL specified!")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + TSURL toSURL = inputData.getToSURL(); + + if (toSURL.isEmpty()) { + log.error("srmMv: unable to perform the operation, empty toSurl"); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, "Invalid toSURL specified!")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + StoRI fromStori = null; + try { + if (inputData instanceof IdentityInputData) { + try { + fromStori = + namespace.resolveStoRIbySURL(fromSURL, ((IdentityInputData) inputData).getUser()); + } catch (UnapprochableSurlException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (NamespaceException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidSURLException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + } + } else { + try { + fromStori = namespace.resolveStoRIbySURL(fromSURL); + } catch (UnapprochableSurlException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (NamespaceException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidSURLException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + } + } catch (IllegalArgumentException e) { + log.warn("srmMv: Unable to build StoRI by SURL: {}. 
{}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_INVALID_REQUEST, "Unable to build StoRI by SURL")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + StoRI toStori = null; + ; + try { + if (inputData instanceof IdentityInputData) { + try { + toStori = namespace.resolveStoRIbySURL(toSURL, ((IdentityInputData) inputData).getUser()); + } catch (UnapprochableSurlException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (NamespaceException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidSURLException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + fromSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + } else { + try { + toStori = namespace.resolveStoRIbySURL(toSURL); + } catch (UnapprochableSurlException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (NamespaceException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidSURLException e) { + log.info("srmMv: Unable to build a stori for surl {}. {}", fromSURL, e.getMessage()); + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + } + } catch (IllegalArgumentException e) { + log.error("srmMv: Unable to build StoRI by SURL: {}. 
{}", fromSURL, e.getMessage(), e); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_INTERNAL_ERROR, "Unable to build StoRI by destination SURL")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + TSpaceToken token = new SpaceHelper().getTokenFromStoRI(log, fromStori); + SpaceAuthzInterface spaceAuth = AuthzDirector.getSpaceAuthz(token); + + boolean isSpaceAuthorized; + if (inputData instanceof IdentityInputData) { + isSpaceAuthorized = + spaceAuth.authorize(((IdentityInputData) inputData).getUser(), SRMSpaceRequest.MV); + } else { + isSpaceAuthorized = spaceAuth.authorizeAnonymous(SRMSpaceRequest.MV); + } + if (!isSpaceAuthorized) { + log.debug("srmMv: User not authorized to perform srmMv on SA: {}", token); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_AUTHORIZATION_FAILURE, + ": User not authorized to perform srmMv on SA: " + token)); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + if (fromStori.getLocalFile().getPath().compareTo(toStori.getLocalFile().getPath()) == 0) { + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_SUCCESS, "Source SURL and target SURL are the same file.")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + if (toStori.getLocalFile().exists()) { + if (toStori.getLocalFile().isDirectory()) { + try { + toStori = buildDestinationStoryForFolder(toSURL, fromStori, data); + } catch (IllegalArgumentException e) { + log.debug("srmMv : Unable to build StoRI for SURL {}. {}", toSURL, e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_INTERNAL_ERROR, "Unable to build StoRI by SURL")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (UnapprochableSurlException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + toSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidTSURLAttributesException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + toSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, "Invalid toSURL specified!")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (NamespaceException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. {}", + toSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } catch (InvalidSURLException e) { + log.info( + "srmMv: Unable to build a stori for surl {} for user {}. 
{}", + toSURL, + DataHelper.getRequestor(inputData), + e.getMessage()); + + outputData.setStatus( + CommandHelper.buildStatus(TStatusCode.SRM_INVALID_PATH, e.getMessage())); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + } else { + log.debug("srmMv : destination SURL {} already exists.", toSURL); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_DUPLICATION_ERROR, "destination SURL already exists!")); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + } + + AuthzDecision sourceDecision; + if (inputData instanceof IdentityInputData) { + sourceDecision = + AuthzDirector.getPathAuthz() + .authorize( + ((IdentityInputData) inputData).getUser(), + SRMFileRequest.MV_source, + fromStori, + toStori); + } else { + sourceDecision = + AuthzDirector.getPathAuthz() + .authorizeAnonymous(SRMFileRequest.MV_source, fromStori, toStori); + } + AuthzDecision destinationDecision; + if (inputData instanceof IdentityInputData) { + destinationDecision = + AuthzDirector.getPathAuthz() + .authorize( + ((IdentityInputData) inputData).getUser(), + SRMFileRequest.MV_dest, + fromStori, + toStori); + } else { + destinationDecision = + AuthzDirector.getPathAuthz() + .authorizeAnonymous(SRMFileRequest.MV_dest, fromStori, toStori); + } + TReturnStatus returnStatus; + if ((sourceDecision.equals(AuthzDecision.PERMIT)) + && (destinationDecision.equals(AuthzDecision.PERMIT))) { + + log.debug( + "SrmMv: Mv authorized for user {}. Source: {}. Target: {}", + DataHelper.getRequestor(inputData), + fromStori.getPFN(), + toStori.getPFN()); + + returnStatus = manageAuthorizedMV(fromStori, toStori.getLocalFile()); + if (returnStatus.isSRM_SUCCESS()) { + LocalUser user = null; + if (inputData instanceof IdentityInputData) { + try { + user = ((IdentityInputData) inputData).getUser().getLocalUser(); + } catch (CannotMapUserException e) { + log.warn("srmMv: user mapping error {}", e.getMessage()); + + if (log.isDebugEnabled()) { + log.error(e.getMessage(), e); + } + + returnStatus.extendExplaination("unable to set user acls on the destination file"); + } + } + if (user != null) { + setAcl(fromStori, toStori, user); + } else { + setAcl(fromStori, toStori); + } + } else { + log.warn( + "srmMv: <{}> Request for [fromSURL={}; toSURL={}] failed with [status: {}]", + DataHelper.getRequestor(inputData), + fromSURL, + toSURL, + returnStatus); + } + } else { + + String errorMsg = "Authorization error"; + + if (sourceDecision.equals(AuthzDecision.PERMIT)) { + errorMsg = "User is not authorized to create and/or write the destination file"; + } else { + if (destinationDecision.equals(AuthzDecision.PERMIT)) { + errorMsg = "User is not authorized to read and/or delete the source file"; + } else { + errorMsg = + "User is neither authorized to read and/or delete the source file " + + "nor to create and/or write the destination file"; + } + } + + returnStatus = CommandHelper.buildStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, errorMsg); + } + outputData.setStatus(returnStatus); + printRequestOutcome(outputData.getStatus(), inputData); + return outputData; + } + + private StoRI buildDestinationStoryForFolder(TSURL toSURL, StoRI fromStori, InputData inputData) + throws IllegalArgumentException, InvalidTSURLAttributesException, UnapprochableSurlException, + NamespaceException, InvalidSURLException { + + StoRI toStori; + String toSURLString = toSURL.getSURLString(); + if (!(toSURLString.endsWith("/"))) { + toSURLString += "/"; + } + toSURLString += 
fromStori.getFilename();
+    log.debug("srmMv: New toSURL: {}", toSURLString);
+    if (inputData instanceof IdentityInputData) {
+      toStori =
+          namespace.resolveStoRIbySURL(
+              TSURL.makeFromStringValidate(toSURLString),
+              ((IdentityInputData) inputData).getUser());
+    } else {
+      toStori = namespace.resolveStoRIbySURL(TSURL.makeFromStringValidate(toSURLString));
+    }
+    return toStori;
+  }
+
+  private void setAcl(StoRI oldFileStoRI, StoRI newFileStoRI) {
+
+    try {
+      AclManagerFS.getInstance()
+          .moveHttpsPermissions(oldFileStoRI.getLocalFile(), newFileStoRI.getLocalFile());
+    } catch (IllegalArgumentException e) {
+      log.error("Unable to move permissions from the old to the new file. {}", e.getMessage(), e);
+    }
+  }
+
+  private void setAcl(StoRI oldFileStoRI, StoRI newFileStoRI, LocalUser localUser) {
+
+    setAcl(oldFileStoRI, newFileStoRI);
+    if (newFileStoRI.hasJustInTimeACLs()) {
+      // JiT
+      try {
+        AclManagerFS.getInstance()
+            .grantHttpsUserPermission(
+                newFileStoRI.getLocalFile(), localUser, FilesystemPermission.ReadWrite);
+      } catch (IllegalArgumentException e) {
+        log.error("Unable to grant user read and write permission on file. {}", e.getMessage(), e);
+      }
+    } else {
+      // AoT
+      try {
+        AclManagerFS.getInstance()
+            .grantHttpsGroupPermission(
+                newFileStoRI.getLocalFile(), localUser, FilesystemPermission.ReadWrite);
+      } catch (IllegalArgumentException e) {
+        log.error("Unable to grant group read and write permission on file. {}", e.getMessage(), e);
+      }
+    }
+  }
+
+  /**
+   * Performs the move of an already authorized request. Recursive creation of the destination
+   * path is not supported, as reported at page 16 of the SRM v2.1 spec.
+   *
+   * @param fromStori StoRI of the source SURL
+   * @param toFile LocalFile the move destination
+   * @return TReturnStatus the outcome of the move
+   */
+  private TReturnStatus manageAuthorizedMV(StoRI fromStori, LocalFile toFile) {
+
+    boolean creationDone;
+
+    String explanation = "";
+    TStatusCode statusCode = TStatusCode.EMPTY;
+
+    LocalFile fromFile = fromStori.getLocalFile();
+    LocalFile toParent = toFile.getParentFile();
+
+    /*
+     * Check that the source file exists, that the destination directory exists and is indeed a
+     * directory, and that the destination file does not already exist.
+     */
+
+    boolean sourceExists = false;
+    boolean targetDirExists = false;
+    boolean targetFileExists = false;
+
+    if (fromFile != null) {
+      sourceExists = fromFile.exists();
+    }
+
+    if (toParent != null) {
+      targetDirExists = toParent.exists() && toParent.isDirectory();
+    }
+
+    if (toFile != null) {
+      targetFileExists = toFile.exists();
+    }
+
+    if (sourceExists && targetDirExists && !targetFileExists) {
+
+      SURLStatusManager checker = SURLStatusManagerFactory.newSURLStatusManager();
+
+      if (checker.isSURLBusy(fromStori.getSURL())) {
+        log.debug("srmMv request failure: fromSURL is busy.");
+        explanation = "There is an active SrmPrepareToPut on from SURL.";
+        return CommandHelper.buildStatus(TStatusCode.SRM_FILE_BUSY, explanation);
+      }
+
+      /**
+       * Check if there is an active SrmPrepareToGet on the source SURL. In that case SrmMv() fails
+       * with SRM_FILE_BUSY.
+       */
+      if (checker.isSURLPinned(fromStori.getSURL())) {
+        log.debug(
+            "SrmMv: request fails because the source SURL is being used by other requests.");
+        explanation = "There is an active SrmPrepareToGet on from SURL";
+        return CommandHelper.buildStatus(TStatusCode.SRM_FILE_BUSY, explanation);
+      }
+
+      /** Perform the SrmMv() operation. */
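+      // renameTo() reports the outcome as a boolean: a successful rename is mapped to
+      // SRM_SUCCESS below, while a failed rename is reported as SRM_INVALID_PATH.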
+      creationDone = fromFile.renameTo(toFile.getPath());
+
+      if (creationDone) {
+        log.debug("SrmMv: Request success!");
+        explanation = "SURL moved with success";
+        statusCode = TStatusCode.SRM_SUCCESS;
+      } else {
+        log.debug("SrmMv: Request fails because the path is invalid.");
+        explanation = "Invalid path";
+        statusCode = TStatusCode.SRM_INVALID_PATH;
+      }
+
+    } else {
+      if (!sourceExists) { // and it is a file
+        log.debug("SrmMv: request fails because the source SURL does not exist!");
+        explanation = "Source SURL does not exist!";
+        statusCode = TStatusCode.SRM_INVALID_PATH;
+      } else {
+        if (!targetDirExists) {
+          log.debug("SrmMv: request fails because the target directory does not exist.");
+          explanation = "Target directory does not exist!";
+          statusCode = TStatusCode.SRM_INVALID_PATH;
+        } else {
+          if (targetFileExists) {
+            log.debug("SrmMv: request fails because the target SURL exists.");
+            explanation = "Target SURL exists!";
+            statusCode = TStatusCode.SRM_DUPLICATION_ERROR;
+          } else {
+            log.debug("SrmMv request failure! That is a BUG!");
+            explanation = "That is a bug!";
+            statusCode = TStatusCode.SRM_INTERNAL_ERROR;
+          }
+        }
+      }
+    }
+
+    return CommandHelper.buildStatus(statusCode, explanation);
+  }
+
+  private void printRequestOutcome(TReturnStatus status, MvInputData inputData) {
+
+    if (inputData != null) {
+      if (inputData.getFromSURL() != null && inputData.getToSURL() != null) {
+        CommandHelper.printRequestOutcome(
+            SRM_COMMAND,
+            log,
+            status,
+            inputData,
+            Arrays.asList(
+                new String[] {
+                  inputData.getFromSURL().toString(), inputData.getToSURL().toString()
+                }));
+      } else {
+        CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData);
+      }
+    } else {
+      CommandHelper.printRequestOutcome(SRM_COMMAND, log, status);
+    }
+  }
 }
diff --git a/src/main/java/it/grid/storm/synchcall/command/directory/RmCommand.java b/src/main/java/it/grid/storm/synchcall/command/directory/RmCommand.java index ec98d352..8412392b 100644 --- a/src/main/java/it/grid/storm/synchcall/command/directory/RmCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/directory/RmCommand.java @@ -1,14 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.directory; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; import it.grid.storm.authz.SpaceAuthzInterface; @@ -38,6 +32,9 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.directory.RmInputData; import it.grid.storm.synchcall.data.directory.RmOutputData; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class RmException extends SRMCommandException { @@ -49,15 +46,13 @@ public RmException(TStatusCode code, String message) { } } - /** * This class is part of the StoRM project.
Copyright: Copyright (c) 2008 Company: INFN-CNAF and * ICTP/EGRID project - * + * * @author lucamag * @date May 27, 2008 */ - public class RmCommand implements Command { private static final String SRM_COMMAND = "srmRm"; @@ -67,7 +62,6 @@ public class RmCommand implements Command { public RmCommand() { namespace = NamespaceDirector.getNamespace(); - } private void checkInputData(InputData data) throws IllegalArgumentException { @@ -94,7 +88,7 @@ private List getSurlArray(RmInputData data) throws RmException { /** * Method that provide SrmRm functionality. - * + * * @param inputData Contains information about input data for rm request. * @return RmOutputData Contains output data */ @@ -145,7 +139,6 @@ private RmOutputData doRm(RmInputData data) { returnStatus.getStatusCode().equals(TStatusCode.SRM_AUTHORIZATION_FAILURE); printSurlOutcome(returnStatus, data, surl); } - } globalStatus = computeGlobalStatus(atLeastOneSuccess, atLeastOneFailure, allUnauthorized); outputData = new RmOutputData(globalStatus, arrayOfFileStatus); @@ -173,8 +166,8 @@ private TReturnStatus removeFile(TSURL surl, GridUserInterface user, RmInputData } if (localFile.isDirectory()) { - return new TReturnStatus(TStatusCode.SRM_INVALID_PATH, - "The specified file is a directory. Not removed"); + return new TReturnStatus( + TStatusCode.SRM_INVALID_PATH, "The specified file is a directory. Not removed"); } // Get file size before it's removed @@ -182,8 +175,8 @@ private TReturnStatus removeFile(TSURL surl, GridUserInterface user, RmInputData if (!localFile.delete()) { log.warn("srmRm: File not removed!"); - return new TReturnStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - "File not removed, permission denied."); + return new TReturnStatus( + TStatusCode.SRM_AUTHORIZATION_FAILURE, "File not removed, permission denied."); } manager.abortAllGetRequestsForSURL(null, surl, "File has been removed."); @@ -199,11 +192,10 @@ private TReturnStatus removeFile(TSURL surl, GridUserInterface user, RmInputData } return returnStatus; - } - private TReturnStatus computeGlobalStatus(boolean atLeastOneSuccess, boolean atLeastOneFailure, - boolean allUnauthorized) { + private TReturnStatus computeGlobalStatus( + boolean atLeastOneSuccess, boolean atLeastOneFailure, boolean allUnauthorized) { if (atLeastOneSuccess && !atLeastOneFailure) { return new TReturnStatus(TStatusCode.SRM_SUCCESS, "All files removed"); @@ -212,8 +204,8 @@ private TReturnStatus computeGlobalStatus(boolean atLeastOneSuccess, boolean atL return new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, "Some files were not removed"); } if (allUnauthorized) { - return new TReturnStatus(TStatusCode.SRM_AUTHORIZATION_FAILURE, - "User is not authorized to remove any files"); + return new TReturnStatus( + TStatusCode.SRM_AUTHORIZATION_FAILURE, "User is not authorized to remove any files"); } return new TReturnStatus(TStatusCode.SRM_FAILURE, "No files removed"); } @@ -268,7 +260,8 @@ private void checkUserAuthorization(StoRI stori, GridUserInterface user) throws } if (!isSpaceAuthorized) { log.debug("srmRm: User not authorized to perform srmRm on SA: {}", token); - throw new RmException(TStatusCode.SRM_AUTHORIZATION_FAILURE, + throw new RmException( + TStatusCode.SRM_AUTHORIZATION_FAILURE, "User not authorized to perform srmRm request on the storage area"); } AuthzDecision decision; @@ -280,8 +273,8 @@ private void checkUserAuthorization(StoRI stori, GridUserInterface user) throws } if (!decision.equals(AuthzDecision.PERMIT)) { log.debug("srmRm: User is not authorized to delete a file"); 
- throw new RmException(TStatusCode.SRM_AUTHORIZATION_FAILURE, - "User is not authorized to delete a file"); + throw new RmException( + TStatusCode.SRM_AUTHORIZATION_FAILURE, "User is not authorized to delete a file"); } } @@ -294,8 +287,8 @@ private void printRequestOutcome(TReturnStatus status, RmInputData inputData) { if (inputData != null) { if (inputData.getSurlArray() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData, - inputData.getSurlArray().asStringList()); + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, status, inputData, inputData.getSurlArray().asStringList()); } else { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); } @@ -303,5 +296,4 @@ private void printRequestOutcome(TReturnStatus status, RmInputData inputData) { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); } } - } diff --git a/src/main/java/it/grid/storm/synchcall/command/directory/RmdirCommand.java b/src/main/java/it/grid/storm/synchcall/command/directory/RmdirCommand.java index e20166a1..0558a01d 100644 --- a/src/main/java/it/grid/storm/synchcall/command/directory/RmdirCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/directory/RmdirCommand.java @@ -1,14 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.directory; -import java.util.Arrays; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.authz.AuthzDecision; import it.grid.storm.authz.AuthzDirector; import it.grid.storm.authz.SpaceAuthzInterface; @@ -35,7 +29,9 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.directory.RmdirInputData; import it.grid.storm.synchcall.data.directory.RmdirOutputData; - +import java.util.Arrays; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class RmdirException extends SRMCommandException { @@ -48,284 +44,268 @@ public RmdirException(TStatusCode code, String message) { } class TSize { - - private long size; - - TSize(long size) { - this.size = size; - } - - public void add(long n) { - size += n; - } - - public void dec(long n) { - size -= n; - } - - public long get() { - return size; - } - + + private long size; + + TSize(long size) { + this.size = size; + } + + public void add(long n) { + size += n; + } + + public void dec(long n) { + size -= n; + } + + public long get() { + return size; + } } /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * * @author lucamag * @date May 27, 2008 */ - public class RmdirCommand extends DirectoryCommand implements Command { - - public static final Logger log = LoggerFactory.getLogger(RmdirCommand.class); - private static final String SRM_COMMAND = "srmRmdir"; - private final NamespaceInterface namespace; - - public RmdirCommand() { - - namespace = NamespaceDirector.getNamespace(); - - } - - /** - * Method that provide SrmRmdir functionality. - * - * @param inputData - * Contains information about input data for Rmdir request. 
- * @return OutputData Contains output data - */ - public OutputData execute(InputData data) { - - RmdirOutputData outputData = null; - log.debug("SrmRmdir: Start execution."); - checkInputData(data); - outputData = doRmdir((RmdirInputData) data); - log.debug("srmRmdir return status: {}", outputData.getStatus()); - printRequestOutcome(outputData.getStatus(), (RmdirInputData) data); - return outputData; - - } - - private RmdirOutputData doRmdir(RmdirInputData data) { - - TSURL surl = null; - GridUserInterface user = null; - StoRI stori = null; - TReturnStatus returnStatus = null; - boolean recursion = false; - TSize size = new TSize(0); - - try { - surl = getSURL(data); - user = getUser(data); - recursion = isRecursive(data); - stori = resolveStoRI(surl, user); - checkUserAuthorization(stori, user); - log.debug("srmRmdir: rmdir authorized for {}. Dir={}. Recursive={}", - userToString(user), stori.getPFN(), recursion); - returnStatus = removeFolder(stori.getLocalFile(), recursion, size); - log.debug("srmRmdir: decrease used space of {} bytes", size.get()); - try { - decreaseUsedSpace(stori.getLocalFile(), size.get()); - } catch (NamespaceException e) { - log.error("srmRmdir: {}", e.getMessage()); - returnStatus.extendExplaination("Unable to decrease used space: " - + e.getMessage()); - } - } catch (RmdirException e) { - log.error("srmRmdir: {}", e.getMessage()); - returnStatus = e.getReturnStatus(); - } - - log.debug("srmRmdir: returned status is {}", returnStatus); - return new RmdirOutputData(returnStatus); - } - - private void checkInputData(InputData data) - throws IllegalArgumentException { - - if (data == null) { - throw new IllegalArgumentException("Invalid input data: NULL"); - } - if (!(data instanceof RmdirInputData)) { - throw new IllegalArgumentException("Invalid input data type"); - } - } - - private StoRI resolveStoRI(TSURL surl, GridUserInterface user) - throws RmdirException { - - String formatStr = "Unable to build a stori for surl {} for user {}: {}"; - try { - return namespace.resolveStoRIbySURL(surl, user); - } catch (UnapprochableSurlException e) { - log.error(formatStr, surl, userToString(user), e.getMessage()); - throw new RmdirException(TStatusCode.SRM_AUTHORIZATION_FAILURE, - e.getMessage()); - } catch (NamespaceException e) { - log.error(formatStr, surl, userToString(user), e.getMessage()); - throw new RmdirException(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage()); - } catch (InvalidSURLException e) { - log.error(formatStr, surl, userToString(user), e.getMessage()); - throw new RmdirException(TStatusCode.SRM_INVALID_PATH, e.getMessage()); - } catch (IllegalArgumentException e) { - log.error(formatStr, surl, userToString(user), e.getMessage()); - throw new RmdirException(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage()); - } - } - - private boolean isAnonymous(GridUserInterface user) { - - return (user == null); - } - - private String userToString(GridUserInterface user) { - - return isAnonymous(user) ? 
"anonymous" : user.getDn(); - } - - private void checkUserAuthorization(StoRI stori, GridUserInterface user) - throws RmdirException { - - TSpaceToken token = stori.getVirtualFileSystem().getSpaceToken(); - SpaceAuthzInterface spaceAuth = AuthzDirector.getSpaceAuthz(token); - - boolean isSpaceAuthorized; - if (isAnonymous(user)) { - isSpaceAuthorized = spaceAuth.authorizeAnonymous(SRMSpaceRequest.RMD); - } else { - isSpaceAuthorized = spaceAuth.authorize(user, SRMSpaceRequest.RMD); - } - if (!isSpaceAuthorized) { - log.debug("srmRmdir: User not authorized to perform srmRmdir request " - + "on the storage area: {}", token); - throw new RmdirException(TStatusCode.SRM_AUTHORIZATION_FAILURE, - "User is not authorized to remove the directory on the storage area " - + token); - } - - AuthzDecision decision; - if (isAnonymous(user)) { - decision = AuthzDirector.getPathAuthz().authorizeAnonymous( - SRMFileRequest.RMD, stori.getStFN()); - } else { - decision = AuthzDirector.getPathAuthz().authorize(user, - SRMFileRequest.RMD, stori); - } - if (!decision.equals(AuthzDecision.PERMIT)) { - log.debug("srmRmdir: User is not authorized to delete the directory"); - throw new RmdirException(TStatusCode.SRM_AUTHORIZATION_FAILURE, - "User is not authorized to remove the directory"); - } - return; - } - - private GridUserInterface getUser(InputData data) { - - if (data instanceof IdentityInputData) { - return ((IdentityInputData) data).getUser(); - } - return null; - } - - private TSURL getSURL(RmdirInputData data) throws RmdirException { - - TSURL surl = ((RmdirInputData) data).getSurl(); - if (surl == null) { - throw new RmdirException(TStatusCode.SRM_FAILURE, - "SURL specified is NULL"); - } - if (surl.isEmpty()) { - throw new RmdirException(TStatusCode.SRM_FAILURE, - "SURL specified is empty"); - } - return surl; - } - - private boolean isRecursive(RmdirInputData data) { - - return data.getRecursive().booleanValue(); - } - - private void decreaseUsedSpace(LocalFile localFile, long sizeToRemove) - throws NamespaceException { - - NamespaceDirector.getNamespace().resolveVFSbyLocalFile(localFile) - .decreaseUsedSpace(sizeToRemove); - } - - private TReturnStatus removeFolder(LocalFile dir, boolean recursive, TSize size) - throws RmdirException { - - /* - * Check if dir exists and is a directory, if recursion is enabled when - * directory is not empty, etc... 
- */ - - if (!dir.exists()) { - return new TReturnStatus(TStatusCode.SRM_INVALID_PATH, - "Directory does not exists"); - } - if (!dir.isDirectory()) { - return new TReturnStatus(TStatusCode.SRM_INVALID_PATH, "Not a directory"); - } - if (!recursive && (dir.listFiles().length > 0)) { - return new TReturnStatus(TStatusCode.SRM_NON_EMPTY_DIRECTORY, - "Directory is not empty"); - } - - if (recursive) { - LocalFile[] list = dir.listFiles(); - log.debug("srmRmdir: removing {} content", dir); - for (LocalFile element : list) { - log.debug("srmRmdir: removing {}", element); - if (element.isDirectory()) { - removeFolder(element, recursive, size); - } else { - removeFile(element, size); - } - } - } - log.debug("srmRmdir: removing {}", dir); - removeEmptyDirectory(dir, size); - return new TReturnStatus(TStatusCode.SRM_SUCCESS, "Directory removed with success!"); - } - - private void removeEmptyDirectory(LocalFile directory, TSize size) - throws RmdirException { - - removeFile(directory, size); - } - - private void removeFile(LocalFile file, TSize size) throws RmdirException { - - long fileSize = file.length(); - if (!file.delete()) { - log.error("srmRmdir: Unable to delete {}", file); - throw new RmdirException(TStatusCode.SRM_FAILURE, - "Unable to delete " + file.getAbsolutePath()); - } - size.add(fileSize); - } - - private void printRequestOutcome(TReturnStatus status, - RmdirInputData inputData) { - - if (inputData != null) { - if (inputData.getSurl() != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData, - Arrays.asList(inputData.getSurl().toString())); - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); - } - - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); - } - } - -} \ No newline at end of file + private static final String SRM_COMMAND = "srmRmdir"; + private final NamespaceInterface namespace; + + public RmdirCommand() { + + namespace = NamespaceDirector.getNamespace(); + } + + /** + * Method that provide SrmRmdir functionality. + * + * @param inputData Contains information about input data for Rmdir request. + * @return OutputData Contains output data + */ + public OutputData execute(InputData data) { + + RmdirOutputData outputData = null; + log.debug("SrmRmdir: Start execution."); + checkInputData(data); + outputData = doRmdir((RmdirInputData) data); + log.debug("srmRmdir return status: {}", outputData.getStatus()); + printRequestOutcome(outputData.getStatus(), (RmdirInputData) data); + return outputData; + } + + private RmdirOutputData doRmdir(RmdirInputData data) { + + TSURL surl = null; + GridUserInterface user = null; + StoRI stori = null; + TReturnStatus returnStatus = null; + boolean recursion = false; + TSize size = new TSize(0); + + try { + surl = getSURL(data); + user = getUser(data); + recursion = isRecursive(data); + stori = resolveStoRI(surl, user); + checkUserAuthorization(stori, user); + log.debug( + "srmRmdir: rmdir authorized for {}. Dir={}. 
Recursive={}", + userToString(user), + stori.getPFN(), + recursion); + returnStatus = removeFolder(stori.getLocalFile(), recursion, size); + log.debug("srmRmdir: decrease used space of {} bytes", size.get()); + try { + decreaseUsedSpace(stori.getLocalFile(), size.get()); + } catch (NamespaceException e) { + log.error("srmRmdir: {}", e.getMessage()); + returnStatus.extendExplaination("Unable to decrease used space: " + e.getMessage()); + } + } catch (RmdirException e) { + log.error("srmRmdir: {}", e.getMessage()); + returnStatus = e.getReturnStatus(); + } + + log.debug("srmRmdir: returned status is {}", returnStatus); + return new RmdirOutputData(returnStatus); + } + + private void checkInputData(InputData data) throws IllegalArgumentException { + + if (data == null) { + throw new IllegalArgumentException("Invalid input data: NULL"); + } + if (!(data instanceof RmdirInputData)) { + throw new IllegalArgumentException("Invalid input data type"); + } + } + + private StoRI resolveStoRI(TSURL surl, GridUserInterface user) throws RmdirException { + + String formatStr = "Unable to build a stori for surl {} for user {}: {}"; + try { + return namespace.resolveStoRIbySURL(surl, user); + } catch (UnapprochableSurlException e) { + log.error(formatStr, surl, userToString(user), e.getMessage()); + throw new RmdirException(TStatusCode.SRM_AUTHORIZATION_FAILURE, e.getMessage()); + } catch (NamespaceException e) { + log.error(formatStr, surl, userToString(user), e.getMessage()); + throw new RmdirException(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage()); + } catch (InvalidSURLException e) { + log.error(formatStr, surl, userToString(user), e.getMessage()); + throw new RmdirException(TStatusCode.SRM_INVALID_PATH, e.getMessage()); + } catch (IllegalArgumentException e) { + log.error(formatStr, surl, userToString(user), e.getMessage()); + throw new RmdirException(TStatusCode.SRM_INTERNAL_ERROR, e.getMessage()); + } + } + + private boolean isAnonymous(GridUserInterface user) { + + return (user == null); + } + + private String userToString(GridUserInterface user) { + + return isAnonymous(user) ? 
"anonymous" : user.getDn(); + } + + private void checkUserAuthorization(StoRI stori, GridUserInterface user) throws RmdirException { + + TSpaceToken token = stori.getVirtualFileSystem().getSpaceToken(); + SpaceAuthzInterface spaceAuth = AuthzDirector.getSpaceAuthz(token); + + boolean isSpaceAuthorized; + if (isAnonymous(user)) { + isSpaceAuthorized = spaceAuth.authorizeAnonymous(SRMSpaceRequest.RMD); + } else { + isSpaceAuthorized = spaceAuth.authorize(user, SRMSpaceRequest.RMD); + } + if (!isSpaceAuthorized) { + log.debug( + "srmRmdir: User not authorized to perform srmRmdir request " + "on the storage area: {}", + token); + throw new RmdirException( + TStatusCode.SRM_AUTHORIZATION_FAILURE, + "User is not authorized to remove the directory on the storage area " + token); + } + + AuthzDecision decision; + if (isAnonymous(user)) { + decision = + AuthzDirector.getPathAuthz().authorizeAnonymous(SRMFileRequest.RMD, stori.getStFN()); + } else { + decision = AuthzDirector.getPathAuthz().authorize(user, SRMFileRequest.RMD, stori); + } + if (!decision.equals(AuthzDecision.PERMIT)) { + log.debug("srmRmdir: User is not authorized to delete the directory"); + throw new RmdirException( + TStatusCode.SRM_AUTHORIZATION_FAILURE, "User is not authorized to remove the directory"); + } + return; + } + + private GridUserInterface getUser(InputData data) { + + if (data instanceof IdentityInputData) { + return ((IdentityInputData) data).getUser(); + } + return null; + } + + private TSURL getSURL(RmdirInputData data) throws RmdirException { + + TSURL surl = ((RmdirInputData) data).getSurl(); + if (surl == null) { + throw new RmdirException(TStatusCode.SRM_FAILURE, "SURL specified is NULL"); + } + if (surl.isEmpty()) { + throw new RmdirException(TStatusCode.SRM_FAILURE, "SURL specified is empty"); + } + return surl; + } + + private boolean isRecursive(RmdirInputData data) { + + return data.getRecursive().booleanValue(); + } + + private void decreaseUsedSpace(LocalFile localFile, long sizeToRemove) throws NamespaceException { + + NamespaceDirector.getNamespace() + .resolveVFSbyLocalFile(localFile) + .decreaseUsedSpace(sizeToRemove); + } + + private TReturnStatus removeFolder(LocalFile dir, boolean recursive, TSize size) + throws RmdirException { + + /* + * Check if dir exists and is a directory, if recursion is enabled when + * directory is not empty, etc... 
+ */ + + if (!dir.exists()) { + return new TReturnStatus(TStatusCode.SRM_INVALID_PATH, "Directory does not exists"); + } + if (!dir.isDirectory()) { + return new TReturnStatus(TStatusCode.SRM_INVALID_PATH, "Not a directory"); + } + if (!recursive && (dir.listFiles().length > 0)) { + return new TReturnStatus(TStatusCode.SRM_NON_EMPTY_DIRECTORY, "Directory is not empty"); + } + + if (recursive) { + LocalFile[] list = dir.listFiles(); + log.debug("srmRmdir: removing {} content", dir); + for (LocalFile element : list) { + log.debug("srmRmdir: removing {}", element); + if (element.isDirectory()) { + removeFolder(element, recursive, size); + } else { + removeFile(element, size); + } + } + } + log.debug("srmRmdir: removing {}", dir); + removeEmptyDirectory(dir, size); + return new TReturnStatus(TStatusCode.SRM_SUCCESS, "Directory removed with success!"); + } + + private void removeEmptyDirectory(LocalFile directory, TSize size) throws RmdirException { + + removeFile(directory, size); + } + + private void removeFile(LocalFile file, TSize size) throws RmdirException { + + long fileSize = file.length(); + if (!file.delete()) { + log.error("srmRmdir: Unable to delete {}", file); + throw new RmdirException( + TStatusCode.SRM_FAILURE, "Unable to delete " + file.getAbsolutePath()); + } + size.add(fileSize); + } + + private void printRequestOutcome(TReturnStatus status, RmdirInputData inputData) { + + if (inputData != null) { + if (inputData.getSurl() != null) { + CommandHelper.printRequestOutcome( + SRM_COMMAND, log, status, inputData, Arrays.asList(inputData.getSurl().toString())); + } else { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); + } + + } else { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); + } + } +} diff --git a/src/main/java/it/grid/storm/synchcall/command/discovery/PingCommand.java b/src/main/java/it/grid/storm/synchcall/command/discovery/PingCommand.java index 9a85013c..a2319f53 100644 --- a/src/main/java/it/grid/storm/synchcall/command/discovery/PingCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/discovery/PingCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.discovery; @@ -18,10 +17,6 @@ import it.grid.storm.synchcall.data.discovery.PingInputData; import it.grid.storm.synchcall.data.discovery.PingOutputData; import it.grid.storm.tape.recalltable.TapeRecallCatalog; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -29,17 +24,17 @@ import java.util.List; import java.util.Map.Entry; import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. 
Copyright: Copyright (c) 2008 Company: INFN-CNAF and * ICTP/EGRID project - * + * * @author lucamag * @author Alberto Forti * @date May 28, 2008 - * */ - public class PingCommand extends DiscoveryCommand implements Command { public static final Logger log = LoggerFactory.getLogger(PingCommand.class); @@ -77,14 +72,16 @@ public OutputData execute(InputData data) { outputData.setExtraInfoArray(extraInfoArray); - log.info("srmPing: <{}> [AuthID: {}] extraInfo: {}", DataHelper.getRequestor(inputData), - inputData.getAuthorizationID(), extraInfoArray); + log.info( + "srmPing: <{}> [AuthID: {}] extraInfo: {}", + DataHelper.getRequestor(inputData), + inputData.getAuthorizationID(), + extraInfoArray); return outputData; } /** - * * @param authorizationID String * @return String normalizedAuthID */ @@ -101,10 +98,7 @@ private String getKey(String authorizationID) { return result; } - /** - * - * @return Properties - */ + /** @return Properties */ private Properties loadProperties() { Properties properties = new Properties(); @@ -118,8 +112,8 @@ private Properties loadProperties() { try { properties.load(new FileInputStream(propertiesFile)); } catch (IOException e) { - log.error("Error loading ping properties from file {}. {}", propertiesFile, e.getMessage(), - e); + log.error( + "Error loading ping properties from file {}. {}", propertiesFile, e.getMessage(), e); } } @@ -128,13 +122,11 @@ private Properties loadProperties() { return properties; } - /**************************** - * SPECIAL KEY MANAGEMENT - ****************************/ + /** ************************** SPECIAL KEY MANAGEMENT ************************** */ /** * Dispatcher for manage the special keys on Ping - * + * * @param param * @return */ @@ -169,23 +161,25 @@ private ArrayOfTExtraInfo manageSpecialKey(String key) { case TEST_TAKEOVER: arrayResult = test_takeover(extractParam(key)); break; - default: { - TExtraInfo extraInfo = new TExtraInfo(); - try { - extraInfo = new TExtraInfo(SpecialKey.UNKNOWN.toString(), - SpecialKey.UNKNOWN.getDescription() + ":'" + key + "'"); - } catch (InvalidTExtraInfoAttributeException e) { - log.error(e.getMessage(), e); + default: + { + TExtraInfo extraInfo = new TExtraInfo(); + try { + extraInfo = + new TExtraInfo( + SpecialKey.UNKNOWN.toString(), + SpecialKey.UNKNOWN.getDescription() + ":'" + key + "'"); + } catch (InvalidTExtraInfoAttributeException e) { + log.error(e.getMessage(), e); + } + arrayResult.addTExtraInfo(extraInfo); + break; } - arrayResult.addTExtraInfo(extraInfo); - break; - } } return arrayResult; } /** - * * @param param * @return */ @@ -195,7 +189,7 @@ private ArrayOfTExtraInfo allKeys(String param) { Properties pingValues = loadProperties(); TExtraInfo otherInfo = new TExtraInfo(); - for (Enumeration e = pingValues.propertyNames(); e.hasMoreElements();) { + for (Enumeration e = pingValues.propertyNames(); e.hasMoreElements(); ) { String key = (String) e.nextElement(); String value = pingValues.getProperty(key); try { @@ -253,12 +247,9 @@ private ArrayOfTExtraInfo test_takeover(String param) { return arrayResult; } - /********************************** - * UTILITY METHODS - **********************************/ + /** ******************************** UTILITY METHODS ******************************** */ /** - * * @param key * @return */ @@ -287,30 +278,20 @@ private static String extractParam(String key) { return param; } - /********************************** - * MAIN for TEST PURPOUSE - **********************************/ - - public static void main(String arg[]) { + /** 
******************************** MAIN for TEST PURPOUSE ******************************** */ + public static void main(String arg[]) {} - } - - /** - * - * - */ + /** */ private enum SpecialKey { - - ALL("all", "return all the pair defined in properties"), BE_OS_PLATFORM( - Constants.BE_OS_PLATFORM.getKey(), - "returns the operating system platform"), BE_OS_KERNEL_RELEASE( - Constants.BE_OS_KERNEL_RELEASE.getKey(), - "returns the operating system kernel release"), TEST_TAKEOVER("take-over", - "testing the take-over method"), TEST_POST_NEW_TASK("new-task", - "testing the take-over method"), TEST_PUT_NEW_STATUS("new-status", - "testing the take-over method"), TEST_PUT_RETRY_VALUE("retry-value", - "testing the take-over method"), UNKNOWN("unknown", - "Unable to manage the key"); + ALL("all", "return all the pair defined in properties"), + BE_OS_PLATFORM(Constants.BE_OS_PLATFORM.getKey(), "returns the operating system platform"), + BE_OS_KERNEL_RELEASE( + Constants.BE_OS_KERNEL_RELEASE.getKey(), "returns the operating system kernel release"), + TEST_TAKEOVER("take-over", "testing the take-over method"), + TEST_POST_NEW_TASK("new-task", "testing the take-over method"), + TEST_PUT_NEW_STATUS("new-status", "testing the take-over method"), + TEST_PUT_RETRY_VALUE("retry-value", "testing the take-over method"), + UNKNOWN("unknown", "Unable to manage the key"); private final String operationName; private final String operationDescription; diff --git a/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceMetaDataCommand.java b/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceMetaDataCommand.java index 255202a4..ed8d9a0d 100644 --- a/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceMetaDataCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceMetaDataCommand.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.space; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.persistence.exceptions.DataAccessException; @@ -30,190 +26,200 @@ import it.grid.storm.synchcall.data.space.GetSpaceMetaDataInputData; import it.grid.storm.synchcall.data.space.GetSpaceMetaDataOutputData; import it.grid.storm.synchcall.data.space.IdentityGetSpaceMetaDataInputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the GetSpaceMetaDataManager Class. This class hava a - * reseveSpace method that perform all operation nedded to satisfy a SRM space - * release request. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *

This class represents the GetSpaceMetaDataManager Class. This class hava a reseveSpace method + * that perform all operation nedded to satisfy a SRM space release request. + * * @author lucamag * @date May 29, 2008 - * */ - public class GetSpaceMetaDataCommand extends SpaceCommand implements Command { - public static final Logger log = LoggerFactory - .getLogger(GetSpaceMetaDataCommand.class); - - private ReservedSpaceCatalog catalog = null; - - private static final String SRM_COMMAND = "srmGetSpaceMetaData"; - - /** - * Constructor. Bind the Executor with ReservedSpaceCatalog - */ - - public GetSpaceMetaDataCommand() { - - catalog = new ReservedSpaceCatalog(); - } - - /** - * - * @param data - * GetSpaceMetaDataInputData - * @return GetSpaceMetaDataOutputData - */ - public OutputData execute(InputData indata) { - - log.debug(""); - log.debug(" Updating SA with GPFS quotas results"); - GPFSQuotaManager.INSTANCE.triggerComputeQuotas(); - - IdentityGetSpaceMetaDataInputData data; - if (indata instanceof IdentityInputData) { - data = (IdentityGetSpaceMetaDataInputData) indata; - } else { - GetSpaceMetaDataOutputData outputData = new GetSpaceMetaDataOutputData(); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" - + SRM_COMMAND)); - printRequestOutcome(outputData.getStatus(), - (GetSpaceMetaDataInputData) indata); - return outputData; - } - int errorCount = 0; - ArrayOfTMetaDataSpace arrayData = new ArrayOfTMetaDataSpace(); - TReturnStatus globalStatus = null; - - TMetaDataSpace metadata = null; - - for (TSpaceToken token : data.getSpaceTokenArray().getTSpaceTokenArray()) { - StorageSpaceData spaceData = null; - try { - spaceData = catalog.getStorageSpace(token); - } catch (TransferObjectDecodingException e) { - log.error("Error getting storage space data for token {}. {}", - token, e.getMessage(),e); - metadata = createFailureMetadata(token, TStatusCode.SRM_INTERNAL_ERROR, - "Error building space data from row DB data", data.getUser()); - errorCount++; - arrayData.addTMetaDataSpace(metadata); - continue; - - } catch (DataAccessException e) { - log.error("Error getting storage space data for token {}. {}", - token, e.getMessage(),e); - metadata = createFailureMetadata(token, TStatusCode.SRM_INTERNAL_ERROR, - "Error retrieving row space token data from DB", data.getUser()); - errorCount++; - arrayData.addTMetaDataSpace(metadata); - continue; - } - if (spaceData != null) { - if (!spaceData.isInitialized()) { - log.warn("Uninitialized storage data found for token {}", token); - metadata = createFailureMetadata(token, TStatusCode.SRM_FAILURE, - "Storage Space not initialized yet", data.getUser()); - errorCount++; - } else { - try { - metadata = new TMetaDataSpace(spaceData); - } catch (InvalidTMetaDataSpaceAttributeException e) { - log.error("Metadata error. {}", e.getMessage(), e); - metadata = createFailureMetadata(token, - TStatusCode.SRM_INTERNAL_ERROR, - "Error building Storage Space Metadata from row data", - data.getUser()); - errorCount++; - } catch (InvalidTSizeAttributesException e) { - log.error("Metadata error. 
{}", e.getMessage(), e); - metadata = createFailureMetadata(token, - TStatusCode.SRM_INTERNAL_ERROR, - "Error building Storage Space Metadata from row data", - data.getUser()); - errorCount++; - } - } - } else { - log.warn("Unable to retrieve space data for token {}.",token); - metadata = createFailureMetadata(token, - TStatusCode.SRM_INVALID_REQUEST, "Space Token not found", - data.getUser()); - errorCount++; - } - arrayData.addTMetaDataSpace(metadata); - } - - boolean requestSuccess = (errorCount == 0); - boolean requestFailure = (errorCount == data.getSpaceTokenArray().size()); - - if (requestSuccess) { - globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, ""); - - log.info("srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " - + "done succesfully with: [status: {}]", data.getUser(), - data.getSpaceTokenArray(), globalStatus); - - } else { - if (requestFailure) { - globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, - "No valid space tokens"); - - log.info( - "srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " - + "failed with: [status: {}]", data.getUser(), - data.getSpaceTokenArray(), globalStatus); - - } else { - - globalStatus = new TReturnStatus(TStatusCode.SRM_PARTIAL_SUCCESS, - "Check space tokens statuses for details"); - - log.info( - "srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " - + "partially done with: [status: {}]", data.getUser(), - data.getSpaceTokenArray(), globalStatus); - - } - } - - GetSpaceMetaDataOutputData response = null; - try { - response = new GetSpaceMetaDataOutputData(globalStatus, arrayData); - } catch (InvalidGetSpaceMetaDataOutputAttributeException e) { - log.error(e.getMessage(),e); - } - return response; - } - - private TMetaDataSpace createFailureMetadata(TSpaceToken token, - TStatusCode statusCode, String message, GridUserInterface user) { - - TMetaDataSpace metadata = TMetaDataSpace.makeEmpty(); - metadata.setSpaceToken(token); - - try { - metadata.setStatus(new TReturnStatus(statusCode, message)); - } catch (IllegalArgumentException e) { - log.error(e.getMessage(),e); - } - - return metadata; - } - - private void printRequestOutcome(TReturnStatus status, - GetSpaceMetaDataInputData inputData) { - - if (inputData != null) { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); - } else { - CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); - } - } - + public static final Logger log = LoggerFactory.getLogger(GetSpaceMetaDataCommand.class); + + private ReservedSpaceCatalog catalog = null; + + private static final String SRM_COMMAND = "srmGetSpaceMetaData"; + + /** Constructor. 
Bind the Executor with ReservedSpaceCatalog */ + public GetSpaceMetaDataCommand() { + + catalog = new ReservedSpaceCatalog(); + } + + /** + * @param data GetSpaceMetaDataInputData + * @return GetSpaceMetaDataOutputData + */ + public OutputData execute(InputData indata) { + + log.debug(""); + log.debug(" Updating SA with GPFS quotas results"); + GPFSQuotaManager.INSTANCE.triggerComputeQuotas(); + + IdentityGetSpaceMetaDataInputData data; + if (indata instanceof IdentityInputData) { + data = (IdentityGetSpaceMetaDataInputData) indata; + } else { + GetSpaceMetaDataOutputData outputData = new GetSpaceMetaDataOutputData(); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" + SRM_COMMAND)); + printRequestOutcome(outputData.getStatus(), (GetSpaceMetaDataInputData) indata); + return outputData; + } + int errorCount = 0; + ArrayOfTMetaDataSpace arrayData = new ArrayOfTMetaDataSpace(); + TReturnStatus globalStatus = null; + + TMetaDataSpace metadata = null; + + for (TSpaceToken token : data.getSpaceTokenArray().getTSpaceTokenArray()) { + StorageSpaceData spaceData = null; + try { + spaceData = catalog.getStorageSpace(token); + } catch (TransferObjectDecodingException e) { + log.error("Error getting storage space data for token {}. {}", token, e.getMessage(), e); + metadata = + createFailureMetadata( + token, + TStatusCode.SRM_INTERNAL_ERROR, + "Error building space data from row DB data", + data.getUser()); + errorCount++; + arrayData.addTMetaDataSpace(metadata); + continue; + + } catch (DataAccessException e) { + log.error("Error getting storage space data for token {}. {}", token, e.getMessage(), e); + metadata = + createFailureMetadata( + token, + TStatusCode.SRM_INTERNAL_ERROR, + "Error retrieving row space token data from DB", + data.getUser()); + errorCount++; + arrayData.addTMetaDataSpace(metadata); + continue; + } + if (spaceData != null) { + if (!spaceData.isInitialized()) { + log.warn("Uninitialized storage data found for token {}", token); + metadata = + createFailureMetadata( + token, + TStatusCode.SRM_FAILURE, + "Storage Space not initialized yet", + data.getUser()); + errorCount++; + } else { + try { + metadata = new TMetaDataSpace(spaceData); + } catch (InvalidTMetaDataSpaceAttributeException e) { + log.error("Metadata error. {}", e.getMessage(), e); + metadata = + createFailureMetadata( + token, + TStatusCode.SRM_INTERNAL_ERROR, + "Error building Storage Space Metadata from row data", + data.getUser()); + errorCount++; + } catch (InvalidTSizeAttributesException e) { + log.error("Metadata error. 
{}", e.getMessage(), e); + metadata = + createFailureMetadata( + token, + TStatusCode.SRM_INTERNAL_ERROR, + "Error building Storage Space Metadata from row data", + data.getUser()); + errorCount++; + } + } + } else { + log.warn("Unable to retrieve space data for token {}.", token); + metadata = + createFailureMetadata( + token, TStatusCode.SRM_INVALID_REQUEST, "Space Token not found", data.getUser()); + errorCount++; + } + arrayData.addTMetaDataSpace(metadata); + } + + boolean requestSuccess = (errorCount == 0); + boolean requestFailure = (errorCount == data.getSpaceTokenArray().size()); + + if (requestSuccess) { + globalStatus = new TReturnStatus(TStatusCode.SRM_SUCCESS, ""); + + log.info( + "srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " + + "done succesfully with: [status: {}]", + data.getUser(), + data.getSpaceTokenArray(), + globalStatus); + + } else { + if (requestFailure) { + globalStatus = new TReturnStatus(TStatusCode.SRM_FAILURE, "No valid space tokens"); + + log.info( + "srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " + + "failed with: [status: {}]", + data.getUser(), + data.getSpaceTokenArray(), + globalStatus); + + } else { + + globalStatus = + new TReturnStatus( + TStatusCode.SRM_PARTIAL_SUCCESS, "Check space tokens statuses for details"); + + log.info( + "srmGetSpaceMetadata: user <{}> Request for [spaceTokens: {}] " + + "partially done with: [status: {}]", + data.getUser(), + data.getSpaceTokenArray(), + globalStatus); + } + } + + GetSpaceMetaDataOutputData response = null; + try { + response = new GetSpaceMetaDataOutputData(globalStatus, arrayData); + } catch (InvalidGetSpaceMetaDataOutputAttributeException e) { + log.error(e.getMessage(), e); + } + return response; + } + + private TMetaDataSpace createFailureMetadata( + TSpaceToken token, TStatusCode statusCode, String message, GridUserInterface user) { + + TMetaDataSpace metadata = TMetaDataSpace.makeEmpty(); + metadata.setSpaceToken(token); + + try { + metadata.setStatus(new TReturnStatus(statusCode, message)); + } catch (IllegalArgumentException e) { + log.error(e.getMessage(), e); + } + + return metadata; + } + + private void printRequestOutcome(TReturnStatus status, GetSpaceMetaDataInputData inputData) { + + if (inputData != null) { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, inputData); + } else { + CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceTokensCommand.java b/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceTokensCommand.java index 4e488e2c..adc08c20 100644 --- a/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceTokensCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/space/GetSpaceTokensCommand.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.space; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.catalogs.ReservedSpaceCatalog; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.ArrayOfTSpaceToken; @@ -19,25 +15,22 @@ import it.grid.storm.synchcall.data.InputData; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.space.GetSpaceTokensInputData; -import it.grid.storm.synchcall.data.space.IdentityGetSpaceTokensInputData; import it.grid.storm.synchcall.data.space.GetSpaceTokensOutputData; +import it.grid.storm.synchcall.data.space.IdentityGetSpaceTokensInputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project * Execute the GetSpaceTokens - * request. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project * Execute the GetSpaceTokens request. + * * @author lucamag * @author Alberto Forti - * * @date May 29, 2008 - * */ - public class GetSpaceTokensCommand extends SpaceCommand implements Command { - public static final Logger log = LoggerFactory - .getLogger(GetSpaceTokensCommand.class); + public static final Logger log = LoggerFactory.getLogger(GetSpaceTokensCommand.class); private static final String SRM_COMMAND = "srmGetSpaceTokens"; private ReservedSpaceCatalog catalog = null; @@ -55,11 +48,10 @@ public OutputData execute(InputData data) { inputData = (IdentityGetSpaceTokensInputData) data; } else { outputData = new GetSpaceTokensOutputData(); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" - + SRM_COMMAND)); - printRequestOutcome(outputData.getStatus(), - (GetSpaceTokensInputData) data); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" + SRM_COMMAND)); + printRequestOutcome(outputData.getStatus(), (GetSpaceTokensInputData) data); return outputData; } @@ -70,14 +62,16 @@ public OutputData execute(InputData data) { GridUserInterface user = inputData.getUser(); if (user == null) { log.debug("GetSpaceTokens: the user field is NULL"); - status = new TReturnStatus(TStatusCode.SRM_AUTHENTICATION_FAILURE, - "Unable to get user credential!"); + status = + new TReturnStatus( + TStatusCode.SRM_AUTHENTICATION_FAILURE, "Unable to get user credential!"); - log.error("srmGetSpaceTokens: <{}> " - + "Request for [spaceTokenDescription:{}] failed with: [status: {}]", - user, - inputData.getSpaceTokenAlias(), - status); + log.error( + "srmGetSpaceTokens: <{}> " + + "Request for [spaceTokenDescription:{}] failed with: [status: {}]", + user, + inputData.getSpaceTokenAlias(), + status); outputData = new GetSpaceTokensOutputData(status, null); return outputData; @@ -85,49 +79,49 @@ public OutputData execute(InputData data) { String spaceAlias = inputData.getSpaceTokenAlias(); log.debug("spaceAlias= {}", spaceAlias); - - ArrayOfTSpaceToken arrayOfSpaceTokens = catalog.getSpaceTokens(user, - spaceAlias); + + ArrayOfTSpaceToken arrayOfSpaceTokens = catalog.getSpaceTokens(user, spaceAlias); if (arrayOfSpaceTokens.size() == 0) { arrayOfSpaceTokens = catalog.getSpaceTokensByAlias(spaceAlias); } - if (arrayOfSpaceTokens.size() == 0) { - if (spaceAlias != null) { - status = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - 
"'userSpaceTokenDescription' does not refer to an existing space"); - } else { - status = new TReturnStatus(TStatusCode.SRM_FAILURE, - "No space tokens owned by this user"); - } - arrayOfSpaceTokens = null; - } else { - status = new TReturnStatus(TStatusCode.SRM_SUCCESS, ""); - } + if (arrayOfSpaceTokens.size() == 0) { + if (spaceAlias != null) { + status = + new TReturnStatus( + TStatusCode.SRM_INVALID_REQUEST, + "'userSpaceTokenDescription' does not refer to an existing space"); + } else { + status = new TReturnStatus(TStatusCode.SRM_FAILURE, "No space tokens owned by this user"); + } + arrayOfSpaceTokens = null; + } else { + status = new TReturnStatus(TStatusCode.SRM_SUCCESS, ""); + } if (status.isSRM_SUCCESS()) { - log.info("srmGetSpaceTokens: <{}> Request for [spaceTokenDescription: {}] " - + "succesfully done with: [status: {}]", - user, - inputData.getSpaceTokenAlias(), - status); + log.info( + "srmGetSpaceTokens: <{}> Request for [spaceTokenDescription: {}] " + + "succesfully done with: [status: {}]", + user, + inputData.getSpaceTokenAlias(), + status); } else { - log.error("srmGetSpaceTokens: <{}> Request for [spaceTokenDescription: {}] " - + "failed with: [status: {}]", - user, - inputData.getSpaceTokenAlias(), - status); + log.error( + "srmGetSpaceTokens: <{}> Request for [spaceTokenDescription: {}] " + + "failed with: [status: {}]", + user, + inputData.getSpaceTokenAlias(), + status); } outputData = new GetSpaceTokensOutputData(status, arrayOfSpaceTokens); return outputData; - } - private void printRequestOutcome(TReturnStatus status, - GetSpaceTokensInputData data) { + private void printRequestOutcome(TReturnStatus status, GetSpaceTokensInputData data) { if (data != null) { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, data); diff --git a/src/main/java/it/grid/storm/synchcall/command/space/ReleaseSpaceCommand.java b/src/main/java/it/grid/storm/synchcall/command/space/ReleaseSpaceCommand.java index 2c27474d..bebc0956 100644 --- a/src/main/java/it/grid/storm/synchcall/command/space/ReleaseSpaceCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/space/ReleaseSpaceCommand.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.space; @@ -19,32 +18,25 @@ import it.grid.storm.synchcall.data.space.IdentityReleaseSpaceInputData; import it.grid.storm.synchcall.data.space.ReleaseSpaceInputData; import it.grid.storm.synchcall.data.space.ReleaseSpaceOutputData; - import java.io.File; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents the ReleaseSpaceManager Class. This class hava a - * reseveSpace method that perform all operation nedded to satisfy a SRM space - * release request. - * + * This class represents the ReleaseSpaceManager Class. This class hava a reseveSpace method that + * perform all operation nedded to satisfy a SRM space release request. 
+ * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - public class ReleaseSpaceCommand extends SpaceCommand implements Command { private final ReservedSpaceCatalog catalog; - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(ReleaseSpaceCommand.class); + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(ReleaseSpaceCommand.class); private static final String SRM_COMMAND = "srmReleaseSpace"; @@ -60,20 +52,17 @@ public OutputData execute(InputData indata) { if (indata instanceof IdentityInputData) { inputData = (IdentityReleaseSpaceInputData) indata; } else { - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" - + SRM_COMMAND)); - printRequestOutcome(outputData.getStatus(), - (ReleaseSpaceInputData) indata); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" + SRM_COMMAND)); + printRequestOutcome(outputData.getStatus(), (ReleaseSpaceInputData) indata); return outputData; } TReturnStatus returnStatus = null; - if ((inputData == null) - || ((inputData != null) && (inputData.getSpaceToken() == null))) { + if ((inputData == null) || ((inputData != null) && (inputData.getSpaceToken() == null))) { log.error("Empty space token."); - returnStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, - "SpaceToken is empty."); + returnStatus = new TReturnStatus(TStatusCode.SRM_INVALID_REQUEST, "SpaceToken is empty."); outputData.setStatus(returnStatus); return outputData; } @@ -81,12 +70,16 @@ public OutputData execute(InputData indata) { GridUserInterface user = inputData.getUser(); if (user == null) { log.debug("Null user credentials."); - returnStatus = new TReturnStatus(TStatusCode.SRM_AUTHENTICATION_FAILURE, - "Unable to get user credential"); + returnStatus = + new TReturnStatus( + TStatusCode.SRM_AUTHENTICATION_FAILURE, "Unable to get user credential"); outputData.setStatus(returnStatus); - log.error("srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " - + "with: [status: {}]", user, inputData.getSpaceToken(), returnStatus); + log.error( + "srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " + "with: [status: {}]", + user, + inputData.getSpaceToken(), + returnStatus); return outputData; } @@ -100,16 +93,22 @@ public OutputData execute(InputData indata) { try { data = catalog.getStorageSpace(inputData.getSpaceToken()); } catch (Throwable e) { - log.error("Error fetching data for space token {}. {}", - inputData.getSpaceToken(), e.getMessage(), e); + log.error( + "Error fetching data for space token {}. 
{}", + inputData.getSpaceToken(), + e.getMessage(), + e); explanation = "Error building space data from row DB data."; statusCode = TStatusCode.SRM_INTERNAL_ERROR; returnStatus = new TReturnStatus(statusCode, explanation); outputData.setStatus(returnStatus); - log.error("srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " - + "with: [status: {}]", user, inputData.getSpaceToken(), returnStatus); + log.error( + "srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " + "with: [status: {}]", + user, + inputData.getSpaceToken(), + returnStatus); return outputData; } @@ -120,8 +119,11 @@ public OutputData execute(InputData indata) { returnStatus = new TReturnStatus(statusCode, explanation); outputData.setStatus(returnStatus); - log.error("srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " - + "with: [status: {}]", user, inputData.getSpaceToken(), returnStatus); + log.error( + "srmReleaseSpace: <{}> Request for [spacetoken: {}] failed " + "with: [status: {}]", + user, + inputData.getSpaceToken(), + returnStatus); return outputData; } @@ -156,30 +158,35 @@ public OutputData execute(InputData indata) { if (returnStatus.isSRM_SUCCESS()) { - log.error("srmReleaseSpace: <{}> Request for [spacetoken: {}] succesfully done " - + "with: [status: {}]", user, inputData.getSpaceToken(), returnStatus); - - } else { + log.error( + "srmReleaseSpace: <{}> Request for [spacetoken: {}] succesfully done " + + "with: [status: {}]", + user, + inputData.getSpaceToken(), + returnStatus); - log.error("srmReleaseSpace: <" + user + "> Request for [spacetoken:" - + inputData.getSpaceToken() + "] for failed with: [status:" - + returnStatus + "]"); + } else { + log.error( + "srmReleaseSpace: <" + + user + + "> Request for [spacetoken:" + + inputData.getSpaceToken() + + "] for failed with: [status:" + + returnStatus + + "]"); } return outputData; } /** - * - * @param user - * GridUserInterface - * @param data - * StorageSpaceData + * @param user GridUserInterface + * @param data StorageSpaceData * @return TReturnStatus */ - private TReturnStatus manageAuthorizedReleaseSpace(StorageSpaceData data, - GridUserInterface user) { + private TReturnStatus manageAuthorizedReleaseSpace( + StorageSpaceData data, GridUserInterface user) { String spaceFileName; PFN pfn = data.getSpaceFileName(); @@ -192,20 +199,19 @@ private TReturnStatus manageAuthorizedReleaseSpace(StorageSpaceData data, if (catalog.release(user, data.getSpaceToken())) { return new TReturnStatus(TStatusCode.SRM_SUCCESS, "Space Released."); } else { - return new TReturnStatus(TStatusCode.SRM_INTERNAL_ERROR, - "Space removed, but spaceToken was not found in the DB"); + return new TReturnStatus( + TStatusCode.SRM_INTERNAL_ERROR, + "Space removed, but spaceToken was not found in the DB"); } } else { - return new TReturnStatus(TStatusCode.SRM_FAILURE, - "Space can not be removed by StoRM!"); + return new TReturnStatus(TStatusCode.SRM_FAILURE, "Space can not be removed by StoRM!"); } } else { - return new TReturnStatus(TStatusCode.SRM_FAILURE, "SRM Internal failure."); + return new TReturnStatus(TStatusCode.SRM_FAILURE, "SRM Internal failure."); } } - private void printRequestOutcome(TReturnStatus status, - ReleaseSpaceInputData indata) { + private void printRequestOutcome(TReturnStatus status, ReleaseSpaceInputData indata) { if (indata != null) { CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, indata); @@ -213,5 +219,4 @@ private void printRequestOutcome(TReturnStatus status, CommandHelper.printRequestOutcome(SRM_COMMAND, log, status); } } - 
} diff --git a/src/main/java/it/grid/storm/synchcall/command/space/ReserveSpaceCommand.java b/src/main/java/it/grid/storm/synchcall/command/space/ReserveSpaceCommand.java index 73a4a3ee..7d1082ec 100644 --- a/src/main/java/it/grid/storm/synchcall/command/space/ReserveSpaceCommand.java +++ b/src/main/java/it/grid/storm/synchcall/command/space/ReserveSpaceCommand.java @@ -1,14 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.command.space; -import java.util.Date; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.acl.AclManager; import it.grid.storm.acl.AclManagerFS; import it.grid.storm.catalogs.InvalidSpaceDataAttributesException; @@ -54,13 +48,15 @@ import it.grid.storm.synchcall.data.space.IdentityReserveSpaceInputData; import it.grid.storm.synchcall.data.space.ReserveSpaceInputData; import it.grid.storm.synchcall.data.space.ReserveSpaceOutputData; +import java.util.Date; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ReserveSpaceCommand extends SpaceCommand implements Command { private ReservedSpaceCatalog catalog; - private static final Logger log = LoggerFactory - .getLogger(ReserveSpaceCommand.class); + private static final Logger log = LoggerFactory.getLogger(ReserveSpaceCommand.class); private NamespaceInterface namespace; @@ -69,15 +65,25 @@ public class ReserveSpaceCommand extends SpaceCommand implements Command { TStatusCode statusCode = TStatusCode.EMPTY; String explanation = null; - private void logRequestSuccess(GridUserInterface user, TSizeInBytes desSize, - TSizeInBytes guarSize, TLifeTimeInSeconds lifetime, - TRetentionPolicyInfo rpinfo, TReturnStatus status) { - - log.info("srmReservespace: <{}> Request for [desiredSizeOfTotalSpace: {}," - + " desiredSizeOfGuaranteedSpace: {}] with " - + "[desiredLifetimeOfReservedSpace: {}, retentionPolicyInfo: {}]" - + "succesfully done with: [status: {}]", user, desSize, guarSize, - lifetime, rpinfo, status); + private void logRequestSuccess( + GridUserInterface user, + TSizeInBytes desSize, + TSizeInBytes guarSize, + TLifeTimeInSeconds lifetime, + TRetentionPolicyInfo rpinfo, + TReturnStatus status) { + + log.info( + "srmReservespace: <{}> Request for [desiredSizeOfTotalSpace: {}," + + " desiredSizeOfGuaranteedSpace: {}] with " + + "[desiredLifetimeOfReservedSpace: {}, retentionPolicyInfo: {}]" + + "succesfully done with: [status: {}]", + user, + desSize, + guarSize, + lifetime, + rpinfo, + status); } private void logRequestFailure(TStatusCode code, String explanation) { @@ -86,17 +92,28 @@ private void logRequestFailure(TStatusCode code, String explanation) { log.error("srmReservespace: request failed with: [status: {}]", status); } - private void logRequestFailure(GridUserInterface user, TSizeInBytes desSize, - TSizeInBytes guarSize, TLifeTimeInSeconds lifetime, - TRetentionPolicyInfo rpinfo, TStatusCode code, String explanation) { + private void logRequestFailure( + GridUserInterface user, + TSizeInBytes desSize, + TSizeInBytes guarSize, + TLifeTimeInSeconds lifetime, + TRetentionPolicyInfo rpinfo, + TStatusCode code, + String explanation) { TReturnStatus status = new TReturnStatus(code, explanation); - log.error("srmReservespace: <{}> Request for [desiredSizeOfTotalSpace: {}," - + " desiredSizeOfGuaranteedSpace: {}] with " - + "[desiredLifetimeOfReservedSpace: {}, 
retentionPolicyInfo: {}]" - + "failed with: [status: {}]", user, desSize, guarSize, lifetime, rpinfo, - status); + log.error( + "srmReservespace: <{}> Request for [desiredSizeOfTotalSpace: {}," + + " desiredSizeOfGuaranteedSpace: {}] with " + + "[desiredLifetimeOfReservedSpace: {}, retentionPolicyInfo: {}]" + + "failed with: [status: {}]", + user, + desSize, + guarSize, + lifetime, + rpinfo, + status); } public ReserveSpaceCommand() { @@ -107,9 +124,8 @@ public ReserveSpaceCommand() { /** * Method that provide space reservation for srmReserveSpace request. - * - * @param data - * Contain information about data procived in SRM request. + * + * @param data Contain information about data procived in SRM request. * @return SpaceResOutputData that contain all SRM return parameter. * @todo Implement this it.grid.storm.synchcall.space.SpaceManager method */ @@ -120,11 +136,10 @@ public OutputData execute(InputData indata) { data = (IdentityReserveSpaceInputData) indata; } else { GetSpaceMetaDataOutputData outputData = new GetSpaceMetaDataOutputData(); - outputData.setStatus(CommandHelper.buildStatus( - TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" - + SRM_COMMAND)); - printRequestOutcome(outputData.getStatus(), - (ReserveSpaceInputData) indata); + outputData.setStatus( + CommandHelper.buildStatus( + TStatusCode.SRM_NOT_SUPPORTED, "Anonymous user can not perform" + SRM_COMMAND)); + printRequestOutcome(outputData.getStatus(), (ReserveSpaceInputData) indata); return outputData; } log.debug(":reserveSpace start."); @@ -139,9 +154,14 @@ public OutputData execute(InputData indata) { } catch (Exception e) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } @@ -153,9 +173,14 @@ public OutputData execute(InputData indata) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } @@ -168,43 +193,55 @@ public OutputData execute(InputData indata) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } SpaceSize spaceSize = null; try { - spaceSize = computeSpaceSize(data.getDesiredSize(), - data.getGuaranteedSize(), vfs); + spaceSize = computeSpaceSize(data.getDesiredSize(), data.getGuaranteedSize(), vfs); } catch (Exception e) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + 
data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } StoRI spaceStori = null; try { - spaceStori = getSpaceStoRI(vfs, relativeSpaceFN, - spaceSize.getDesiderataSpaceSize()); + spaceStori = getSpaceStoRI(vfs, relativeSpaceFN, spaceSize.getDesiderataSpaceSize()); } catch (Exception e) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } - log - .debug("Reserve Space File Size: {}", spaceSize.getDesiderataSpaceSize()); + log.debug("Reserve Space File Size: {}", spaceSize.getDesiderataSpaceSize()); try { spaceStori.getSpace().fakeAllot(); @@ -213,9 +250,14 @@ public OutputData execute(InputData indata) { statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to create Space File into filesystem. \n"; - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return manageError(statusCode, explanation); } @@ -226,9 +268,14 @@ public OutputData execute(InputData indata) { } catch (Exception e) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); revertAllocation(spaceStori.getSpace()); return manageError(statusCode, explanation); @@ -236,15 +283,25 @@ public OutputData execute(InputData indata) { TSpaceToken spaceToken = null; try { - spaceToken = registerIntoDB(data.getUser(), data.getSpaceTokenAlias(), - spaceSize.getTotalSize(), spaceSize.getDesiderataSpaceSize(), - data.getSpaceLifetime(), spaceStori.getPFN()); + spaceToken = + registerIntoDB( + data.getUser(), + data.getSpaceTokenAlias(), + spaceSize.getTotalSize(), + spaceSize.getDesiderataSpaceSize(), + data.getSpaceLifetime(), + spaceStori.getPFN()); } catch (Exception e) { log.error(e.getMessage(), e); - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); revertAllocation(spaceStori.getSpace()); return manageError(statusCode, explanation); @@ -254,16 +311,25 @@ public OutputData execute(InputData indata) { try { output = buildOutput(spaceSize, spaceToken, data.getSpaceLifetime()); - logRequestSuccess(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), output.getStatus()); + logRequestSuccess( + data.getUser(), + data.getDesiredSize(), + 
data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + output.getStatus()); } catch (Exception e) { statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to build a valid output object "; - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); revertAllocation(spaceStori.getSpace()); return manageError(statusCode, explanation); } @@ -279,8 +345,8 @@ private void revertAllocation(Space space) { } } - private StoRI getSpaceStoRI(VirtualFS vfs, String relativeSpaceFN, - TSizeInBytes desiderataSpaceSize) throws Exception { + private StoRI getSpaceStoRI( + VirtualFS vfs, String relativeSpaceFN, TSizeInBytes desiderataSpaceSize) throws Exception { StoRI spaceFile = null; try { @@ -316,31 +382,43 @@ private boolean checkParameters(IdentityReserveSpaceInputData data) { log.debug("Null retentionPolicyInfo."); statusCode = TStatusCode.SRM_INVALID_REQUEST; explanation = "RetentionPolicy not specified."; - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return false; } TAccessLatency latency = data.getRetentionPolicyInfo().getAccessLatency(); - TRetentionPolicy retentionPolicy = data.getRetentionPolicyInfo() - .getRetentionPolicy(); + TRetentionPolicy retentionPolicy = data.getRetentionPolicyInfo().getRetentionPolicy(); - if (!((latency == null || latency.equals(TAccessLatency.EMPTY) || latency - .equals(TAccessLatency.ONLINE)) && (retentionPolicy == null - || retentionPolicy.equals(TRetentionPolicy.EMPTY) || retentionPolicy - .equals(TRetentionPolicy.REPLICA)))) { + if (!((latency == null + || latency.equals(TAccessLatency.EMPTY) + || latency.equals(TAccessLatency.ONLINE)) + && (retentionPolicy == null + || retentionPolicy.equals(TRetentionPolicy.EMPTY) + || retentionPolicy.equals(TRetentionPolicy.REPLICA)))) { - log.debug("Invalid retentionPolicyInfo: {}, {}", data - .getRetentionPolicyInfo().getAccessLatency(), data - .getRetentionPolicyInfo().getRetentionPolicy()); + log.debug( + "Invalid retentionPolicyInfo: {}, {}", + data.getRetentionPolicyInfo().getAccessLatency(), + data.getRetentionPolicyInfo().getRetentionPolicy()); statusCode = TStatusCode.SRM_NOT_SUPPORTED; explanation = "RetentionPolicy requested cannot be satisfied."; - logRequestFailure(data.getUser(), data.getDesiredSize(), - data.getGuaranteedSize(), data.getSpaceLifetime(), - data.getRetentionPolicyInfo(), statusCode, explanation); + logRequestFailure( + data.getUser(), + data.getDesiredSize(), + data.getGuaranteedSize(), + data.getSpaceLifetime(), + data.getRetentionPolicyInfo(), + statusCode, + explanation); return false; } @@ -378,19 +456,15 @@ private VirtualFS getSpaceVFS(String spaceFN) throws Exception { return vfs; } - private void setDefaults(IdentityReserveSpaceInputData data, - VirtualFS vfs) { + private void setDefaults(IdentityReserveSpaceInputData data, VirtualFS vfs) { if (data.getRetentionPolicyInfo().getAccessLatency() == null - || data.getRetentionPolicyInfo().getAccessLatency() - 
.equals(TAccessLatency.EMPTY)) { + || data.getRetentionPolicyInfo().getAccessLatency().equals(TAccessLatency.EMPTY)) { data.getRetentionPolicyInfo().setAccessLatency(TAccessLatency.ONLINE); } if (data.getRetentionPolicyInfo().getRetentionPolicy() == null - || data.getRetentionPolicyInfo().getRetentionPolicy() - .equals(TRetentionPolicy.EMPTY)) { - data.getRetentionPolicyInfo() - .setRetentionPolicy(TRetentionPolicy.REPLICA); + || data.getRetentionPolicyInfo().getRetentionPolicy().equals(TRetentionPolicy.EMPTY)) { + data.getRetentionPolicyInfo().setRetentionPolicy(TRetentionPolicy.REPLICA); } if (data.getSpaceLifetime().isEmpty()) { log.debug("LifeTime is EMPTY. Using default value."); @@ -398,13 +472,12 @@ private void setDefaults(IdentityReserveSpaceInputData data, } } - private SpaceSize computeSpaceSize(TSizeInBytes totalSize, - TSizeInBytes guarSize, VirtualFS vfs) throws Exception { + private SpaceSize computeSpaceSize(TSizeInBytes totalSize, TSizeInBytes guarSize, VirtualFS vfs) + throws Exception { TSizeInBytes desiderataSpaceSize = TSizeInBytes.makeEmpty(); - if ((!(totalSize.isEmpty())) - && (!((guarSize.isEmpty()) || guarSize.value() == 0))) { + if ((!(totalSize.isEmpty())) && (!((guarSize.isEmpty()) || guarSize.value() == 0))) { if (totalSize.value() < guarSize.value()) { log.debug("Error: totalSize < guaranteedSize"); statusCode = TStatusCode.SRM_INVALID_REQUEST; @@ -412,7 +485,7 @@ private SpaceSize computeSpaceSize(TSizeInBytes totalSize, throw new Exception(explanation); } } else { // Assign default values if totalSize and guaranteedSize are - // not defined + // not defined if (!(totalSize.isEmpty())) { guarSize = vfs.getDefaultValues().getDefaultGuaranteedSpaceSize(); if (totalSize.value() < guarSize.value()) { @@ -448,36 +521,30 @@ private SpaceSize computeSpaceSize(TSizeInBytes totalSize, TSizeInBytes freeSpace = null; try { - freeSpace = TSizeInBytes.make(vfs.getFilesystem().getFreeSpace(), - SizeUnit.BYTES); + freeSpace = TSizeInBytes.make(vfs.getFilesystem().getFreeSpace(), SizeUnit.BYTES); } catch (InvalidTSizeAttributesException e) { - log - .debug("Error while retrieving free Space in underlying Filesystem", e); + log.debug("Error while retrieving free Space in underlying Filesystem", e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; - explanation = "Error while retrieving free Space in underlying Filesystem \n" - + e; + explanation = "Error while retrieving free Space in underlying Filesystem \n" + e; throw new Exception(explanation); } catch (NamespaceException ex) { - log - .debug( + log.debug( "Error while retrieving free Space in underlying Filesystem. Unable to retrieve FS Driver", ex); statusCode = TStatusCode.SRM_INTERNAL_ERROR; - explanation = "Error while retrieving free Space in underlying Filesystem. Unable to retrieve FS Driver \n" - + ex; + explanation = + "Error while retrieving free Space in underlying Filesystem. Unable to retrieve FS Driver \n" + + ex; throw new Exception(explanation); } - /** - * @todo Change here, also granted SpaceSize must be considered. - */ + /** @todo Change here, also granted SpaceSize must be considered. 
*/ boolean lower_space = false; // If there is not enogh free space on storage if (freeSpace.value() < desiderataSpaceSize.value()) { if (freeSpace.value() < guarSize.value()) { // Not enough freespace - log - .debug(":reserveSpace Not Enough Free Space on storage!"); + log.debug(":reserveSpace Not Enough Free Space on storage!"); statusCode = TStatusCode.SRM_NO_FREE_SPACE; explanation = "SRM has not more free space."; throw new Exception(explanation); @@ -490,21 +557,18 @@ private SpaceSize computeSpaceSize(TSizeInBytes totalSize, return this.new SpaceSize(desiderataSpaceSize, totalSize, lower_space); } - private String getRelativeSpaceFilePath(VirtualFS vfs, String spaceFN) - throws Exception { + private String getRelativeSpaceFilePath(VirtualFS vfs, String spaceFN) throws Exception { String relativeSpaceFN = null; - relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), - spaceFN); + relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), spaceFN); log.debug("relativeSpaceFN: {}", relativeSpaceFN); return relativeSpaceFN; } - private void setSpaceFilePermissions(StoRI spaceStori, GridUserInterface user) - throws Exception { + private void setSpaceFilePermissions(StoRI spaceStori, GridUserInterface user) throws Exception { FilesystemPermission fp = FilesystemPermission.ReadWrite; @@ -520,8 +584,7 @@ private void setSpaceFilePermissions(StoRI spaceStori, GridUserInterface user) throw new Exception(explanation); } if (localFile == null || localUser == null) { - log.error("ACL setup error. localFile={} , localUser={}", localFile, - localUser); + log.error("ACL setup error. localFile={} , localUser={}", localFile, localUser); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to setting up the ACL "; throw new Exception(explanation); @@ -547,23 +610,35 @@ private void setSpaceFilePermissions(StoRI spaceStori, GridUserInterface user) } } - private TSpaceToken registerIntoDB(GridUserInterface user, - String spaceTokenAlias, TSizeInBytes totalSize, - TSizeInBytes desiderataSpaceSize, TLifeTimeInSeconds lifeTime, PFN pfn) - throws Exception { + private TSpaceToken registerIntoDB( + GridUserInterface user, + String spaceTokenAlias, + TSizeInBytes totalSize, + TSizeInBytes desiderataSpaceSize, + TLifeTimeInSeconds lifeTime, + PFN pfn) + throws Exception { StorageSpaceData spaceData = null; try { - spaceData = new StorageSpaceData(user, TSpaceType.PERMANENT, - spaceTokenAlias, totalSize, desiderataSpaceSize, lifeTime, null, - new Date(), pfn); + spaceData = + new StorageSpaceData( + user, + TSpaceType.PERMANENT, + spaceTokenAlias, + totalSize, + desiderataSpaceSize, + lifeTime, + null, + new Date(), + pfn); } catch (InvalidSpaceDataAttributesException e) { log.debug("Unable to create Storage Space Data", e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to create storage space data."; - logRequestFailure(user, totalSize, desiderataSpaceSize, lifeTime, null, - statusCode, explanation); + logRequestFailure( + user, totalSize, desiderataSpaceSize, lifeTime, null, statusCode, explanation); throw new Exception(explanation); } @@ -579,8 +654,8 @@ private TSpaceToken registerIntoDB(GridUserInterface user, log.debug("Unable to register Storage Space Data into DB", e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to register Storage Space Data into DB."; - logRequestFailure(user, totalSize, desiderataSpaceSize, lifeTime, null, - statusCode, explanation); + logRequestFailure( + user, totalSize, desiderataSpaceSize, 
lifeTime, null, statusCode, explanation); throw new Exception(explanation); } @@ -592,31 +667,36 @@ private TSpaceToken registerIntoDB(GridUserInterface user, statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to create space token."; - logRequestFailure(user, totalSize, desiderataSpaceSize, lifeTime, null, - statusCode, explanation); + logRequestFailure( + user, totalSize, desiderataSpaceSize, lifeTime, null, statusCode, explanation); throw new Exception(explanation); } return spaceToken; } - private ReserveSpaceOutputData buildOutput(SpaceSize spaceSize, - TSpaceToken spaceToken, TLifeTimeInSeconds lifeTime) throws Exception { + private ReserveSpaceOutputData buildOutput( + SpaceSize spaceSize, TSpaceToken spaceToken, TLifeTimeInSeconds lifeTime) throws Exception { TReturnStatus status = null; - if (!spaceSize.isLowerSpace()) { - status = new TReturnStatus(TStatusCode.SRM_SUCCESS, - "Space Reservation done"); + if (!spaceSize.isLowerSpace()) { + status = new TReturnStatus(TStatusCode.SRM_SUCCESS, "Space Reservation done"); - } else { - status = new TReturnStatus(TStatusCode.SRM_LOWER_SPACE_GRANTED, - "Space Reservation done, lower space granted."); - } + } else { + status = + new TReturnStatus( + TStatusCode.SRM_LOWER_SPACE_GRANTED, "Space Reservation done, lower space granted."); + } ReserveSpaceOutputData outputData = null; try { - outputData = new ReserveSpaceOutputData(spaceSize.getTotalSize(), - spaceSize.getDesiderataSpaceSize(), lifeTime, spaceToken, status); + outputData = + new ReserveSpaceOutputData( + spaceSize.getTotalSize(), + spaceSize.getDesiderataSpaceSize(), + lifeTime, + spaceToken, + status); } catch (InvalidReserveSpaceOutputDataAttributesException e) { log.error(e.getMessage(), e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; @@ -632,8 +712,7 @@ private class SpaceSize { private final TSizeInBytes totalSize; private final boolean lowerSpace; - public SpaceSize(TSizeInBytes desiderataSpaceSize, TSizeInBytes totalSize, - boolean lowerSpace) { + public SpaceSize(TSizeInBytes desiderataSpaceSize, TSizeInBytes totalSize, boolean lowerSpace) { this.desiderataSpaceSize = desiderataSpaceSize; this.totalSize = totalSize; @@ -658,10 +737,8 @@ protected boolean isLowerSpace() { /** * Method that reset an already done reservation to the original status. - * - * @param token - * TSpaceToken that contains information about data procived in SRM - * request. + * + * @param token TSpaceToken that contains information about data procived in SRM request. * @return TReturnStatus that contains of all SRM return parameters. 
*/ public TReturnStatus resetReservation(TSpaceToken token) { @@ -705,8 +782,7 @@ public TReturnStatus resetReservation(TSpaceToken token) { } String relativeSpaceFN = null; - relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), - spaceFN); + relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), spaceFN); log.debug("relativeSpaceFN: {}", relativeSpaceFN); @@ -719,8 +795,7 @@ public TReturnStatus resetReservation(TSpaceToken token) { StoRI spaceFile = null; try { - spaceFile = vfs.createSpace(relativeSpaceFN, - desiderataSpaceSize.value()); + spaceFile = vfs.createSpace(relativeSpaceFN, desiderataSpaceSize.value()); } catch (NamespaceException e) { log.debug(e.getMessage(), e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; @@ -746,8 +821,7 @@ public TReturnStatus resetReservation(TSpaceToken token) { LocalFile localFile = spaceFile.getLocalFile(); LocalUser localUser = user.getLocalUser(); if (localFile == null || localUser == null) { - log.error("ACL setup error. localFile={} localUser={}", localFile, - localUser); + log.error("ACL setup error. localFile={} localUser={}", localFile, localUser); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to setting up the ACL "; return manageErrorStatus(statusCode, explanation); @@ -773,8 +847,7 @@ public TReturnStatus resetReservation(TSpaceToken token) { LocalFile localFile = spaceFile.getLocalFile(); LocalUser localUser = user.getLocalUser(); if (localFile == null || localUser == null) { - log.error("ACL setup error. localFile={} localUser={}", localFile, - localUser); + log.error("ACL setup error. localFile={} localUser={}", localFile, localUser); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to setting up the ACL "; return manageErrorStatus(statusCode, explanation); @@ -795,7 +868,6 @@ public TReturnStatus resetReservation(TSpaceToken token) { return manageErrorStatus(statusCode, explanation); } } - } sdata.setUsedSpaceSize(desiderataSpaceSize); @@ -805,16 +877,14 @@ public TReturnStatus resetReservation(TSpaceToken token) { } catch (DataAccessException e) { log.error(e.getMessage(), e); statusCode = TStatusCode.SRM_INTERNAL_ERROR; - explanation = "Error persisting space token data into the DB\n" - + e.getMessage(); + explanation = "Error persisting space token data into the DB\n" + e.getMessage(); return manageErrorStatus(statusCode, explanation); } return manageErrorStatus(TStatusCode.SRM_SUCCESS, "Successfull creation."); } - public TReturnStatus updateReservation(TSpaceToken token, - TSizeInBytes sizeToAdd, TSURL toSurl) { + public TReturnStatus updateReservation(TSpaceToken token, TSizeInBytes sizeToAdd, TSURL toSurl) { String explanation = null; TStatusCode statusCode = TStatusCode.EMPTY; @@ -856,8 +926,7 @@ public TReturnStatus updateReservation(TSpaceToken token, String relativeSpaceFN = null; - relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), - spaceFN); + relativeSpaceFN = NamespaceUtil.extractRelativePath(vfs.getRootPath(), spaceFN); TSizeInBytes desiderataSpaceSize = sdata.getTotalSpaceSize(); TSizeInBytes availableSize = sdata.getAvailableSpaceSize(); @@ -866,8 +935,8 @@ public TReturnStatus updateReservation(TSpaceToken token, log.debug("Size of removed file: {}" + sizeToAdd.value()); try { - desiderataSpaceSize = TSizeInBytes.make( - availableSize.value() + sizeToAdd.value(), SizeUnit.BYTES); + desiderataSpaceSize = + TSizeInBytes.make(availableSize.value() + sizeToAdd.value(), SizeUnit.BYTES); } catch (InvalidTSizeAttributesException e) 
{ log.error(e.getMessage()); } @@ -910,8 +979,7 @@ public TReturnStatus updateReservation(TSpaceToken token, localFile = spaceFile.getLocalFile(); LocalUser localUser = user.getLocalUser(); if (localFile == null || localUser == null) { - log.error("ACL setup error. localFile={} localUser={}", localFile, - localUser); + log.error("ACL setup error. localFile={} localUser={}", localFile, localUser); revertOldSpaceFileDeletion(localFile); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to setting up the ACL "; @@ -940,16 +1008,14 @@ public TReturnStatus updateReservation(TSpaceToken token, localFile = spaceFile.getLocalFile(); LocalUser localUser = user.getLocalUser(); if (localFile == null || localUser == null) { - log.error("ACL setup error. localFile={} localUser={}", localFile, - localUser); + log.error("ACL setup error. localFile={} localUser={}", localFile, localUser); revertOldSpaceFileDeletion(localFile); statusCode = TStatusCode.SRM_INTERNAL_ERROR; explanation = "Unable to setting up the ACL "; return manageErrorStatus(statusCode, explanation); } else { try { - manager.grantGroupPermission(spaceFile.getLocalFile(), localUser, - fp); + manager.grantGroupPermission(spaceFile.getLocalFile(), localUser, fp); } catch (IllegalArgumentException e) { log.error(e.getMessage(), e); revertOldSpaceFileDeletion(localFile); @@ -968,14 +1034,14 @@ public TReturnStatus updateReservation(TSpaceToken token, } try { - availableSize = TSizeInBytes.make(sdata.getAvailableSpaceSize().value() - + sizeToAdd.value(), SizeUnit.BYTES); + availableSize = + TSizeInBytes.make( + sdata.getAvailableSpaceSize().value() + sizeToAdd.value(), SizeUnit.BYTES); } catch (InvalidTSizeAttributesException e) { log.error(e.getMessage(), e); revertOldSpaceFileDeletion(localFile); statusCode = TStatusCode.SRM_INTERNAL_ERROR; - explanation = "Error computing new available space size\n" - + e.getMessage(); + explanation = "Error computing new available space size\n" + e.getMessage(); return manageErrorStatus(statusCode, explanation); } @@ -987,19 +1053,15 @@ public TReturnStatus updateReservation(TSpaceToken token, log.error(e.getMessage(), e); revertOldSpaceFileDeletion(localFile); statusCode = TStatusCode.SRM_INTERNAL_ERROR; - explanation = "Error persisting space token data into the DB\n" - + e.getMessage(); + explanation = "Error persisting space token data into the DB\n" + e.getMessage(); return manageErrorStatus(statusCode, explanation); } return manageErrorStatus(TStatusCode.SRM_SUCCESS, "Successfull creation."); } - private void revertOldSpaceFileDeletion(LocalFile localFile) { - - } + private void revertOldSpaceFileDeletion(LocalFile localFile) {} - private ReserveSpaceOutputData manageError(TStatusCode statusCode, - String explanation) { + private ReserveSpaceOutputData manageError(TStatusCode statusCode, String explanation) { TReturnStatus status = null; try { @@ -1011,8 +1073,7 @@ private ReserveSpaceOutputData manageError(TStatusCode statusCode, return new ReserveSpaceOutputData(status); } - private TReturnStatus manageErrorStatus(TStatusCode statusCode, - String explanation) { + private TReturnStatus manageErrorStatus(TStatusCode statusCode, String explanation) { TReturnStatus status = null; try { @@ -1023,8 +1084,7 @@ private TReturnStatus manageErrorStatus(TStatusCode statusCode, return status; } - private void printRequestOutcome(TReturnStatus status, - ReserveSpaceInputData data) { + private void printRequestOutcome(TReturnStatus status, ReserveSpaceInputData data) { if (data != null) { 
CommandHelper.printRequestOutcome(SRM_COMMAND, log, status, data); diff --git a/src/main/java/it/grid/storm/synchcall/common/HiddenFileT1D1Plugin.java b/src/main/java/it/grid/storm/synchcall/common/HiddenFileT1D1Plugin.java index 085104b0..ea40d19a 100644 --- a/src/main/java/it/grid/storm/synchcall/common/HiddenFileT1D1Plugin.java +++ b/src/main/java/it/grid/storm/synchcall/common/HiddenFileT1D1Plugin.java @@ -1,49 +1,42 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.common; import it.grid.storm.namespace.StoRI; import it.grid.storm.namespace.naming.NamespaceUtil; - import java.io.File; import java.io.IOException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class HiddenFileT1D1Plugin implements T1D1PluginInterface { - private static final Logger log = LoggerFactory - .getLogger(HiddenFileT1D1Plugin.class); - - public int startMigration(StoRI stori, String prefix) { - - if (stori != null) { - log.debug("HiddenFileT1D1PLugin: SURL filename" + stori.getFilename()); - String localPathWithoutFilename = NamespaceUtil.consumeFileName(stori - .getLocalFile().getPath()); - log.debug("HiddenFileT1D1PLugin: local path without filename " - + localPathWithoutFilename); - String hiddenFileName = localPathWithoutFilename + "." + prefix - + stori.getFilename(); - log.debug("HiddenFileT1D1Plugin: hidden file name " + hiddenFileName); - - File hiddenFile = new File(hiddenFileName); - try { - hiddenFile.createNewFile(); - } catch (IOException e) { - // TODO Auto-generated catch blo - log.debug("HiddenFileT1D1Plugin: Error creating file " + e); - return 1; - } - - return 0; - - } else { - return 1; - } - } - + private static final Logger log = LoggerFactory.getLogger(HiddenFileT1D1Plugin.class); + + public int startMigration(StoRI stori, String prefix) { + + if (stori != null) { + log.debug("HiddenFileT1D1PLugin: SURL filename" + stori.getFilename()); + String localPathWithoutFilename = + NamespaceUtil.consumeFileName(stori.getLocalFile().getPath()); + log.debug("HiddenFileT1D1PLugin: local path without filename " + localPathWithoutFilename); + String hiddenFileName = localPathWithoutFilename + "." + prefix + stori.getFilename(); + log.debug("HiddenFileT1D1Plugin: hidden file name " + hiddenFileName); + + File hiddenFile = new File(hiddenFileName); + try { + hiddenFile.createNewFile(); + } catch (IOException e) { + // TODO Auto-generated catch blo + log.debug("HiddenFileT1D1Plugin: Error creating file " + e); + return 1; + } + + return 0; + + } else { + return 1; + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/common/T1D1PluginInterface.java b/src/main/java/it/grid/storm/synchcall/common/T1D1PluginInterface.java index f18ad20b..5137097a 100644 --- a/src/main/java/it/grid/storm/synchcall/common/T1D1PluginInterface.java +++ b/src/main/java/it/grid/storm/synchcall/common/T1D1PluginInterface.java @@ -1,20 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.common; import it.grid.storm.namespace.StoRI; /** - * Interface for the variuos Plugin that can be used to manage the T1D1 - * migration (different user, hidden file etc) - * + * Interface for the variuos Plugin that can be used to manage the T1D1 migration (different user, + * hidden file etc) + * * @author lucamag - * */ public interface T1D1PluginInterface { - public int startMigration(StoRI stori, String prefix); - + public int startMigration(StoRI stori, String prefix); } diff --git a/src/main/java/it/grid/storm/synchcall/data/AbstractInputData.java b/src/main/java/it/grid/storm/synchcall/data/AbstractInputData.java index 5551d569..1b534d00 100644 --- a/src/main/java/it/grid/storm/synchcall/data/AbstractInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/AbstractInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data; @@ -8,18 +7,21 @@ public abstract class AbstractInputData implements InputData { - private static final String sepBegin = "("; - private static final String sepEnd = ")"; - private static final String arrow = "->"; + private static final String sepBegin = "("; + private static final String sepEnd = ")"; + private static final String arrow = "->"; - @Override - public String display(Map map) { + @Override + public String display(Map map) { - StringBuilder sb = new StringBuilder("["); - for (Object object : map.keySet()) { - sb.append(sepBegin).append(object.toString()).append(arrow) - .append(map.get(object).toString()).append(sepEnd); - } - return sb.append("]").toString(); - } + StringBuilder sb = new StringBuilder("["); + for (Object object : map.keySet()) { + sb.append(sepBegin) + .append(object.toString()) + .append(arrow) + .append(map.get(object).toString()) + .append(sepEnd); + } + return sb.append("]").toString(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/DataHelper.java b/src/main/java/it/grid/storm/synchcall/data/DataHelper.java index b0e6fa93..823455e8 100644 --- a/src/main/java/it/grid/storm/synchcall/data/DataHelper.java +++ b/src/main/java/it/grid/storm/synchcall/data/DataHelper.java @@ -1,19 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data; /* - * + * * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the @@ -23,14 +22,14 @@ public class DataHelper { - public static final String ANONYMOUS_REQUESTOR = "Anonymous"; + public static final String ANONYMOUS_REQUESTOR = "Anonymous"; - public static String getRequestor(InputData data) { + public static String getRequestor(InputData data) { - if (data instanceof IdentityInputData) { - return ((IdentityInputData) data).getPrincipal(); - } else { - return ANONYMOUS_REQUESTOR; - } - } + if (data instanceof IdentityInputData) { + return ((IdentityInputData) data).getPrincipal(); + } else { + return ANONYMOUS_REQUESTOR; + } + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/IdentityInputData.java b/src/main/java/it/grid/storm/synchcall/data/IdentityInputData.java index fda4b45a..12711390 100644 --- a/src/main/java/it/grid/storm/synchcall/data/IdentityInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/IdentityInputData.java @@ -1,29 +1,21 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data; import it.grid.storm.griduser.GridUserInterface; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *
<p> - * - * - * Authors: - * + * + * <p>
Authors: + * * @author lucamag luca.magnoniATcnaf.infn.it - * * @date = Dec 9, 2008 - * */ - public interface IdentityInputData extends InputData { - public String getPrincipal(); - - public GridUserInterface getUser(); + public String getPrincipal(); -} \ No newline at end of file + public GridUserInterface getUser(); +} diff --git a/src/main/java/it/grid/storm/synchcall/data/InputData.java b/src/main/java/it/grid/storm/synchcall/data/InputData.java index 72c6be66..cf2808e4 100644 --- a/src/main/java/it/grid/storm/synchcall/data/InputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/InputData.java @@ -1,27 +1,19 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data; import java.util.Map; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *
<p> - * - * - * Authors: - * + * + * <p>
Authors: + * * @author lucamag luca.magnoniATcnaf.infn.it - * * @date = Dec 9, 2008 - * */ - public interface InputData { - public String display(Map map); - -} \ No newline at end of file + public String display(Map map); +} diff --git a/src/main/java/it/grid/storm/synchcall/data/OutputData.java b/src/main/java/it/grid/storm/synchcall/data/OutputData.java index 4b187549..42b23d25 100644 --- a/src/main/java/it/grid/storm/synchcall/data/OutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/OutputData.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data; public interface OutputData { - boolean isSuccess(); - -} \ No newline at end of file + boolean isSuccess(); +} diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesInputData.java index 43a4ba80..abef0d49 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -8,6 +7,5 @@ public interface AbortFilesInputData extends AbortInputData { - public ArrayOfSURLs getArrayOfSURLs(); - + public ArrayOfSURLs getArrayOfSURLs(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesOutputData.java index e8914b47..38115267 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortFilesOutputData.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the AbortFiles Output Data associated with the SRM - * request AbortFiles - * + * This class represents the AbortFiles Output Data associated with the SRM request AbortFiles + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date @@ -15,98 +13,92 @@ import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; import it.grid.storm.srm.types.TReturnStatus; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class AbortFilesOutputData extends AbortGeneralOutputData { - private static final Logger log = LoggerFactory - .getLogger(AbortFilesOutputData.class); - private TReturnStatus returnStatus = null; - private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; - - public AbortFilesOutputData() { - - this.returnStatus = null; - this.arrayOfFileStatus = null; - } - - public static AbortFilesOutputData make(AbortGeneralOutputData filesOutData) { - - // Create an output data from an AbortFiles output data. 
- // this.returnStatus = filesOutData.getReturnStatus(); - // this.arrayOfFileStatus = filesOutData.getArrayOfFileStatuses(); - return new AbortFilesOutputData(filesOutData.getReturnStatus(), - filesOutData.getArrayOfFileStatuses()); - } - - public AbortFilesOutputData(TReturnStatus retStatus, - ArrayOfTSURLReturnStatus arrayOfFileStatus) - // throws InvalidAbortFilesOutputDataAttributeException - { - - boolean ok = (arrayOfFileStatus == null); - - if (!ok) { - ;// throw new - // InvalidAbortFilesOutputDataAttributeException(arrayOfFileStatus); - } - - this.returnStatus = retStatus; - this.arrayOfFileStatus = arrayOfFileStatus; - } - - /** - * Returns the returnStatus field - * - * @return TReturnStatus - */ - @Override - public TReturnStatus getReturnStatus() { - - return returnStatus; - } - - /** - * Set the returnStatus field - * - * @param returnStatus - */ - @Override - public void setReturnStatus(TReturnStatus returnStatus) { - - this.returnStatus = returnStatus; - } - - /** - * Returns the arrayOfFileStatuses field - * - * @return TSURLReturnStatus - */ - @Override - public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { - - return arrayOfFileStatus; - } - - /** - * Set the arrayOfFileStatuses field - * - * @param arrayOfFileStatuses - */ - @Override - public void setArrayOfFileStatuses( - ArrayOfTSURLReturnStatus arrayOfFileStatuses) { - - this.arrayOfFileStatus = arrayOfFileStatuses; - } - - @Override - public boolean isSuccess() { - - // TODO Auto-generated method stub - return false; - } - + private static final Logger log = LoggerFactory.getLogger(AbortFilesOutputData.class); + private TReturnStatus returnStatus = null; + private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; + + public AbortFilesOutputData() { + + this.returnStatus = null; + this.arrayOfFileStatus = null; + } + + public static AbortFilesOutputData make(AbortGeneralOutputData filesOutData) { + + // Create an output data from an AbortFiles output data. 
+ // this.returnStatus = filesOutData.getReturnStatus(); + // this.arrayOfFileStatus = filesOutData.getArrayOfFileStatuses(); + return new AbortFilesOutputData( + filesOutData.getReturnStatus(), filesOutData.getArrayOfFileStatuses()); + } + + public AbortFilesOutputData(TReturnStatus retStatus, ArrayOfTSURLReturnStatus arrayOfFileStatus) + // throws InvalidAbortFilesOutputDataAttributeException + { + + boolean ok = (arrayOfFileStatus == null); + + if (!ok) {; // throw new + // InvalidAbortFilesOutputDataAttributeException(arrayOfFileStatus); + } + + this.returnStatus = retStatus; + this.arrayOfFileStatus = arrayOfFileStatus; + } + + /** + * Returns the returnStatus field + * + * @return TReturnStatus + */ + @Override + public TReturnStatus getReturnStatus() { + + return returnStatus; + } + + /** + * Set the returnStatus field + * + * @param returnStatus + */ + @Override + public void setReturnStatus(TReturnStatus returnStatus) { + + this.returnStatus = returnStatus; + } + + /** + * Returns the arrayOfFileStatuses field + * + * @return TSURLReturnStatus + */ + @Override + public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { + + return arrayOfFileStatus; + } + + /** + * Set the arrayOfFileStatuses field + * + * @param arrayOfFileStatuses + */ + @Override + public void setArrayOfFileStatuses(ArrayOfTSURLReturnStatus arrayOfFileStatuses) { + + this.arrayOfFileStatus = arrayOfFileStatuses; + } + + @Override + public boolean isSuccess() { + + // TODO Auto-generated method stub + return false; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortGeneralOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortGeneralOutputData.java index d1e0fae3..a6d4d928 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortGeneralOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortGeneralOutputData.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the AbortFiles Output Data associated with the SRM - * request AbortFiles - * + * This class represents the AbortFiles Output Data associated with the SRM request AbortFiles + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date @@ -17,82 +15,77 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.exception.InvalidAbortFilesOutputDataAttributeException; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class AbortGeneralOutputData implements OutputData { - private static final Logger log = LoggerFactory - .getLogger(AbortGeneralOutputData.class); - private TReturnStatus returnStatus = null; - private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; - - public AbortGeneralOutputData() { + private static final Logger log = LoggerFactory.getLogger(AbortGeneralOutputData.class); + private TReturnStatus returnStatus = null; + private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; - this.returnStatus = null; - this.arrayOfFileStatus = null; - } + public AbortGeneralOutputData() { - public AbortGeneralOutputData(TReturnStatus retStatus, - ArrayOfTSURLReturnStatus arrayOfFileStatus) - throws InvalidAbortFilesOutputDataAttributeException { + this.returnStatus = null; + this.arrayOfFileStatus = null; + } - boolean ok = (arrayOfFileStatus == null); + public AbortGeneralOutputData(TReturnStatus retStatus, ArrayOfTSURLReturnStatus arrayOfFileStatus) + throws InvalidAbortFilesOutputDataAttributeException { - if (!ok) { - throw new InvalidAbortFilesOutputDataAttributeException(arrayOfFileStatus); - } + boolean ok = (arrayOfFileStatus == null); - this.returnStatus = retStatus; - this.arrayOfFileStatus = arrayOfFileStatus; - } + if (!ok) { + throw new InvalidAbortFilesOutputDataAttributeException(arrayOfFileStatus); + } - /** - * Returns the returnStatus field - * - * @return TReturnStatus - */ - public TReturnStatus getReturnStatus() { + this.returnStatus = retStatus; + this.arrayOfFileStatus = arrayOfFileStatus; + } - return returnStatus; - } + /** + * Returns the returnStatus field + * + * @return TReturnStatus + */ + public TReturnStatus getReturnStatus() { - /** - * Set the returnStatus field - * - * @param returnStatus - */ - public void setReturnStatus(TReturnStatus returnStatus) { + return returnStatus; + } - this.returnStatus = returnStatus; - } + /** + * Set the returnStatus field + * + * @param returnStatus + */ + public void setReturnStatus(TReturnStatus returnStatus) { - /** - * Returns the arrayOfFileStatuses field - * - * @return TSURLReturnStatus - */ - public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { + this.returnStatus = returnStatus; + } - return arrayOfFileStatus; - } + /** + * Returns the arrayOfFileStatuses field + * + * @return TSURLReturnStatus + */ + public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { - /** - * Set the arrayOfFileStatuses field - * - * @param arrayOfFileStatuses - */ - public void setArrayOfFileStatuses( - ArrayOfTSURLReturnStatus arrayOfFileStatuses) { + return arrayOfFileStatus; + } - this.arrayOfFileStatus = arrayOfFileStatuses; - } + /** + * Set the arrayOfFileStatuses field + * + * @param arrayOfFileStatuses + */ + public void setArrayOfFileStatuses(ArrayOfTSURLReturnStatus arrayOfFileStatuses) { - public boolean isSuccess() { + this.arrayOfFileStatus = arrayOfFileStatuses; + } - // TODO Auto-generated method stub - return true; - } + public boolean isSuccess() { + 
// TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortInputData.java index d41df041..ac99f7b7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -9,12 +8,12 @@ public interface AbortInputData extends InputData { - public static enum AbortType { - ABORT_REQUEST, ABORT_FILES; - } + public static enum AbortType { + ABORT_REQUEST, + ABORT_FILES; + } - public TRequestToken getRequestToken(); - - public AbortType getType(); + public TRequestToken getRequestToken(); + public AbortType getType(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortRequestOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortRequestOutputData.java index d3837680..88ddbc6b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortRequestOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AbortRequestOutputData.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the AbortRequest Output Data associated with the SRM - * request AbortRequest - * + * This class represents the AbortRequest Output Data associated with the SRM request AbortRequest + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 @@ -14,61 +12,55 @@ package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.TReturnStatus; -import it.grid.storm.synchcall.data.OutputData; public class AbortRequestOutputData extends AbortGeneralOutputData { - private TReturnStatus returnStatus = null; + private TReturnStatus returnStatus = null; - public AbortRequestOutputData() { + public AbortRequestOutputData() {} - } + public AbortRequestOutputData(TReturnStatus retStatus) + // throws InvalidAbortRequestOutputDataAttributeException + { - public AbortRequestOutputData(TReturnStatus retStatus) - // throws InvalidAbortRequestOutputDataAttributeException - { + boolean ok = (retStatus == null); - boolean ok = (retStatus == null); + if (!ok) {; // throw new InvalidAbortRequestOutputDataAttributeException(retStatus); + } - if (!ok) { - ;// throw new InvalidAbortRequestOutputDataAttributeException(retStatus); - } + this.returnStatus = retStatus; + } - this.returnStatus = retStatus; - } + public static AbortRequestOutputData make(AbortGeneralOutputData generalOutData) { - public static AbortRequestOutputData make( - AbortGeneralOutputData generalOutData) { + // Create an output data from an AbortFiles output data. + // new AbortRequestOutputData(generalOutData.getReturnStatus()); + return new AbortRequestOutputData(generalOutData.getReturnStatus()); + } - // Create an output data from an AbortFiles output data. 
- // new AbortRequestOutputData(generalOutData.getReturnStatus()); - return new AbortRequestOutputData(generalOutData.getReturnStatus()); - } + /** + * Returns the returnStatus field + * + * @return TReturnStatus + */ + public TReturnStatus getReturnStatus() { - /** - * Returns the returnStatus field - * - * @return TReturnStatus - */ - public TReturnStatus getReturnStatus() { + return returnStatus; + } - return returnStatus; - } + /** + * Set the returnStatus field + * + * @param returnStatus + */ + public void setReturnStatus(TReturnStatus returnStatus) { - /** - * Set the returnStatus field - * - * @param returnStatus - */ - public void setReturnStatus(TReturnStatus returnStatus) { + this.returnStatus = returnStatus; + } - this.returnStatus = returnStatus; - } - - public boolean isSuccess() { - - // TODO Auto-generated method stub - return false; - } + public boolean isSuccess() { + // TODO Auto-generated method stub + return false; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortFilesInputData.java index bbf5e67d..be92697b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortFilesInputData.java @@ -1,50 +1,45 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the general Abort Input Data associated with the SRM - * request Abort - * + * This class represents the general Abort Input Data associated with the SRM request Abort + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.srm.types.TRequestToken; -public class AnonymousAbortFilesInputData extends - AnonymousAbortGeneralInputData implements AbortFilesInputData { - - private final ArrayOfSURLs arrayOfSURLs; - - public AnonymousAbortFilesInputData(TRequestToken reqToken, - ArrayOfSURLs surlArray) throws IllegalArgumentException { - - super(reqToken, AbortType.ABORT_REQUEST); - if (surlArray == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: surlArray=" - + surlArray); - } - this.arrayOfSURLs = surlArray; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.AbortFilesInputData#getArrayOfSURLs - * () - */ - @Override - public ArrayOfSURLs getArrayOfSURLs() { - - return arrayOfSURLs; - } - +public class AnonymousAbortFilesInputData extends AnonymousAbortGeneralInputData + implements AbortFilesInputData { + + private final ArrayOfSURLs arrayOfSURLs; + + public AnonymousAbortFilesInputData(TRequestToken reqToken, ArrayOfSURLs surlArray) + throws IllegalArgumentException { + + super(reqToken, AbortType.ABORT_REQUEST); + if (surlArray == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: surlArray=" + surlArray); + } + this.arrayOfSURLs = surlArray; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.AbortFilesInputData#getArrayOfSURLs + * () + */ + @Override + public ArrayOfSURLs getArrayOfSURLs() { + + return arrayOfSURLs; + } } diff --git 
a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortGeneralInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortGeneralInputData.java index e436a63f..c104fcd4 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortGeneralInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortGeneralInputData.java @@ -1,62 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the general Abort Input Data associated with the SRM - * request Abort - * + * This class represents the general Abort Input Data associated with the SRM request Abort + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.synchcall.data.AbstractInputData; public abstract class AnonymousAbortGeneralInputData extends AbstractInputData - implements AbortInputData { - - private final AbortType type; - - private final TRequestToken reqToken; - - protected AnonymousAbortGeneralInputData(TRequestToken reqToken, - AbortType type) throws IllegalArgumentException { - - if (reqToken == null || type == null) { - throw new IllegalArgumentException( - "Unable to build the object. null arguments: reqToken=" + reqToken - + " type=" + type); - } - this.reqToken = reqToken; - this.type = type; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.AbortInputData#getRequestToken() - */ - @Override - public TRequestToken getRequestToken() { - - return reqToken; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.AbortInputData#getType() - */ - @Override - public AbortType getType() { - - return type; - } - + implements AbortInputData { + + private final AbortType type; + + private final TRequestToken reqToken; + + protected AnonymousAbortGeneralInputData(TRequestToken reqToken, AbortType type) + throws IllegalArgumentException { + + if (reqToken == null || type == null) { + throw new IllegalArgumentException( + "Unable to build the object. null arguments: reqToken=" + reqToken + " type=" + type); + } + this.reqToken = reqToken; + this.type = type; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.AbortInputData#getRequestToken() + */ + @Override + public TRequestToken getRequestToken() { + + return reqToken; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.AbortInputData#getType() + */ + @Override + public AbortType getType() { + + return type; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortRequestInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortRequestInputData.java index 741f3402..552db082 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortRequestInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousAbortRequestInputData.java @@ -1,27 +1,22 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the PutDone Input Data associated with the SRM request - * PutDone - * + * This class represents the PutDone Input Data associated with the SRM request PutDone + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.TRequestToken; -public class AnonymousAbortRequestInputData extends - AnonymousAbortGeneralInputData { +public class AnonymousAbortRequestInputData extends AnonymousAbortGeneralInputData { - public AnonymousAbortRequestInputData(TRequestToken reqToken) - throws IllegalArgumentException { + public AnonymousAbortRequestInputData(TRequestToken reqToken) throws IllegalArgumentException { - super(reqToken, AbortType.ABORT_REQUEST); - } + super(reqToken, AbortType.ABORT_REQUEST); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousExtendFileLifeTimeInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousExtendFileLifeTimeInputData.java index 9178a459..e3e12b6a 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousExtendFileLifeTimeInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousExtendFileLifeTimeInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -10,76 +9,85 @@ import it.grid.storm.synchcall.data.AbstractInputData; public class AnonymousExtendFileLifeTimeInputData extends AbstractInputData - implements ExtendFileLifeTimeInputData { + implements ExtendFileLifeTimeInputData { - private final TRequestToken requestToken; - private final ArrayOfSURLs arrayOfSURLs; - private final TLifeTimeInSeconds newFileLifetime; - private final TLifeTimeInSeconds newPinLifetime; + private final TRequestToken requestToken; + private final ArrayOfSURLs arrayOfSURLs; + private final TLifeTimeInSeconds newFileLifetime; + private final TLifeTimeInSeconds newPinLifetime; - public AnonymousExtendFileLifeTimeInputData(TRequestToken requestToken, - ArrayOfSURLs surlArray, TLifeTimeInSeconds newFileLifetime, - TLifeTimeInSeconds newPinLifetime) throws IllegalArgumentException { + public AnonymousExtendFileLifeTimeInputData( + TRequestToken requestToken, + ArrayOfSURLs surlArray, + TLifeTimeInSeconds newFileLifetime, + TLifeTimeInSeconds newPinLifetime) + throws IllegalArgumentException { - if (requestToken == null || surlArray == null || newFileLifetime == null - || newPinLifetime == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: requestToken=" - + requestToken + " surlArray=" + surlArray + " newFileLifetime=" - + newFileLifetime + " newPinLifetime=" + newPinLifetime); - } - this.requestToken = requestToken; - this.arrayOfSURLs = surlArray; - this.newFileLifetime = newFileLifetime; - this.newPinLifetime = newPinLifetime; - } + if (requestToken == null + || surlArray == null + || newFileLifetime == null + || newPinLifetime == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: requestToken=" + + requestToken + + " surlArray=" + + surlArray + + " newFileLifetime=" + + newFileLifetime + + " newPinLifetime=" + + newPinLifetime); + } + this.requestToken = requestToken; + 
this.arrayOfSURLs = surlArray; + this.newFileLifetime = newFileLifetime; + this.newPinLifetime = newPinLifetime; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# - * getReqToken() - */ - @Override - public TRequestToken getRequestToken() { + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# + * getReqToken() + */ + @Override + public TRequestToken getRequestToken() { - return requestToken; - } + return requestToken; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# - * getArrayOfSURLs() - */ - @Override - public ArrayOfSURLs getArrayOfSURLs() { + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# + * getArrayOfSURLs() + */ + @Override + public ArrayOfSURLs getArrayOfSURLs() { - return arrayOfSURLs; - } + return arrayOfSURLs; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# - * getNewFileLifetime() - */ - @Override - public TLifeTimeInSeconds getNewFileLifetime() { + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# + * getNewFileLifetime() + */ + @Override + public TLifeTimeInSeconds getNewFileLifetime() { - return newFileLifetime; - } + return newFileLifetime; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# - * getNewPinLifetime() - */ - @Override - public TLifeTimeInSeconds getNewPinLifetime() { - - return newPinLifetime; - } + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData# + * getNewPinLifetime() + */ + @Override + public TLifeTimeInSeconds getNewPinLifetime() { + return newPinLifetime; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousFileTransferInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousFileTransferInputData.java index 858d1cf9..5ca4dcdc 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousFileTransferInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousFileTransferInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -10,79 +9,79 @@ import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousFileTransferInputData extends AbstractInputData implements - FileTransferInputData { - - private final TSURL surl; - private final TURLPrefix transferProtocols; - private TLifeTimeInSeconds desiredPinLifetime = TLifeTimeInSeconds - .makeEmpty(); - private TSpaceToken targetSpaceToken = TSpaceToken.makeEmpty(); - - public AnonymousFileTransferInputData(TSURL surl, TURLPrefix transferProtocols) - throws IllegalArgumentException { - - if (surl == null || transferProtocols == null) { - throw new IllegalArgumentException( - "Unable to create PrepareToPutInputData. 
Received nul parameters: surl = " - + surl + " , transferProtocols = " + transferProtocols); - } - this.surl = surl; - this.transferProtocols = transferProtocols; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.FileTransferInputData#getSurl() - */ - @Override - public TSURL getSurl() { - - return surl; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.FileTransferInputData# - * getTransferProtocols() - */ - @Override - public TURLPrefix getTransferProtocols() { - - return transferProtocols; - } - - @Override - public void setTargetSpaceToken(TSpaceToken targetSpaceToken) { - - this.targetSpaceToken = targetSpaceToken; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.FileTransferInputData# - * getTargetSpaceToken() - */ - @Override - public TSpaceToken getTargetSpaceToken() { - - return targetSpaceToken; - } - - @Override - public TLifeTimeInSeconds getDesiredPinLifetime() { - - return desiredPinLifetime; - } - - @Override - public void setDesiredPinLifetime(TLifeTimeInSeconds desiredPinLifetime) { - - this.desiredPinLifetime = desiredPinLifetime; - } - +public class AnonymousFileTransferInputData extends AbstractInputData + implements FileTransferInputData { + + private final TSURL surl; + private final TURLPrefix transferProtocols; + private TLifeTimeInSeconds desiredPinLifetime = TLifeTimeInSeconds.makeEmpty(); + private TSpaceToken targetSpaceToken = TSpaceToken.makeEmpty(); + + public AnonymousFileTransferInputData(TSURL surl, TURLPrefix transferProtocols) + throws IllegalArgumentException { + + if (surl == null || transferProtocols == null) { + throw new IllegalArgumentException( + "Unable to create PrepareToPutInputData. Received nul parameters: surl = " + + surl + + " , transferProtocols = " + + transferProtocols); + } + this.surl = surl; + this.transferProtocols = transferProtocols; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.FileTransferInputData#getSurl() + */ + @Override + public TSURL getSurl() { + + return surl; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.FileTransferInputData# + * getTransferProtocols() + */ + @Override + public TURLPrefix getTransferProtocols() { + + return transferProtocols; + } + + @Override + public void setTargetSpaceToken(TSpaceToken targetSpaceToken) { + + this.targetSpaceToken = targetSpaceToken; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.FileTransferInputData# + * getTargetSpaceToken() + */ + @Override + public TSpaceToken getTargetSpaceToken() { + + return targetSpaceToken; + } + + @Override + public TLifeTimeInSeconds getDesiredPinLifetime() { + + return desiredPinLifetime; + } + + @Override + public void setDesiredPinLifetime(TLifeTimeInSeconds desiredPinLifetime) { + + this.desiredPinLifetime = desiredPinLifetime; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferFilesInputData.java index 4439ee1d..633a5b45 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferFilesInputData.java @@ -1,31 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousManageFileTransferFilesInputData extends - AbstractInputData implements ManageFileTransferFilesInputData { +public class AnonymousManageFileTransferFilesInputData extends AbstractInputData + implements ManageFileTransferFilesInputData { - protected final ArrayOfSURLs arrayOfSURLs; + protected final ArrayOfSURLs arrayOfSURLs; - public AnonymousManageFileTransferFilesInputData(ArrayOfSURLs arrayOfSURLs) { + public AnonymousManageFileTransferFilesInputData(ArrayOfSURLs arrayOfSURLs) { - if (arrayOfSURLs == null || arrayOfSURLs.size() == 0) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: " + "arrayOfSURLs=" - + arrayOfSURLs); - } - this.arrayOfSURLs = arrayOfSURLs; - } + if (arrayOfSURLs == null || arrayOfSURLs.size() == 0) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: " + "arrayOfSURLs=" + arrayOfSURLs); + } + this.arrayOfSURLs = arrayOfSURLs; + } - @Override - public ArrayOfSURLs getArrayOfSURLs() { - - return arrayOfSURLs; - } + @Override + public ArrayOfSURLs getArrayOfSURLs() { + return arrayOfSURLs; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferRequestFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferRequestFilesInputData.java index 91f08166..2e79e021 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferRequestFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousManageFileTransferRequestFilesInputData.java @@ -1,42 +1,38 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.srm.types.TRequestToken; -public class AnonymousManageFileTransferRequestFilesInputData extends - AnonymousManageFileTransferFilesInputData implements - ManageFileTransferRequestFilesInputData { - - private final TRequestToken requestToken; - - public AnonymousManageFileTransferRequestFilesInputData( - TRequestToken requestToken, ArrayOfSURLs arrayOfSURLs) - throws IllegalArgumentException { - - super(arrayOfSURLs); - if (requestToken == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: requestToken=" - + requestToken); - } - this.requestToken = requestToken; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.ReleaseFilesInputData#getRequestToken - * () - */ - @Override - public TRequestToken getRequestToken() { - - return requestToken; - } - +public class AnonymousManageFileTransferRequestFilesInputData + extends AnonymousManageFileTransferFilesInputData + implements ManageFileTransferRequestFilesInputData { + + private final TRequestToken requestToken; + + public AnonymousManageFileTransferRequestFilesInputData( + TRequestToken requestToken, ArrayOfSURLs arrayOfSURLs) throws IllegalArgumentException { + + super(arrayOfSURLs); + if (requestToken == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: requestToken=" + requestToken); + } + this.requestToken = requestToken; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.ReleaseFilesInputData#getRequestToken + * () + */ + @Override + public TRequestToken getRequestToken() { + + return requestToken; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPrepareToPutInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPrepareToPutInputData.java index ecb50b27..e1133478 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPrepareToPutInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPrepareToPutInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -13,95 +12,92 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TSizeInBytes; -/** - * @author Michele Dibenedetto - * - */ -public class AnonymousPrepareToPutInputData extends - AnonymousFileTransferInputData implements PrepareToPutInputData { - - private TOverwriteMode overwriteMode = OverwriteModeConverter.getInstance() - .toSTORM(Configuration.getInstance().getDefaultOverwriteMode()); - private TSizeInBytes fileSize = TSizeInBytes.makeEmpty(); - private TLifeTimeInSeconds desiredFileLifetime; - - /** - * @param user - * @param surl - * @param transferProtocols - * @throws IllegalArgumentException - * @throws IllegalStateException - */ - public AnonymousPrepareToPutInputData(TSURL surl, TURLPrefix transferProtocols) - throws IllegalArgumentException, IllegalStateException { - - super(surl, transferProtocols); - this.desiredFileLifetime = TLifeTimeInSeconds.make(Configuration - .getInstance().getFileLifetimeDefault(), TimeUnit.SECONDS); - - } - - public AnonymousPrepareToPutInputData(TSURL surl, - TURLPrefix transferProtocols, TLifeTimeInSeconds desiredFileLifetime) - throws IllegalArgumentException, IllegalStateException { - - this(surl, transferProtocols); - this.desiredFileLifetime = desiredFileLifetime; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData# - * getOverwriteMode() - */ - @Override - public TOverwriteMode getOverwriteMode() { - - return overwriteMode; - } - - @Override - public void setOverwriteMode(TOverwriteMode overwriteMode) { - - this.overwriteMode = overwriteMode; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData#getFileSize - * () - */ - @Override - public TSizeInBytes getFileSize() { - - return fileSize; - } - - @Override - public void setFileSize(TSizeInBytes fileSize) { - - this.fileSize = fileSize; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData# - * getDesiredFileLifetime() - */ - @Override - public TLifeTimeInSeconds getDesiredFileLifetime() { - - return desiredFileLifetime; - } - - @Override - public void setDesiredFileLifetime(TLifeTimeInSeconds desiredFileLifetime) { - - this.desiredFileLifetime = desiredFileLifetime; - } - +/** @author Michele Dibenedetto */ +public class AnonymousPrepareToPutInputData extends AnonymousFileTransferInputData + implements PrepareToPutInputData { + + private TOverwriteMode overwriteMode = + OverwriteModeConverter.getInstance() + .toSTORM(Configuration.getInstance().getDefaultOverwriteMode()); + private TSizeInBytes fileSize = TSizeInBytes.makeEmpty(); + private TLifeTimeInSeconds desiredFileLifetime; + + /** + * @param user + * @param surl + * @param transferProtocols + * @throws IllegalArgumentException + * @throws IllegalStateException + */ + public AnonymousPrepareToPutInputData(TSURL surl, TURLPrefix transferProtocols) + throws IllegalArgumentException, IllegalStateException { + + super(surl, transferProtocols); + this.desiredFileLifetime = + TLifeTimeInSeconds.make( + Configuration.getInstance().getFileLifetimeDefault(), TimeUnit.SECONDS); + } + + public AnonymousPrepareToPutInputData( + TSURL surl, TURLPrefix transferProtocols, TLifeTimeInSeconds desiredFileLifetime) + throws IllegalArgumentException, IllegalStateException { + + this(surl, transferProtocols); + this.desiredFileLifetime = desiredFileLifetime; + } + + /* + * (non-Javadoc) 
+ * + * @see it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData# + * getOverwriteMode() + */ + @Override + public TOverwriteMode getOverwriteMode() { + + return overwriteMode; + } + + @Override + public void setOverwriteMode(TOverwriteMode overwriteMode) { + + this.overwriteMode = overwriteMode; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData#getFileSize + * () + */ + @Override + public TSizeInBytes getFileSize() { + + return fileSize; + } + + @Override + public void setFileSize(TSizeInBytes fileSize) { + + this.fileSize = fileSize; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData# + * getDesiredFileLifetime() + */ + @Override + public TLifeTimeInSeconds getDesiredFileLifetime() { + + return desiredFileLifetime; + } + + @Override + public void setDesiredFileLifetime(TLifeTimeInSeconds desiredFileLifetime) { + + this.desiredFileLifetime = desiredFileLifetime; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPutDoneInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPutDoneInputData.java index 6dcb8bb6..be66fe1a 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPutDoneInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousPutDoneInputData.java @@ -1,19 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; /* - * + * * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the @@ -25,48 +24,49 @@ import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousPutDoneInputData extends AbstractInputData implements - ManageFileTransferRequestFilesInputData { +public class AnonymousPutDoneInputData extends AbstractInputData + implements ManageFileTransferRequestFilesInputData { - private final TRequestToken reqToken; - private final ArrayOfSURLs arrayOfSURLs; + private final TRequestToken reqToken; + private final ArrayOfSURLs arrayOfSURLs; - public AnonymousPutDoneInputData(TRequestToken reqToken, - ArrayOfSURLs surlArray) throws IllegalArgumentException { + public AnonymousPutDoneInputData(TRequestToken reqToken, ArrayOfSURLs surlArray) + throws IllegalArgumentException { - if (surlArray == null || reqToken == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: reqToken=" + reqToken - + " surlArray=" + surlArray); - } - this.reqToken = reqToken; - this.arrayOfSURLs = surlArray; - } + if (surlArray == null || reqToken == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: reqToken=" + + reqToken + + " surlArray=" + + surlArray); + } + this.reqToken = reqToken; + this.arrayOfSURLs = surlArray; + } - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.PutDoneInputData#getRequestToken - * () - */ - @Override - public TRequestToken getRequestToken() { + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.PutDoneInputData#getRequestToken + * () + */ + @Override + public TRequestToken getRequestToken() { - return reqToken; - } + return reqToken; + } - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.datatransfer.PutDoneInputData#getArrayOfSURLs - * () - */ - @Override - public ArrayOfSURLs getArrayOfSURLs() { - - return arrayOfSURLs; - } + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.datatransfer.PutDoneInputData#getArrayOfSURLs + * () + */ + @Override + public ArrayOfSURLs getArrayOfSURLs() { + return arrayOfSURLs; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousReleaseRequestInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousReleaseRequestInputData.java index 4a70af96..dd0fdbfd 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousReleaseRequestInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/AnonymousReleaseRequestInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -8,24 +7,22 @@ import it.grid.storm.synchcall.data.AbstractInputData; public class AnonymousReleaseRequestInputData extends AbstractInputData - implements ManageFileTransferRequestInputData { + implements ManageFileTransferRequestInputData { - private final TRequestToken requestToken; + private final TRequestToken requestToken; - public AnonymousReleaseRequestInputData(TRequestToken requestToken) { + public AnonymousReleaseRequestInputData(TRequestToken requestToken) { - if (requestToken == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: requestToken=" - + requestToken); - } - this.requestToken = requestToken; - } + if (requestToken == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: requestToken=" + requestToken); + } + this.requestToken = requestToken; + } - @Override - public TRequestToken getRequestToken() { - - return requestToken; - } + @Override + public TRequestToken getRequestToken() { + return requestToken; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeInputData.java index 4a7a50c2..39fd9004 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -11,24 +10,15 @@ public interface ExtendFileLifeTimeInputData extends InputData { - /** - * @return the reqToken - */ - public TRequestToken getRequestToken(); + /** @return the reqToken */ + public TRequestToken getRequestToken(); - /** - * @return the arrayOfSURLs - */ - public ArrayOfSURLs getArrayOfSURLs(); + /** @return the arrayOfSURLs */ + public ArrayOfSURLs getArrayOfSURLs(); - /** - * @return the newFileLifetime - */ - public TLifeTimeInSeconds getNewFileLifetime(); - - /** - * @return the newPinLifetime - */ - public TLifeTimeInSeconds getNewPinLifetime(); + /** @return the newFileLifetime */ + public TLifeTimeInSeconds getNewFileLifetime(); + /** @return the newPinLifetime */ + public TLifeTimeInSeconds getNewPinLifetime(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeOutputData.java index cccb48d5..cb9183f2 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ExtendFileLifeTimeOutputData.java @@ -1,10 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** * This class represents the ExtendFileLifeTime Output Data. 
- * + * * @author Alberto Forti * @author CNAF-INFN Bologna * @date Dec 2006 @@ -18,67 +17,65 @@ public class ExtendFileLifeTimeOutputData implements OutputData { - private TReturnStatus returnStatus = null; - private ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses = null; - - public ExtendFileLifeTimeOutputData() { - - this.returnStatus = null; - this.arrayOfFileStatuses = null; - } - - public ExtendFileLifeTimeOutputData(TReturnStatus retStatus, - ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses) { - - this.returnStatus = retStatus; - this.arrayOfFileStatuses = arrayOfFileStatuses; - } - - /** - * Returns the returnStatus field. - * - * @return TReturnStatus - */ - public TReturnStatus getReturnStatus() { - - return returnStatus; - } - - /** - * Set the returnStatus field. - * - * @param returnStatus - * TReturnStatus - */ - public void setReturnStatus(TReturnStatus returnStatus) { - - this.returnStatus = returnStatus; - } - - /** - * Returns the arrayOfFileStatuses field. - * - * @return ArrayOfTSURLLifetimeReturnStatus - */ - public ArrayOfTSURLLifetimeReturnStatus getArrayOfFileStatuses() { - - return arrayOfFileStatuses; - } - - /** - * Set the arrayOfFileStatuses field. - * - * @param arrayOfFileStatuses - */ - public void setArrayOfFileStatuses( - ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses) { - - this.arrayOfFileStatuses = arrayOfFileStatuses; - } - - public boolean isSuccess() { - - // TODO Auto-generated method stub - return true; - } + private TReturnStatus returnStatus = null; + private ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses = null; + + public ExtendFileLifeTimeOutputData() { + + this.returnStatus = null; + this.arrayOfFileStatuses = null; + } + + public ExtendFileLifeTimeOutputData( + TReturnStatus retStatus, ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses) { + + this.returnStatus = retStatus; + this.arrayOfFileStatuses = arrayOfFileStatuses; + } + + /** + * Returns the returnStatus field. + * + * @return TReturnStatus + */ + public TReturnStatus getReturnStatus() { + + return returnStatus; + } + + /** + * Set the returnStatus field. + * + * @param returnStatus TReturnStatus + */ + public void setReturnStatus(TReturnStatus returnStatus) { + + this.returnStatus = returnStatus; + } + + /** + * Returns the arrayOfFileStatuses field. + * + * @return ArrayOfTSURLLifetimeReturnStatus + */ + public ArrayOfTSURLLifetimeReturnStatus getArrayOfFileStatuses() { + + return arrayOfFileStatuses; + } + + /** + * Set the arrayOfFileStatuses field. + * + * @param arrayOfFileStatuses + */ + public void setArrayOfFileStatuses(ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses) { + + this.arrayOfFileStatuses = arrayOfFileStatuses; + } + + public boolean isSuccess() { + + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferInputData.java index 8a605b7d..a9077e35 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -12,34 +11,21 @@ public interface FileTransferInputData extends InputData { - /** - * @return the surl - */ - public TSURL getSurl(); + /** @return the surl */ + public TSURL getSurl(); - /** - * @return the transferProtocols - */ - public TURLPrefix getTransferProtocols(); + /** @return the transferProtocols */ + public TURLPrefix getTransferProtocols(); - /** - * @return the targetSpaceToken - */ - public TSpaceToken getTargetSpaceToken(); + /** @return the targetSpaceToken */ + public TSpaceToken getTargetSpaceToken(); - /** - * @param targetSpaceToken - */ - public void setTargetSpaceToken(TSpaceToken targetSpaceToken); + /** @param targetSpaceToken */ + public void setTargetSpaceToken(TSpaceToken targetSpaceToken); - /** - * @return - */ - public TLifeTimeInSeconds getDesiredPinLifetime(); - - /** - * @param desiredPinLifetime - */ - public void setDesiredPinLifetime(TLifeTimeInSeconds desiredPinLifetime); + /** @return */ + public TLifeTimeInSeconds getDesiredPinLifetime(); + /** @param desiredPinLifetime */ + public void setDesiredPinLifetime(TLifeTimeInSeconds desiredPinLifetime); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferOutputData.java index 274b3aed..36444c08 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/FileTransferOutputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -10,146 +9,144 @@ import it.grid.storm.srm.types.TTURL; import it.grid.storm.synchcall.data.OutputData; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class FileTransferOutputData implements OutputData { - private final TSURL surl; - private final TTURL turl; - private final TReturnStatus status; - private final TRequestToken requestToken; - - public FileTransferOutputData(TSURL surl, TTURL turl, TReturnStatus status, - TRequestToken requestToken) throws IllegalArgumentException { - - if (surl == null || turl == null || status == null || requestToken == null) { - throw new IllegalArgumentException( - "Unable to create FileTransferOutputData. 
Received null arguments: " - + "surl = " + surl + " , turl = " + turl + " , status = " + status); - } - this.surl = surl; - this.turl = turl; - this.status = status; - this.requestToken = requestToken; - } - - @Override - public boolean isSuccess() { - - return status.isSRM_SUCCESS(); - } - - public TSURL getSurl() { - - return this.surl; - - } - - public TTURL getTurl() { - - return this.turl; - - } - - public TReturnStatus getStatus() { - - return this.status; - - } - - public TRequestToken getRequestToken() { - - return this.requestToken; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - - StringBuilder builder = new StringBuilder(); - builder.append("FileTransferOutputData [surl="); - builder.append(surl); - builder.append(", turl="); - builder.append(turl); - builder.append(", status="); - builder.append(status); - builder.append(", requestToken="); - builder.append(requestToken); - builder.append("]"); - return builder.toString(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - - final int prime = 31; - int result = 1; - result = prime * result - + ((requestToken == null) ? 0 : requestToken.hashCode()); - result = prime * result + ((status == null) ? 0 : status.hashCode()); - result = prime * result + ((surl == null) ? 0 : surl.hashCode()); - result = prime * result + ((turl == null) ? 0 : turl.hashCode()); - return result; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - FileTransferOutputData other = (FileTransferOutputData) obj; - if (requestToken == null) { - if (other.requestToken != null) { - return false; - } - } else if (!requestToken.equals(other.requestToken)) { - return false; - } - if (status == null) { - if (other.status != null) { - return false; - } - } else if (!status.equals(other.status)) { - return false; - } - if (surl == null) { - if (other.surl != null) { - return false; - } - } else if (!surl.equals(other.surl)) { - return false; - } - if (turl == null) { - if (other.turl != null) { - return false; - } - } else if (!turl.equals(other.turl)) { - return false; - } - return true; - } - + private final TSURL surl; + private final TTURL turl; + private final TReturnStatus status; + private final TRequestToken requestToken; + + public FileTransferOutputData( + TSURL surl, TTURL turl, TReturnStatus status, TRequestToken requestToken) + throws IllegalArgumentException { + + if (surl == null || turl == null || status == null || requestToken == null) { + throw new IllegalArgumentException( + "Unable to create FileTransferOutputData. 
Received null arguments: " + + "surl = " + + surl + + " , turl = " + + turl + + " , status = " + + status); + } + this.surl = surl; + this.turl = turl; + this.status = status; + this.requestToken = requestToken; + } + + @Override + public boolean isSuccess() { + + return status.isSRM_SUCCESS(); + } + + public TSURL getSurl() { + + return this.surl; + } + + public TTURL getTurl() { + + return this.turl; + } + + public TReturnStatus getStatus() { + + return this.status; + } + + public TRequestToken getRequestToken() { + + return this.requestToken; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(); + builder.append("FileTransferOutputData [surl="); + builder.append(surl); + builder.append(", turl="); + builder.append(turl); + builder.append(", status="); + builder.append(status); + builder.append(", requestToken="); + builder.append(requestToken); + builder.append("]"); + return builder.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + final int prime = 31; + int result = 1; + result = prime * result + ((requestToken == null) ? 0 : requestToken.hashCode()); + result = prime * result + ((status == null) ? 0 : status.hashCode()); + result = prime * result + ((surl == null) ? 0 : surl.hashCode()); + result = prime * result + ((turl == null) ? 0 : turl.hashCode()); + return result; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + FileTransferOutputData other = (FileTransferOutputData) obj; + if (requestToken == null) { + if (other.requestToken != null) { + return false; + } + } else if (!requestToken.equals(other.requestToken)) { + return false; + } + if (status == null) { + if (other.status != null) { + return false; + } + } else if (!status.equals(other.status)) { + return false; + } + if (surl == null) { + if (other.surl != null) { + return false; + } + } else if (!surl.equals(other.surl)) { + return false; + } + if (turl == null) { + if (other.turl != null) { + return false; + } + } else if (!turl.equals(other.turl)) { + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortFilesInputData.java index 55b1308b..cd65a7e9 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortFilesInputData.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the general Abort Input Data associated with the SRM - * request Abort - * + * This class represents the general Abort Input Data associated with the SRM request Abort + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.griduser.GridUserInterface; @@ -20,32 +17,31 @@ import it.grid.storm.synchcall.data.IdentityInputData; public class IdentityAbortFilesInputData extends AnonymousAbortFilesInputData - implements IdentityInputData { - - private final GridUserInterface auth; + implements IdentityInputData { - public IdentityAbortFilesInputData(GridUserInterface auth, - TRequestToken reqToken, ArrayOfSURLs surlArray) - throws IllegalArgumentException { + private final GridUserInterface auth; - super(reqToken, surlArray); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + public IdentityAbortFilesInputData( + GridUserInterface auth, TRequestToken reqToken, ArrayOfSURLs surlArray) + throws IllegalArgumentException { - @Override - public GridUserInterface getUser() { + super(reqToken, surlArray); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - return this.auth; - } + @Override + public GridUserInterface getUser() { - @Override - public String getPrincipal() { + return this.auth; + } - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortRequestInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortRequestInputData.java index 11ca9a56..6abf68c7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortRequestInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityAbortRequestInputData.java @@ -1,48 +1,45 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the PutDone Input Data associated with the SRM request - * PutDone - * + * This class represents the PutDone Input Data associated with the SRM request PutDone + * * @author Magnoni Luca * @author CNAF -INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityAbortRequestInputData extends - AnonymousAbortRequestInputData implements IdentityInputData { +public class IdentityAbortRequestInputData extends AnonymousAbortRequestInputData + implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityAbortRequestInputData(GridUserInterface auth, - TRequestToken reqToken) throws IllegalArgumentException { + public IdentityAbortRequestInputData(GridUserInterface auth, TRequestToken reqToken) + throws IllegalArgumentException { - super(reqToken); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(reqToken); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityExtendFileLifeTimeInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityExtendFileLifeTimeInputData.java index 241a7709..c179bb1a 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityExtendFileLifeTimeInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityExtendFileLifeTimeInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -11,47 +10,46 @@ import it.grid.storm.synchcall.data.IdentityInputData; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *
<p>
- * - * This class represents the ExtendFileLifeTime Input Data. - * - * Authors: - * + * + * <p>This class represents the ExtendFileLifeTime Input Data. + * + * <p>
Authors: + * * @author = lucamag luca.magnoniATcnaf.infn.it * @author Alberto Forti - * * @date = Oct 10, 2008 - * */ -public class IdentityExtendFileLifeTimeInputData extends - AnonymousExtendFileLifeTimeInputData implements IdentityInputData { - - private final GridUserInterface auth; - - public IdentityExtendFileLifeTimeInputData(GridUserInterface auth, - TRequestToken reqToken, ArrayOfSURLs surlArray, - TLifeTimeInSeconds newFileLifetime, TLifeTimeInSeconds newPinLifetime) - throws IllegalArgumentException { - - super(reqToken, surlArray, newFileLifetime, newPinLifetime); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } - - @Override - public GridUserInterface getUser() { - - return auth; - } - - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } +public class IdentityExtendFileLifeTimeInputData extends AnonymousExtendFileLifeTimeInputData + implements IdentityInputData { + + private final GridUserInterface auth; + + public IdentityExtendFileLifeTimeInputData( + GridUserInterface auth, + TRequestToken reqToken, + ArrayOfSURLs surlArray, + TLifeTimeInSeconds newFileLifetime, + TLifeTimeInSeconds newPinLifetime) + throws IllegalArgumentException { + + super(reqToken, surlArray, newFileLifetime, newPinLifetime); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } + + @Override + public GridUserInterface getUser() { + + return auth; + } + + @Override + public String getPrincipal() { + + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityFileTransferInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityFileTransferInputData.java index 57975659..c8e521dd 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityFileTransferInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityFileTransferInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -9,44 +8,40 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.IdentityInputData; -/** - * @author Michele Dibenedetto - * - */ -public class IdentityFileTransferInputData extends - AnonymousFileTransferInputData implements IdentityInputData { - - protected final GridUserInterface user; - - /** - * @param user - * @param surl - * @param transferProtocols - * @throws IllegalArgumentException - * @throws IllegalStateException - */ - public IdentityFileTransferInputData(GridUserInterface user, TSURL surl, - TURLPrefix transferProtocols) throws IllegalArgumentException, - IllegalStateException { - - super(surl, transferProtocols); - if (user == null) { - throw new IllegalArgumentException( - "Unable to create the object. 
Received nul parameters: user = " + user); - } - this.user = user; - } - - @Override - public GridUserInterface getUser() { - - return user; - } - - @Override - public String getPrincipal() { - - return user.getDn(); - } - +/** @author Michele Dibenedetto */ +public class IdentityFileTransferInputData extends AnonymousFileTransferInputData + implements IdentityInputData { + + protected final GridUserInterface user; + + /** + * @param user + * @param surl + * @param transferProtocols + * @throws IllegalArgumentException + * @throws IllegalStateException + */ + public IdentityFileTransferInputData( + GridUserInterface user, TSURL surl, TURLPrefix transferProtocols) + throws IllegalArgumentException, IllegalStateException { + + super(surl, transferProtocols); + if (user == null) { + throw new IllegalArgumentException( + "Unable to create the object. Received nul parameters: user = " + user); + } + this.user = user; + } + + @Override + public GridUserInterface getUser() { + + return user; + } + + @Override + public String getPrincipal() { + + return user.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferFilesInputData.java index d3204ef3..a5e3ca52 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferFilesInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -8,32 +7,31 @@ import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityManageFileTransferFilesInputData extends - AnonymousManageFileTransferFilesInputData implements IdentityInputData { +public class IdentityManageFileTransferFilesInputData + extends AnonymousManageFileTransferFilesInputData implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityManageFileTransferFilesInputData(GridUserInterface auth, - ArrayOfSURLs arrayOfSURLs) throws IllegalArgumentException { + public IdentityManageFileTransferFilesInputData(GridUserInterface auth, ArrayOfSURLs arrayOfSURLs) + throws IllegalArgumentException { - super(arrayOfSURLs); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(arrayOfSURLs); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferRequestFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferRequestFilesInputData.java index 409efe97..22b98780 100644 --- 
a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferRequestFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityManageFileTransferRequestFilesInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -9,33 +8,32 @@ import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityManageFileTransferRequestFilesInputData extends - AnonymousManageFileTransferRequestFilesInputData implements IdentityInputData { +public class IdentityManageFileTransferRequestFilesInputData + extends AnonymousManageFileTransferRequestFilesInputData implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityManageFileTransferRequestFilesInputData( - GridUserInterface auth, TRequestToken requestToken, - ArrayOfSURLs arrayOfSURLs) throws IllegalArgumentException { + public IdentityManageFileTransferRequestFilesInputData( + GridUserInterface auth, TRequestToken requestToken, ArrayOfSURLs arrayOfSURLs) + throws IllegalArgumentException { - super(requestToken, arrayOfSURLs); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(requestToken, arrayOfSURLs); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPrepareToPutInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPrepareToPutInputData.java index 4bdff7ad..c207a925 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPrepareToPutInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPrepareToPutInputData.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the PutDone Input Data associated with the SRM request - * PutDone - * + * This class represents the PutDone Input Data associated with the SRM request PutDone + * * @author Alberto Forti * @author CNAF -INFN Bologna * @date Aug 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.common.types.TURLPrefix; @@ -19,30 +16,31 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityPrepareToPutInputData extends - AnonymousPrepareToPutInputData implements IdentityInputData { +public class IdentityPrepareToPutInputData extends AnonymousPrepareToPutInputData + implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityPrepareToPutInputData(GridUserInterface auth, TSURL surl, - TURLPrefix transferProtocols) throws IllegalArgumentException { + public IdentityPrepareToPutInputData( + GridUserInterface auth, TSURL surl, TURLPrefix transferProtocols) + throws IllegalArgumentException { - super(surl, transferProtocols); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(surl, transferProtocols); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - public GridUserInterface getUser() { + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPutDoneInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPutDoneInputData.java index d3ef62b4..6baac733 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPutDoneInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityPutDoneInputData.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the PutDone Input Data associated with the SRM request - * PutDone - * + * This class represents the PutDone Input Data associated with the SRM request PutDone + * * @author Alberto Forti * @author CNAF -INFN Bologna * @date Aug 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.datatransfer; import it.grid.storm.griduser.GridUserInterface; @@ -20,31 +17,31 @@ import it.grid.storm.synchcall.data.IdentityInputData; public class IdentityPutDoneInputData extends AnonymousPutDoneInputData - implements IdentityInputData { + implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityPutDoneInputData(GridUserInterface auth, - TRequestToken reqToken, ArrayOfSURLs surlArray) - throws IllegalArgumentException { + public IdentityPutDoneInputData( + GridUserInterface auth, TRequestToken reqToken, ArrayOfSURLs surlArray) + throws IllegalArgumentException { - super(reqToken, surlArray); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(reqToken, surlArray); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityReleaseRequestInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityReleaseRequestInputData.java index fa38a2d6..d1ed2734 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityReleaseRequestInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/IdentityReleaseRequestInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -8,32 +7,31 @@ import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityReleaseRequestInputData extends - AnonymousReleaseRequestInputData implements IdentityInputData { +public class IdentityReleaseRequestInputData extends AnonymousReleaseRequestInputData + implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityReleaseRequestInputData(GridUserInterface auth, - TRequestToken requestToken) throws IllegalArgumentException { + public IdentityReleaseRequestInputData(GridUserInterface auth, TRequestToken requestToken) + throws IllegalArgumentException { - super(requestToken); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(requestToken); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferFilesInputData.java index 6d62f00a..c7413c4a 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferFilesInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -9,9 +8,6 @@ public interface ManageFileTransferFilesInputData extends InputData { - /** - * @return the arrayOfSURLs - */ - public ArrayOfSURLs getArrayOfSURLs(); - + /** @return the arrayOfSURLs */ + public ArrayOfSURLs getArrayOfSURLs(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferOutputData.java index 70717750..8eea004d 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferOutputData.java @@ -1,100 +1,91 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURLReturnStatus; import it.grid.storm.synchcall.data.OutputData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *
<p>
- * - * This class represents the PutDone Output Data associated with the SRM request - * PutDone - * - * Authors: - * + * + *
<p>
This class represents the PutDone Output Data associated with the SRM request PutDone + * + *
<p>
Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it * @author Alberto Forti - * * @date = Oct 10, 2008 - * */ - public class ManageFileTransferOutputData implements OutputData { - private static final Logger log = LoggerFactory - .getLogger(ManageFileTransferOutputData.class); - private final TReturnStatus returnStatus; - private final ArrayOfTSURLReturnStatus arrayOfFileStatus; - - public ManageFileTransferOutputData(TReturnStatus retStatus) - throws IllegalArgumentException { - - if (retStatus == null) { - log.error("Unable to create the object, invalid arguments: retStatus=" - + retStatus); - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments"); - } - - this.returnStatus = retStatus; - this.arrayOfFileStatus = new ArrayOfTSURLReturnStatus(); - } - - public ManageFileTransferOutputData(TReturnStatus retStatus, - ArrayOfTSURLReturnStatus arrayOfFileStatus) throws IllegalArgumentException { - - if (retStatus == null || arrayOfFileStatus == null - || arrayOfFileStatus.getArray().isEmpty()) { - log.error("Unable to create the object, invalid arguments: retStatus=" - + retStatus + " arrayOfFileStatus=" + arrayOfFileStatus); - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments"); - } - this.returnStatus = retStatus; - this.arrayOfFileStatus = arrayOfFileStatus; - } - - /** - * Returns the returnStatus field - * - * @return TReturnStatus - */ - public TReturnStatus getReturnStatus() { - - return returnStatus; - } - - /** - * Returns the arrayOfFileStatuses field - * - * @return TSURLReturnStatus - */ - public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { - - return arrayOfFileStatus; - } - - public void addFileStatus(TSURLReturnStatus surlStatus) { - - arrayOfFileStatus.addTSurlReturnStatus(surlStatus); - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.OutputData#isSuccess() - */ - public boolean isSuccess() { - - return returnStatus.isSRM_SUCCESS(); - } + private static final Logger log = LoggerFactory.getLogger(ManageFileTransferOutputData.class); + private final TReturnStatus returnStatus; + private final ArrayOfTSURLReturnStatus arrayOfFileStatus; + + public ManageFileTransferOutputData(TReturnStatus retStatus) throws IllegalArgumentException { + + if (retStatus == null) { + log.error("Unable to create the object, invalid arguments: retStatus=" + retStatus); + throw new IllegalArgumentException("Unable to create the object, invalid arguments"); + } + + this.returnStatus = retStatus; + this.arrayOfFileStatus = new ArrayOfTSURLReturnStatus(); + } + + public ManageFileTransferOutputData( + TReturnStatus retStatus, ArrayOfTSURLReturnStatus arrayOfFileStatus) + throws IllegalArgumentException { + + if (retStatus == null || arrayOfFileStatus == null || arrayOfFileStatus.getArray().isEmpty()) { + log.error( + "Unable to create the object, invalid arguments: retStatus=" + + retStatus + + " arrayOfFileStatus=" + + arrayOfFileStatus); + throw new IllegalArgumentException("Unable to create the object, invalid arguments"); + } + this.returnStatus = retStatus; + this.arrayOfFileStatus = arrayOfFileStatus; + } + + /** + * Returns the returnStatus field + * + * @return TReturnStatus + */ + public TReturnStatus getReturnStatus() { + + return returnStatus; + } + + /** + * Returns the arrayOfFileStatuses field + * + * @return TSURLReturnStatus + */ + public ArrayOfTSURLReturnStatus getArrayOfFileStatuses() { + + return arrayOfFileStatus; + } + + public void addFileStatus(TSURLReturnStatus surlStatus) { + + 
arrayOfFileStatus.addTSurlReturnStatus(surlStatus); + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.OutputData#isSuccess() + */ + public boolean isSuccess() { + + return returnStatus.isSRM_SUCCESS(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestFilesInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestFilesInputData.java index be75cb49..1b002d06 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestFilesInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestFilesInputData.java @@ -1,16 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.synchcall.data.datatransfer; -/** - * @author Michele Dibenedetto - * - */ -public interface ManageFileTransferRequestFilesInputData extends - ManageFileTransferFilesInputData, ManageFileTransferRequestInputData { -} +/** @author Michele Dibenedetto */ +public interface ManageFileTransferRequestFilesInputData + extends ManageFileTransferFilesInputData, ManageFileTransferRequestInputData {} diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestInputData.java index be6b3fca..df287ebc 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/ManageFileTransferRequestInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -9,8 +8,6 @@ public interface ManageFileTransferRequestInputData extends InputData { - /** - * @return the requestToken - */ - public TRequestToken getRequestToken(); + /** @return the requestToken */ + public TRequestToken getRequestToken(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToGetOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToGetOutputData.java index f196d971..6a376534 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToGetOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToGetOutputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -12,48 +11,49 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; -/** - * @author Michele Dibenedetto - */ +/** @author Michele Dibenedetto */ public class PrepareToGetOutputData extends FileTransferOutputData { - private final TSizeInBytes fileSize; - private final TLifeTimeInSeconds remainingPinTime; - - public PrepareToGetOutputData(TSURL surl, TTURL turl, TReturnStatus status, - TRequestToken requestToken, TSizeInBytes fileSize, - TLifeTimeInSeconds remainingPinTime) throws IllegalArgumentException { - - super(surl, turl, status, requestToken); - if (fileSize == null || remainingPinTime == null) { - throw new IllegalArgumentException( - "Unable to create FileTransferOutputData. Received null arguments: " - + "fileSize = " + fileSize + " , remainingPinTime = " - + remainingPinTime); - } - this.fileSize = fileSize; - this.remainingPinTime = remainingPinTime; - } - - /** - * @return the fileSize - */ - public TSizeInBytes getFileSize() { - - return fileSize; - } - - /** - * @return the remainingPinTime - */ - public TLifeTimeInSeconds getRemainingPinTime() { - - return remainingPinTime; - } - - @Override - public boolean isSuccess() { - - return this.getStatus().getStatusCode().equals(TStatusCode.SRM_FILE_PINNED); - } + private final TSizeInBytes fileSize; + private final TLifeTimeInSeconds remainingPinTime; + + public PrepareToGetOutputData( + TSURL surl, + TTURL turl, + TReturnStatus status, + TRequestToken requestToken, + TSizeInBytes fileSize, + TLifeTimeInSeconds remainingPinTime) + throws IllegalArgumentException { + + super(surl, turl, status, requestToken); + if (fileSize == null || remainingPinTime == null) { + throw new IllegalArgumentException( + "Unable to create FileTransferOutputData. Received null arguments: " + + "fileSize = " + + fileSize + + " , remainingPinTime = " + + remainingPinTime); + } + this.fileSize = fileSize; + this.remainingPinTime = remainingPinTime; + } + + /** @return the fileSize */ + public TSizeInBytes getFileSize() { + + return fileSize; + } + + /** @return the remainingPinTime */ + public TLifeTimeInSeconds getRemainingPinTime() { + + return remainingPinTime; + } + + @Override + public boolean isSuccess() { + + return this.getStatus().getStatusCode().equals(TStatusCode.SRM_FILE_PINNED); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutInputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutInputData.java index faa1bf6e..b53aa658 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -10,34 +9,21 @@ public interface PrepareToPutInputData extends FileTransferInputData { - /** - * @return the overwriteMode - */ - public TOverwriteMode getOverwriteMode(); + /** @return the overwriteMode */ + public TOverwriteMode getOverwriteMode(); - /** - * @return the fileSize - */ - public TSizeInBytes getFileSize(); + /** @return the fileSize */ + public TSizeInBytes getFileSize(); - /** - * @return - */ - public TLifeTimeInSeconds getDesiredFileLifetime(); + /** @return */ + public TLifeTimeInSeconds getDesiredFileLifetime(); - /** - * @param desiredFileLifetime - */ - public void setDesiredFileLifetime(TLifeTimeInSeconds desiredFileLifetime); + /** @param desiredFileLifetime */ + public void setDesiredFileLifetime(TLifeTimeInSeconds desiredFileLifetime); - /** - * @param fileSize - */ - public void setFileSize(TSizeInBytes fileSize); - - /** - * @param overwriteMode - */ - public void setOverwriteMode(TOverwriteMode overwriteMode); + /** @param fileSize */ + public void setFileSize(TSizeInBytes fileSize); + /** @param overwriteMode */ + public void setOverwriteMode(TOverwriteMode overwriteMode); } diff --git a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutOutputData.java b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutOutputData.java index bb2b0c65..8c9fff8f 100644 --- a/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/datatransfer/PrepareToPutOutputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.datatransfer; @@ -10,20 +9,19 @@ import it.grid.storm.srm.types.TStatusCode; import it.grid.storm.srm.types.TTURL; -/** - * @author Enrico Vianello - */ +/** @author Enrico Vianello */ public class PrepareToPutOutputData extends FileTransferOutputData { - public PrepareToPutOutputData(TSURL surl, TTURL turl, TReturnStatus status, - TRequestToken requestToken) throws IllegalArgumentException { + public PrepareToPutOutputData( + TSURL surl, TTURL turl, TReturnStatus status, TRequestToken requestToken) + throws IllegalArgumentException { - super(surl, turl, status, requestToken); - } + super(surl, turl, status, requestToken); + } - @Override - public boolean isSuccess() { + @Override + public boolean isSuccess() { - return this.getStatus().getStatusCode().equals(TStatusCode.SRM_SPACE_AVAILABLE); - } + return this.getStatus().getStatusCode().equals(TStatusCode.SRM_SPACE_AVAILABLE); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousLSInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousLSInputData.java index 56e9a333..c725138e 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousLSInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousLSInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -8,117 +7,119 @@ import it.grid.storm.srm.types.TFileStorageType; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousLSInputData extends AbstractInputData implements - LSInputData { - - private final ArrayOfSURLs surlArray; - private final Boolean fullDetailedList; - private final Boolean allLevelRecursive; - private final Integer numOfLevels; - private final Integer offset; - private final Integer count; - private final boolean storageTypeSpecified; - - public AnonymousLSInputData(ArrayOfSURLs surlArray, - TFileStorageType fileStorageType, Boolean fullDetList, Boolean allLev, - Integer numOfLev, Integer offset, Integer count) - throws IllegalArgumentException { - - if (surlArray == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: surlArray=" - + surlArray); - } - this.surlArray = surlArray; - this.storageTypeSpecified = (fileStorageType != null && !fileStorageType - .equals(TFileStorageType.EMPTY)); - this.fullDetailedList = fullDetList; - this.allLevelRecursive = allLev; - this.numOfLevels = numOfLev; - this.offset = offset; - this.count = count; - - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.LSInputData#getSurlArray() - */ - - @Override - public ArrayOfSURLs getSurlArray() { - - return surlArray; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.directory.LSInputData#getStorageTypeSpecified - * () - */ - @Override - public boolean getStorageTypeSpecified() { - - return this.storageTypeSpecified; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.directory.LSInputData#getFullDetailedList() - */ - @Override - public Boolean getFullDetailedList() { - - return this.fullDetailedList; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.directory.LSInputData#getAllLevelRecursive() - */ - @Override - public Boolean getAllLevelRecursive() { - - return this.allLevelRecursive; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.LSInputData#getNumOfLevels() - */ - @Override - public Integer getNumOfLevels() { - - return this.numOfLevels; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.LSInputData#getOffset() - */ - @Override - public Integer getOffset() { - - return this.offset; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.LSInputData#getCount() - */ - @Override - public Integer getCount() { - - return this.count; - } +public class AnonymousLSInputData extends AbstractInputData implements LSInputData { + + private final ArrayOfSURLs surlArray; + private final Boolean fullDetailedList; + private final Boolean allLevelRecursive; + private final Integer numOfLevels; + private final Integer offset; + private final Integer count; + private final boolean storageTypeSpecified; + + public AnonymousLSInputData( + ArrayOfSURLs surlArray, + TFileStorageType fileStorageType, + Boolean fullDetList, + Boolean allLev, + Integer numOfLev, + Integer offset, + Integer count) + throws IllegalArgumentException { + + if (surlArray == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: surlArray=" + surlArray); + } + this.surlArray = surlArray; + this.storageTypeSpecified = + (fileStorageType != null && !fileStorageType.equals(TFileStorageType.EMPTY)); + this.fullDetailedList = fullDetList; + 
this.allLevelRecursive = allLev; + this.numOfLevels = numOfLev; + this.offset = offset; + this.count = count; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.LSInputData#getSurlArray() + */ + + @Override + public ArrayOfSURLs getSurlArray() { + + return surlArray; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.directory.LSInputData#getStorageTypeSpecified + * () + */ + @Override + public boolean getStorageTypeSpecified() { + + return this.storageTypeSpecified; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.directory.LSInputData#getFullDetailedList() + */ + @Override + public Boolean getFullDetailedList() { + + return this.fullDetailedList; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.directory.LSInputData#getAllLevelRecursive() + */ + @Override + public Boolean getAllLevelRecursive() { + + return this.allLevelRecursive; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.LSInputData#getNumOfLevels() + */ + @Override + public Integer getNumOfLevels() { + + return this.numOfLevels; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.LSInputData#getOffset() + */ + @Override + public Integer getOffset() { + + return this.offset; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.LSInputData#getCount() + */ + @Override + public Integer getCount() { + + return this.count; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMkdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMkdirInputData.java index 4bd95631..039f3c1e 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMkdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMkdirInputData.java @@ -1,30 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousMkdirInputData extends AbstractInputData implements - MkdirInputData { +public class AnonymousMkdirInputData extends AbstractInputData implements MkdirInputData { - private final TSURL surl; + private final TSURL surl; - public AnonymousMkdirInputData(TSURL surl) { + public AnonymousMkdirInputData(TSURL surl) { - if (surl == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: surl=" + surl); - } - this.surl = surl; - } + if (surl == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: surl=" + surl); + } + this.surl = surl; + } - @Override - public TSURL getSurl() { - - return surl; - } + @Override + public TSURL getSurl() { + return surl; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMvInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMvInputData.java index d77422e1..a5a6f840 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMvInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousMvInputData.java @@ -1,50 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousMvInputData extends AbstractInputData implements - MvInputData { - - private final TSURL fromSURL; - private final TSURL toSURL; - - public AnonymousMvInputData(TSURL fromSURL, TSURL toSURL) - throws IllegalArgumentException { - - if (fromSURL == null || toSURL == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: fromSURL=" + fromSURL - + " toSURL=" + toSURL); - } - this.fromSURL = fromSURL; - this.toSURL = toSURL; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.MvInputData#getToSURL() - */ - @Override - public TSURL getToSURL() { - - return toSURL; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.MvInputData#getFromSURL() - */ - @Override - public TSURL getFromSURL() { - - return fromSURL; - } - +public class AnonymousMvInputData extends AbstractInputData implements MvInputData { + + private final TSURL fromSURL; + private final TSURL toSURL; + + public AnonymousMvInputData(TSURL fromSURL, TSURL toSURL) throws IllegalArgumentException { + + if (fromSURL == null || toSURL == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: fromSURL=" + + fromSURL + + " toSURL=" + + toSURL); + } + this.fromSURL = fromSURL; + this.toSURL = toSURL; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.MvInputData#getToSURL() + */ + @Override + public TSURL getToSURL() { + + return toSURL; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.MvInputData#getFromSURL() + */ + @Override + public TSURL getFromSURL() { + + return fromSURL; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmInputData.java index bf6ae663..4e5b4936 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmInputData.java @@ -1,35 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousRmInputData extends AbstractInputData implements - RmInputData { +public class AnonymousRmInputData extends AbstractInputData implements RmInputData { - private final ArrayOfSURLs surlArray; + private final ArrayOfSURLs surlArray; - public AnonymousRmInputData(ArrayOfSURLs array) - throws IllegalArgumentException { + public AnonymousRmInputData(ArrayOfSURLs array) throws IllegalArgumentException { - if (array == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: array=" + array); - } - surlArray = array; - } + if (array == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: array=" + array); + } + surlArray = array; + } - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.RmInputData#getSurlArray() - */ - @Override - public ArrayOfSURLs getSurlArray() { + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.RmInputData#getSurlArray() + */ + @Override + public ArrayOfSURLs getSurlArray() { - return surlArray; - } + return surlArray; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmdirInputData.java index d8311884..5334af13 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/AnonymousRmdirInputData.java @@ -1,50 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousRmdirInputData extends AbstractInputData implements - RmdirInputData { - - private final TSURL surl; - private final Boolean recursive; - - public AnonymousRmdirInputData(TSURL surl, Boolean recursive) - throws IllegalArgumentException { - - if (surl == null || recursive == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: surl=" + surl - + " recursive=" + recursive); - } - this.surl = surl; - this.recursive = recursive; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.RmdirInputData#getSurl() - */ - @Override - public TSURL getSurl() { - - return surl; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.directory.RmdirInputData#getRecursive() - */ - @Override - public Boolean getRecursive() { - - return recursive; - } - +public class AnonymousRmdirInputData extends AbstractInputData implements RmdirInputData { + + private final TSURL surl; + private final Boolean recursive; + + public AnonymousRmdirInputData(TSURL surl, Boolean recursive) throws IllegalArgumentException { + + if (surl == null || recursive == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: surl=" + + surl + + " recursive=" + + recursive); + } + this.surl = surl; + this.recursive = recursive; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.RmdirInputData#getSurl() + */ + @Override + public TSURL getSurl() { + + return surl; + } + + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.directory.RmdirInputData#getRecursive() + */ + @Override + public Boolean getRecursive() { + + return recursive; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityLSInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityLSInputData.java index 633b07c0..57c0172e 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityLSInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityLSInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -10,48 +9,48 @@ import it.grid.storm.synchcall.data.IdentityInputData; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the LS Input Data associated with the SRM request, that - * is it contains info about: ...,ecc. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *
<p>
This class represents the LS Input Data associated with the SRM request, that is it contains + * info about: ...,ecc. + * * @author lucamag * @date May 28, 2008 - * */ - -public class IdentityLSInputData extends AnonymousLSInputData implements - IdentityInputData { - - private final GridUserInterface auth; - - public IdentityLSInputData(GridUserInterface auth, ArrayOfSURLs surlArray, - TFileStorageType fileStorageType, Boolean fullDetList, Boolean allLev, - Integer numOfLev, Integer offset, Integer count) - throws IllegalArgumentException { - - super(surlArray, fileStorageType, fullDetList, allLev, numOfLev, offset, - count); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } - - /** - * Get User - */ - @Override - public GridUserInterface getUser() { - - return this.auth; - } - - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } +public class IdentityLSInputData extends AnonymousLSInputData implements IdentityInputData { + + private final GridUserInterface auth; + + public IdentityLSInputData( + GridUserInterface auth, + ArrayOfSURLs surlArray, + TFileStorageType fileStorageType, + Boolean fullDetList, + Boolean allLev, + Integer numOfLev, + Integer offset, + Integer count) + throws IllegalArgumentException { + + super(surlArray, fileStorageType, fullDetList, allLev, numOfLev, offset, count); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } + + /** Get User */ + @Override + public GridUserInterface getUser() { + + return this.auth; + } + + @Override + public String getPrincipal() { + + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMkdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMkdirInputData.java index 7cf718ff..2f8d10b0 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMkdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMkdirInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -10,42 +9,39 @@ /** * This class is part of the StoRM project. - * - * This class represents the Mkdir Input Data associated with the SRM request, - * that is it contains info about: ...,ecc. - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * + * + *
<p>
This class represents the Mkdir Input Data associated with the SRM request, that is it + * contains info about: ...,ecc. + * + *
<p>
Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @date May 27, 2008 - * */ +public class IdentityMkdirInputData extends AnonymousMkdirInputData implements IdentityInputData { -public class IdentityMkdirInputData extends AnonymousMkdirInputData implements - IdentityInputData { + private final GridUserInterface auth; - private final GridUserInterface auth; + public IdentityMkdirInputData(GridUserInterface auth, TSURL surl) + throws IllegalArgumentException { - public IdentityMkdirInputData(GridUserInterface auth, TSURL surl) - throws IllegalArgumentException { + super(surl); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - super(surl); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + @Override + public GridUserInterface getUser() { - @Override - public GridUserInterface getUser() { + return this.auth; + } - return this.auth; - } + @Override + public String getPrincipal() { - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMvInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMvInputData.java index c30d64de..c1c7c2d0 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMvInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityMvInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,39 +8,37 @@ import it.grid.storm.synchcall.data.IdentityInputData; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project This class represents the Mv Input - * Data associated with the SRM request, that is it contains info about: - * ...,ecc. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project This class represents the Mv Input Data associated with the SRM request, that + * is it contains info about: ...,ecc. 
+ * * @author lucamag * @date May 28, 2008 */ -public class IdentityMvInputData extends AnonymousMvInputData implements - IdentityInputData { +public class IdentityMvInputData extends AnonymousMvInputData implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityMvInputData(GridUserInterface auth, TSURL fromSURL, - TSURL toSURL) throws IllegalArgumentException { + public IdentityMvInputData(GridUserInterface auth, TSURL fromSURL, TSURL toSURL) + throws IllegalArgumentException { - super(fromSURL, toSURL); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(fromSURL, toSURL); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmInputData.java index c371996f..5d87c4c0 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmInputData.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Rm Input Data associated with the SRM request, that - * is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Rm Input Data associated with the SRM request, that is it contains info + * about: ...,ecc. 
* @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -16,32 +15,30 @@ import it.grid.storm.srm.types.ArrayOfSURLs; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityRmInputData extends AnonymousRmInputData implements - IdentityInputData { +public class IdentityRmInputData extends AnonymousRmInputData implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityRmInputData(GridUserInterface auth, ArrayOfSURLs surlArray) - throws IllegalArgumentException { + public IdentityRmInputData(GridUserInterface auth, ArrayOfSURLs surlArray) + throws IllegalArgumentException { - super(surlArray); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(surlArray); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + @Override + public String getPrincipal() { + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmdirInputData.java index 0a86f035..4a9395e7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/IdentityRmdirInputData.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Rmdir Input Data associated with the SRM request, - * that is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Rmdir Input Data associated with the SRM request, that is it contains + * info about: ...,ecc. 
* @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -16,31 +15,30 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.synchcall.data.IdentityInputData; -public class IdentityRmdirInputData extends AnonymousRmdirInputData implements - IdentityInputData { +public class IdentityRmdirInputData extends AnonymousRmdirInputData implements IdentityInputData { - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityRmdirInputData(GridUserInterface auth, TSURL surl, - Boolean recursive) throws IllegalArgumentException { + public IdentityRmdirInputData(GridUserInterface auth, TSURL surl, Boolean recursive) + throws IllegalArgumentException { - super(surl, recursive); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(surl, recursive); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return this.auth; - } + return this.auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/LSInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/LSInputData.java index d57ce6ff..14556e89 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/LSInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/LSInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,42 +8,28 @@ public interface LSInputData extends InputData { - /** - * Method that returns ArrayOfSurls specify in SRM request. - */ + /** Method that returns ArrayOfSurls specify in SRM request. 
*/ + public ArrayOfSURLs getSurlArray(); - public ArrayOfSURLs getSurlArray(); + /** + * Get method for TFileStorageType + * + * @return + */ + public boolean getStorageTypeSpecified(); - /** - * Get method for TFileStorageType - * - * @return - */ - public boolean getStorageTypeSpecified(); + /** Get Full Detailed List */ + public Boolean getFullDetailedList(); - /** - * Get Full Detailed List - */ - public Boolean getFullDetailedList(); + /** get AllLevelRecurisve */ + public Boolean getAllLevelRecursive(); - /** - * get AllLevelRecurisve - */ - public Boolean getAllLevelRecursive(); + /** get NumOfLevels */ + public Integer getNumOfLevels(); - /** - * get NumOfLevels - */ - public Integer getNumOfLevels(); - - /** - * Get offset - */ - public Integer getOffset(); - - /** - * Get count - */ - public Integer getCount(); + /** Get offset */ + public Integer getOffset(); + /** Get count */ + public Integer getCount(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/LSOutputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/LSOutputData.java index 67dcf8a2..e271de3b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/LSOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/LSOutputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -11,94 +10,76 @@ import it.grid.storm.synchcall.data.exception.InvalidLSOutputAttributeException; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the LS Output Data associated with the SRM request, - * that is it contains info about: ...,ecc. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *

This class represents the LS Output Data associated with the SRM request, that is it contains + * info about: ...,ecc. + * * @author lucamag * @date May 28, 2008 - * */ - public class LSOutputData implements OutputData { - private TReturnStatus returnStatus = null; - private TRequestToken requestToken = null; - private ArrayOfTMetaDataPathDetail details = null; - - public LSOutputData() { - - } + private TReturnStatus returnStatus = null; + private TRequestToken requestToken = null; + private ArrayOfTMetaDataPathDetail details = null; - public LSOutputData(TReturnStatus retStatus, TRequestToken token, - ArrayOfTMetaDataPathDetail details) - throws InvalidLSOutputAttributeException { + public LSOutputData() {} - boolean ok = (details == null); + public LSOutputData( + TReturnStatus retStatus, TRequestToken token, ArrayOfTMetaDataPathDetail details) + throws InvalidLSOutputAttributeException { - if (!ok) - throw new InvalidLSOutputAttributeException(details); + boolean ok = (details == null); - this.returnStatus = retStatus; - this.requestToken = token; - this.details = details; + if (!ok) throw new InvalidLSOutputAttributeException(details); - } + this.returnStatus = retStatus; + this.requestToken = token; + this.details = details; + } - /** - * Method that get return Status. - */ - public TReturnStatus getStatus() { + /** Method that get return Status. */ + public TReturnStatus getStatus() { - return returnStatus; - } + return returnStatus; + } - /** - * Set ReturnStatus - */ - public void setStatus(TReturnStatus retStat) { + /** Set ReturnStatus */ + public void setStatus(TReturnStatus retStat) { - this.returnStatus = retStat; - } + this.returnStatus = retStat; + } - /** - * Method that get return Status. - */ - public TRequestToken getRequestToken() { + /** Method that get return Status. */ + public TRequestToken getRequestToken() { - return this.requestToken; - } + return this.requestToken; + } - /** - * Set TRequestToken - */ - public void setRequestToken(TRequestToken token) { + /** Set TRequestToken */ + public void setRequestToken(TRequestToken token) { - this.requestToken = token; - } + this.requestToken = token; + } - /** - * Method that return ArrayOfTMetaDataPath. - */ - public ArrayOfTMetaDataPathDetail getDetails() { + /** Method that return ArrayOfTMetaDataPath. */ + public ArrayOfTMetaDataPathDetail getDetails() { - return details; - } + return details; + } - /** - * Set ArrayOfTMetaDataPath - */ - public void setDetails(ArrayOfTMetaDataPathDetail details) { + /** Set ArrayOfTMetaDataPath */ + public void setDetails(ArrayOfTMetaDataPathDetail details) { - this.details = details; - } + this.details = details; + } - // @Override - public boolean isSuccess() { + // @Override + public boolean isSuccess() { - // TODO Auto-generated method stub - return true; - } + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/MkdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/MkdirInputData.java index 5490be35..8c5d721b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/MkdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/MkdirInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,10 +8,6 @@ public interface MkdirInputData extends InputData { - /** - * Method that SURL specified in SRM request. - */ - - public TSURL getSurl(); - + /** Method that SURL specified in SRM request. */ + public TSURL getSurl(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/MkdirOutputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/MkdirOutputData.java index 1cf89e84..a4bfe469 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/MkdirOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/MkdirOutputData.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Rm Output Data associated with the SRM request, - * that is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Rm Output Data associated with the SRM request, that is it contains + * info about: ...,ecc. * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -17,45 +16,35 @@ public class MkdirOutputData implements OutputData { - private TReturnStatus returnStatus = null; + private TReturnStatus returnStatus = null; - public MkdirOutputData() { + public MkdirOutputData() {} - } + public MkdirOutputData(TReturnStatus retStatus) { - public MkdirOutputData(TReturnStatus retStatus) { + this.returnStatus = retStatus; + } - this.returnStatus = retStatus; + /** Method that return Status. */ + public TReturnStatus getStatus() { - } + return returnStatus; + } - /** - * Method that return Status. - */ + /** Set ReturnStatus */ + public void setStatus(TReturnStatus retStat) { - public TReturnStatus getStatus() { + this.returnStatus = retStat; + } - return returnStatus; - } - - /** - * Set ReturnStatus - * - */ - public void setStatus(TReturnStatus retStat) { - - this.returnStatus = retStat; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.OutputData#isSuccess() - */ - public boolean isSuccess() { - - // TODO Auto-generated method stub - return true; - } + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.OutputData#isSuccess() + */ + public boolean isSuccess() { + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/MvInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/MvInputData.java index ff9e5799..90315c00 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/MvInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/MvInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,14 +8,9 @@ public interface MvInputData extends InputData { - /** - * @return the toSURL - */ - public TSURL getToSURL(); - - /** - * @return the fromSURL - */ - public TSURL getFromSURL(); + /** @return the toSURL */ + public TSURL getToSURL(); + /** @return the fromSURL */ + public TSURL getFromSURL(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/MvOutputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/MvOutputData.java index eb51b10d..c3daab1d 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/MvOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/MvOutputData.java @@ -1,64 +1,51 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Mv Output Data associated with the SRM request, - * that is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Mv Output Data associated with the SRM request, that is it contains + * info about: ...,ecc. * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 */ package it.grid.storm.synchcall.data.directory; -import java.util.Vector; - import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.exception.InvalidMvOutputAttributeException; public class MvOutputData implements OutputData { - private TReturnStatus returnStatus = null; - - public MvOutputData() { - - } - - public MvOutputData(TReturnStatus retStatus) - throws InvalidMvOutputAttributeException { + private TReturnStatus returnStatus = null; - boolean ok = (retStatus == null); - if (!ok) { - throw new InvalidMvOutputAttributeException(retStatus); - } - this.returnStatus = retStatus; - } + public MvOutputData() {} - /** - * Method that return Status. - */ + public MvOutputData(TReturnStatus retStatus) throws InvalidMvOutputAttributeException { - public TReturnStatus getStatus() { + boolean ok = (retStatus == null); + if (!ok) { + throw new InvalidMvOutputAttributeException(retStatus); + } + this.returnStatus = retStatus; + } - return returnStatus; - } + /** Method that return Status. */ + public TReturnStatus getStatus() { - /** - * Set ReturnStatus - * - */ - public void setStatus(TReturnStatus retStat) { + return returnStatus; + } - this.returnStatus = retStat; - } + /** Set ReturnStatus */ + public void setStatus(TReturnStatus retStat) { - // @Override - public boolean isSuccess() { + this.returnStatus = retStat; + } - // TODO Auto-generated method stub - return true; - } + // @Override + public boolean isSuccess() { + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/RmInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/RmInputData.java index 0efe27b4..05526aad 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/RmInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/RmInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,9 +8,6 @@ public interface RmInputData extends InputData { - /** - * @return the surlArray - */ - public ArrayOfSURLs getSurlArray(); - + /** @return the surlArray */ + public ArrayOfSURLs getSurlArray(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/RmOutputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/RmOutputData.java index d4c9a013..ddd9e96c 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/RmOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/RmOutputData.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Rm Output Data associated with the SRM request, - * that is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Rm Output Data associated with the SRM request, that is it contains + * info about: ...,ecc. * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -18,62 +17,49 @@ public class RmOutputData implements OutputData { - private TReturnStatus returnStatus = null; - private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; + private TReturnStatus returnStatus = null; + private ArrayOfTSURLReturnStatus arrayOfFileStatus = null; - public RmOutputData(TReturnStatus retStatus, ArrayOfTSURLReturnStatus details) { + public RmOutputData(TReturnStatus retStatus, ArrayOfTSURLReturnStatus details) { - if (retStatus == null) { - throw new IllegalArgumentException("RmOutputData: return status NULL"); - } - this.returnStatus = retStatus; - this.arrayOfFileStatus = details; - } - - public RmOutputData(TReturnStatus retStatus) { + if (retStatus == null) { + throw new IllegalArgumentException("RmOutputData: return status NULL"); + } + this.returnStatus = retStatus; + this.arrayOfFileStatus = details; + } - this(retStatus, null); - } + public RmOutputData(TReturnStatus retStatus) { - /** - * Method that return Status. - */ + this(retStatus, null); + } - public TReturnStatus getStatus() { + /** Method that return Status. */ + public TReturnStatus getStatus() { - return returnStatus; - } + return returnStatus; + } - /** - * Set ReturnStatus - * - */ - public void setStatus(TReturnStatus retStat) { + /** Set ReturnStatus */ + public void setStatus(TReturnStatus retStat) { - this.returnStatus = retStat; - } + this.returnStatus = retStat; + } - /** - * Method that return TSURLReturnStatus[]. - */ + /** Method that return TSURLReturnStatus[]. 
*/ + public ArrayOfTSURLReturnStatus getSurlStatus() { - public ArrayOfTSURLReturnStatus getSurlStatus() { + return arrayOfFileStatus; + } - return arrayOfFileStatus; - } + /** Set TSURLReturnStatus */ + public void setSurlStatus(ArrayOfTSURLReturnStatus details) { - /** - * Set TSURLReturnStatus - * - */ - public void setSurlStatus(ArrayOfTSURLReturnStatus details) { + this.arrayOfFileStatus = details; + } - this.arrayOfFileStatus = details; - } - - public boolean isSuccess() { - - return returnStatus.isSRM_SUCCESS(); - } + public boolean isSuccess() { + return returnStatus.isSRM_SUCCESS(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/RmdirInputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/RmdirInputData.java index ef202026..3d4f81b7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/RmdirInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/RmdirInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.directory; @@ -9,14 +8,9 @@ public interface RmdirInputData extends InputData { - /** - * @return the surl - */ - public TSURL getSurl(); - - /** - * @return the recursive - */ - public Boolean getRecursive(); + /** @return the surl */ + public TSURL getSurl(); + /** @return the recursive */ + public Boolean getRecursive(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/directory/RmdirOutputData.java b/src/main/java/it/grid/storm/synchcall/data/directory/RmdirOutputData.java index fe711496..e30adee3 100644 --- a/src/main/java/it/grid/storm/synchcall/data/directory/RmdirOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/directory/RmdirOutputData.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Rm Output Data associated with the SRM request, - * that is it contains info about: ...,ecc. * @author Magnoni Luca - * + * This class represents the Rm Output Data associated with the SRM request, that is it contains + * info about: ...,ecc. * @author Magnoni Luca + * * @author Cnaf -INFN Bologna * @date * @version 1.0 @@ -17,45 +16,35 @@ public class RmdirOutputData implements OutputData { - private TReturnStatus returnStatus = null; + private TReturnStatus returnStatus = null; - public RmdirOutputData() { + public RmdirOutputData() {} - } + public RmdirOutputData(TReturnStatus retStatus) { - public RmdirOutputData(TReturnStatus retStatus) { + this.returnStatus = retStatus; + } - this.returnStatus = retStatus; + /** Method that return Status. */ + public TReturnStatus getStatus() { - } + return returnStatus; + } - /** - * Method that return Status. 
- */ + /** Set ReturnStatus */ + public void setStatus(TReturnStatus retStat) { - public TReturnStatus getStatus() { + this.returnStatus = retStat; + } - return returnStatus; - } - - /** - * Set ReturnStatus - * - */ - public void setStatus(TReturnStatus retStat) { - - this.returnStatus = retStat; - } - - /* - * (non-Javadoc) - * - * @see it.grid.storm.synchcall.data.OutputData#isSuccess() - */ - public boolean isSuccess() { - - // TODO Auto-generated method stub - return true; - } + /* + * (non-Javadoc) + * + * @see it.grid.storm.synchcall.data.OutputData#isSuccess() + */ + public boolean isSuccess() { + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/discovery/AnonymousPingInputData.java b/src/main/java/it/grid/storm/synchcall/data/discovery/AnonymousPingInputData.java index 3f4f990c..a33087a5 100644 --- a/src/main/java/it/grid/storm/synchcall/data/discovery/AnonymousPingInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/discovery/AnonymousPingInputData.java @@ -1,21 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.discovery; import it.grid.storm.synchcall.data.AbstractInputData; /* - * + * * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the @@ -23,26 +22,24 @@ * the License. */ -public class AnonymousPingInputData extends AbstractInputData implements - PingInputData { +public class AnonymousPingInputData extends AbstractInputData implements PingInputData { - private final String authorizationID; + private final String authorizationID; - public AnonymousPingInputData(String authorizationID) { + public AnonymousPingInputData(String authorizationID) { - this.authorizationID = authorizationID; - } + this.authorizationID = authorizationID; + } - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.discovery.PingInputData#getAuthorizationID() - */ - @Override - public String getAuthorizationID() { - - return this.authorizationID; - } + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.discovery.PingInputData#getAuthorizationID() + */ + @Override + public String getAuthorizationID() { + return this.authorizationID; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/discovery/IdentityPingInputData.java b/src/main/java/it/grid/storm/synchcall/data/discovery/IdentityPingInputData.java index 585fe764..c643611f 100644 --- a/src/main/java/it/grid/storm/synchcall/data/discovery/IdentityPingInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/discovery/IdentityPingInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.discovery; @@ -8,53 +7,48 @@ import it.grid.storm.synchcall.data.IdentityInputData; /** - * This class is part of the StoRM project. This class represents the Ping Input - * Data - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * - * + * This class is part of the StoRM project. This class represents the Ping Input Data + * + *

Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @author Alberto Forti * @date May 28, 2008 - * */ +public class IdentityPingInputData extends AnonymousPingInputData implements IdentityInputData { -public class IdentityPingInputData extends AnonymousPingInputData implements - IdentityInputData { + private final GridUserInterface requestor; - private final GridUserInterface requestor; + public IdentityPingInputData(GridUserInterface gridUser, String authorizationID) + throws IllegalArgumentException { - public IdentityPingInputData(GridUserInterface gridUser, - String authorizationID) throws IllegalArgumentException { + super(authorizationID); + if (gridUser == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: gridUser=" + gridUser); + } + this.requestor = gridUser; + } - super(authorizationID); - if (gridUser == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: gridUser=" + gridUser); - } - this.requestor = gridUser; - } + /** + * Get the Requestor + * + * @return GridUserInterface + */ + public GridUserInterface getRequestor() { - /** - * Get the Requestor - * - * @return GridUserInterface - */ - public GridUserInterface getRequestor() { + return this.requestor; + } - return this.requestor; - } + @Override + public String getPrincipal() { - @Override - public String getPrincipal() { + return this.requestor.getDn(); + } - return this.requestor.getDn(); - } + @Override + public GridUserInterface getUser() { - @Override - public GridUserInterface getUser() { - - return requestor; - } + return requestor; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/discovery/PingInputData.java b/src/main/java/it/grid/storm/synchcall/data/discovery/PingInputData.java index da823d4b..a8de2f73 100644 --- a/src/main/java/it/grid/storm/synchcall/data/discovery/PingInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/discovery/PingInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.discovery; @@ -8,11 +7,10 @@ public interface PingInputData extends InputData { - /** - * Get the authorizatioID. - * - * @return String - */ - public String getAuthorizationID(); - + /** + * Get the authorizatioID. + * + * @return String + */ + public String getAuthorizationID(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/discovery/PingOutputData.java b/src/main/java/it/grid/storm/synchcall/data/discovery/PingOutputData.java index ed1eaa74..688390a9 100644 --- a/src/main/java/it/grid/storm/synchcall/data/discovery/PingOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/discovery/PingOutputData.java @@ -1,97 +1,84 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.discovery; -import java.util.Iterator; - import it.grid.storm.srm.types.ArrayOfTExtraInfo; -import it.grid.storm.srm.types.TExtraInfo; import it.grid.storm.synchcall.data.OutputData; /** - * This class is part of the StoRM project. 
This class represents the Ping - * Output Data - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * + * This class is part of the StoRM project. This class represents the Ping Output Data + * + *

Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @author Alberto Forti - * * @date May 28, 2008 - * */ - public class PingOutputData implements OutputData { - private String versionInfo = null; - private ArrayOfTExtraInfo extraInfoArray = null; - - public PingOutputData() { - - } + private String versionInfo = null; + private ArrayOfTExtraInfo extraInfoArray = null; - public PingOutputData(String versionInfo, ArrayOfTExtraInfo otherInfo) { + public PingOutputData() {} - this.versionInfo = versionInfo; - this.extraInfoArray = otherInfo; - } + public PingOutputData(String versionInfo, ArrayOfTExtraInfo otherInfo) { - /** - * Set versionInfo. - * - * @param versionInfo - * String - */ - public void setVersionInfo(String versionInfo) { + this.versionInfo = versionInfo; + this.extraInfoArray = otherInfo; + } - this.versionInfo = versionInfo; - } + /** + * Set versionInfo. + * + * @param versionInfo String + */ + public void setVersionInfo(String versionInfo) { - /** - * Get versionInfo. - * - * @return String - */ - public String getVersionInfo() { + this.versionInfo = versionInfo; + } - return this.versionInfo; - } + /** + * Get versionInfo. + * + * @return String + */ + public String getVersionInfo() { - /** - * Set extraInfoArray. - * - * @param extraInfoArray - * TExtraInfo - */ - public void setExtraInfoArray(ArrayOfTExtraInfo otherInfo) { + return this.versionInfo; + } - this.extraInfoArray = otherInfo; - } + /** + * Set extraInfoArray. + * + * @param extraInfoArray TExtraInfo + */ + public void setExtraInfoArray(ArrayOfTExtraInfo otherInfo) { - /** - * Get extraInfoArray. - * - * @return TExtraInfo - */ - public ArrayOfTExtraInfo getExtraInfoArray() { + this.extraInfoArray = otherInfo; + } - return this.extraInfoArray; - } + /** + * Get extraInfoArray. + * + * @return TExtraInfo + */ + public ArrayOfTExtraInfo getExtraInfoArray() { - // TODO - public boolean isSuccess() { + return this.extraInfoArray; + } - // TODO Auto-generated method stub - return true; - } + // TODO + public boolean isSuccess() { - public String toString() { + // TODO Auto-generated method stub + return true; + } - String result = versionInfo; - result += this.extraInfoArray.toString(); - return result; + public String toString() { - } + String result = versionInfo; + result += this.extraInfoArray.toString(); + return result; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesInputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesInputDataAttributeException.java index 226d5124..01e154b8 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesInputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesInputDataAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if AbortFiles input data is not - * well formed. * - * + * This class represents an Exception throws if AbortFiles input data is not well formed. 
* + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfSURLs; public class InvalidAbortFilesInputDataAttributeException extends Exception { - private boolean nullSurlInfo = true; + private boolean nullSurlInfo = true; - public InvalidAbortFilesInputDataAttributeException(ArrayOfSURLs surlInfo) { + public InvalidAbortFilesInputDataAttributeException(ArrayOfSURLs surlInfo) { - nullSurlInfo = (surlInfo == null); - } + nullSurlInfo = (surlInfo == null); + } - public String toString() { + public String toString() { - return "nullSurlInfo = " + nullSurlInfo; - } + return "nullSurlInfo = " + nullSurlInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesOutputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesOutputDataAttributeException.java index ca08c523..5d3fbbf0 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesOutputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortFilesOutputDataAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if AbortFiles input data is not - * well formed. * - * + * This class represents an Exception throws if AbortFiles input data is not well formed. * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; public class InvalidAbortFilesOutputDataAttributeException extends Exception { - private boolean nullSurlStatus = true; - - public InvalidAbortFilesOutputDataAttributeException( - ArrayOfTSURLReturnStatus surlStatus) { + private boolean nullSurlStatus = true; - nullSurlStatus = (surlStatus == null); - } + public InvalidAbortFilesOutputDataAttributeException(ArrayOfTSURLReturnStatus surlStatus) { - public String toString() { + nullSurlStatus = (surlStatus == null); + } - return "nullSurlStatusArray = " + nullSurlStatus; - } + public String toString() { + return "nullSurlStatusArray = " + nullSurlStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralInputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralInputDataAttributeException.java index 106be3fe..5695179b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralInputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralInputDataAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if AbortFiles input data is not - * well formed. * - * + * This class represents an Exception throws if AbortFiles input data is not well formed. 
* + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfSURLs; public class InvalidAbortGeneralInputDataAttributeException extends Exception { - private boolean nullSurlInfo = true; + private boolean nullSurlInfo = true; - public InvalidAbortGeneralInputDataAttributeException(ArrayOfSURLs surlInfo) { + public InvalidAbortGeneralInputDataAttributeException(ArrayOfSURLs surlInfo) { - nullSurlInfo = (surlInfo == null); - } + nullSurlInfo = (surlInfo == null); + } - public String toString() { + public String toString() { - return "nullSurlInfo = " + nullSurlInfo; - } + return "nullSurlInfo = " + nullSurlInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralOutputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralOutputDataAttributeException.java index 14dec410..afd73bcd 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralOutputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortGeneralOutputDataAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if AbortFiles input data is not - * well formed. * - * + * This class represents an Exception throws if AbortFiles input data is not well formed. * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; public class InvalidAbortGeneralOutputDataAttributeException extends Exception { - private boolean nullSurlStatus = true; - - public InvalidAbortGeneralOutputDataAttributeException( - ArrayOfTSURLReturnStatus surlStatus) { + private boolean nullSurlStatus = true; - nullSurlStatus = (surlStatus == null); - } + public InvalidAbortGeneralOutputDataAttributeException(ArrayOfTSURLReturnStatus surlStatus) { - public String toString() { + nullSurlStatus = (surlStatus == null); + } - return "nullSurlStatusArray = " + nullSurlStatus; - } + public String toString() { + return "nullSurlStatusArray = " + nullSurlStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestInputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestInputDataAttributeException.java index 0b1669c3..9f36829a 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestInputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestInputDataAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if Abort Request input data is not - * well formed. * - * + * This class represents an Exception throws if Abort Request input data is not well formed. 
* + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date Dec 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TRequestToken; public class InvalidAbortRequestInputDataAttributeException extends Exception { - private boolean nullTokenInfo = true; + private boolean nullTokenInfo = true; - public InvalidAbortRequestInputDataAttributeException(TRequestToken token) { + public InvalidAbortRequestInputDataAttributeException(TRequestToken token) { - nullTokenInfo = (token == null); - } + nullTokenInfo = (token == null); + } - public String toString() { + public String toString() { - return "nullTokenInfo = " + nullTokenInfo; - } + return "nullTokenInfo = " + nullTokenInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestOutputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestOutputDataAttributeException.java index 8ad779d5..2651a443 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestOutputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidAbortRequestOutputDataAttributeException.java @@ -1,33 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if AbortRequest output data is not - * well formed. * - * + * This class represents an Exception throws if AbortRequest output data is not well formed. * + * * @author Magnoni Luca * @author CNAF - INFN Bologna * @date AUG 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TReturnStatus; public class InvalidAbortRequestOutputDataAttributeException extends Exception { - private boolean nullStatus = true; - - public InvalidAbortRequestOutputDataAttributeException(TReturnStatus retStatus) { + private boolean nullStatus = true; - nullStatus = (retStatus == null); - } + public InvalidAbortRequestOutputDataAttributeException(TReturnStatus retStatus) { - public String toString() { + nullStatus = (retStatus == null); + } - return "nullStatus = " + nullStatus; - } + public String toString() { + return "nullStatus = " + nullStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataInputAttributeException.java index 496023f1..981bd2b7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataInputAttributeException.java @@ -1,36 +1,33 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; -import it.grid.storm.srm.types.ArrayOfTSpaceToken; import it.grid.storm.griduser.GridUserInterface; +import it.grid.storm.srm.types.ArrayOfTSpaceToken; public class InvalidGetSpaceMetaDataInputAttributeException extends Exception { - private boolean nullUser = true; - private boolean nullToken = true; + private boolean nullUser = true; + private boolean nullToken = true; - public InvalidGetSpaceMetaDataInputAttributeException(GridUserInterface user, - ArrayOfTSpaceToken tokenArray) { + public InvalidGetSpaceMetaDataInputAttributeException( + GridUserInterface user, ArrayOfTSpaceToken tokenArray) { - nullToken = (tokenArray == null); - nullUser = (user == null); - } + nullToken = (tokenArray == null); + nullUser = (user == null); + } - public String toString() { + public String toString() { - return "nullTokenArray = " + nullToken + "- nullUser = " + nullUser; - } + return "nullTokenArray = " + nullToken + "- nullUser = " + nullUser; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataOutputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataOutputAttributeException.java index cd13bcd2..a6e0ffd8 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataOutputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidGetSpaceMetaDataOutputAttributeException.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTMetaDataSpace; @@ -19,18 +16,18 @@ public class InvalidGetSpaceMetaDataOutputAttributeException extends Exception { - private boolean nullStatus = true; - private boolean nullMeta = true; + private boolean nullStatus = true; + private boolean nullMeta = true; - public InvalidGetSpaceMetaDataOutputAttributeException(TReturnStatus status, - ArrayOfTMetaDataSpace meta) { + public InvalidGetSpaceMetaDataOutputAttributeException( + TReturnStatus status, ArrayOfTMetaDataSpace meta) { - nullStatus = (status == null); - nullMeta = (meta == null); - } + nullStatus = (status == null); + nullMeta = (meta == null); + } - public String toString() { + public String toString() { - return "nullStatus = " + nullStatus + "- nullArray = " + nullMeta; - } + return "nullStatus = " + nullStatus + "- nullArray = " + nullMeta; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidLSOutputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidLSOutputAttributeException.java index 8696caa5..00517c42 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidLSOutputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidLSOutputAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTMetaDataPathDetail; public class InvalidLSOutputAttributeException extends Exception { - private boolean nullArray = true; + private boolean nullArray = true; - public InvalidLSOutputAttributeException(ArrayOfTMetaDataPathDetail array) { + public InvalidLSOutputAttributeException(ArrayOfTMetaDataPathDetail array) { - nullArray = (array == null); - } + nullArray = (array == null); + } - public String toString() { + public String toString() { - return "nullArray = " + nullArray; - } + return "nullArray = " + nullArray; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMkdirInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMkdirInputAttributeException.java index bf8691a7..ddb40624 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMkdirInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMkdirInputAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TSURL; public class InvalidMkdirInputAttributeException extends Exception { - private boolean nullSurl = true; + private boolean nullSurl = true; - public InvalidMkdirInputAttributeException(TSURL surl) { + public InvalidMkdirInputAttributeException(TSURL surl) { - nullSurl = (surl == null); - } + nullSurl = (surl == null); + } - public String toString() { + public String toString() { - return "nullSurl = " + nullSurl; - } + return "nullSurl = " + nullSurl; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvInputAttributeException.java index bd36ff19..d1507029 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvInputAttributeException.java @@ -1,34 +1,31 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if MvInputData is not well formed. - * * - * + * This class represents an Exception throws if MvInputData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TSURL; public class InvalidMvInputAttributeException extends Exception { - private boolean nullFromSurl = true; - private boolean nullToSurl = true; + private boolean nullFromSurl = true; + private boolean nullToSurl = true; - public InvalidMvInputAttributeException(TSURL fromSURL, TSURL toSURL) { + public InvalidMvInputAttributeException(TSURL fromSURL, TSURL toSURL) { - nullFromSurl = (fromSURL == null); - nullToSurl = (toSURL == null); - } + nullFromSurl = (fromSURL == null); + nullToSurl = (toSURL == null); + } - public String toString() { + public String toString() { - return "nullFromSurl = " + nullFromSurl + " , nullToSURL = " + nullToSurl; - } + return "nullFromSurl = " + nullFromSurl + " , nullToSURL = " + nullToSurl; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvOutputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvOutputAttributeException.java index a66c8a23..8b13f52b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvOutputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidMvOutputAttributeException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; @@ -8,16 +7,15 @@ public class InvalidMvOutputAttributeException extends Exception { - private boolean nullStat = true; + private boolean nullStat = true; - public InvalidMvOutputAttributeException(TReturnStatus stat) { + public InvalidMvOutputAttributeException(TReturnStatus stat) { - nullStat = (stat == null); - } + nullStat = (stat == null); + } - public String toString() { - - return "nullStatus = " + nullStat; - } + public String toString() { + return "nullStatus = " + nullStat; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidPutDoneOutputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidPutDoneOutputAttributeException.java index 8553b45d..6e89a6a8 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidPutDoneOutputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidPutDoneOutputAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if PutDone input data is not well - * formed. * - * + * This class represents an Exception throws if PutDone input data is not well formed. 
* + * * @author Alberto Forti * @author CNAF - INFN Bologna * @date AUG 2006 * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; public class InvalidPutDoneOutputAttributeException extends Exception { - private boolean nullSurlStatus = true; - - public InvalidPutDoneOutputAttributeException( - ArrayOfTSURLReturnStatus surlStatus) { + private boolean nullSurlStatus = true; - nullSurlStatus = (surlStatus == null); - } + public InvalidPutDoneOutputAttributeException(ArrayOfTSURLReturnStatus surlStatus) { - public String toString() { + nullSurlStatus = (surlStatus == null); + } - return "nullSurlStatusArray = " + nullSurlStatus; - } + public String toString() { + return "nullSurlStatusArray = " + nullSurlStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesInputAttributeException.java index a4e4b40c..b964b533 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesInputAttributeException.java @@ -1,11 +1,9 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if ReleaseFiles input data is not - * well formed. * - * + * This class represents an Exception throws if ReleaseFiles input data is not well formed. * + * * @author Alberto Forti * @author CNAF - INFN Bologna * @date AUG 2006 @@ -17,16 +15,15 @@ public class InvalidReleaseFilesInputAttributeException extends Exception { - private boolean nullArrayOfSURLs = true; + private boolean nullArrayOfSURLs = true; - public InvalidReleaseFilesInputAttributeException(ArrayOfSURLs arrayOfSURLs) { + public InvalidReleaseFilesInputAttributeException(ArrayOfSURLs arrayOfSURLs) { - nullArrayOfSURLs = (arrayOfSURLs == null); - } + nullArrayOfSURLs = (arrayOfSURLs == null); + } - public String toString() { - - return "nullArrayOfSURLs = " + nullArrayOfSURLs; - } + public String toString() { + return "nullArrayOfSURLs = " + nullArrayOfSURLs; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesOutputDataAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesOutputDataAttributeException.java index b38fe244..1f39f88b 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesOutputDataAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseFilesOutputDataAttributeException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; @@ -8,17 +7,15 @@ public class InvalidReleaseFilesOutputDataAttributeException extends Exception { - private boolean nullSurlStatus = true; + private boolean nullSurlStatus = true; - public InvalidReleaseFilesOutputDataAttributeException( - ArrayOfTSURLReturnStatus surlStatus) { + public InvalidReleaseFilesOutputDataAttributeException(ArrayOfTSURLReturnStatus surlStatus) { - nullSurlStatus = (surlStatus == null); - } + nullSurlStatus = (surlStatus == null); + } - public String toString() { - - return "nullSurlStatusArray = " + nullSurlStatus; - } + public String toString() { + return "nullSurlStatusArray = " + nullSurlStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceAttributesException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceAttributesException.java index 36a3a0e0..085b232f 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceAttributesException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceAttributesException.java @@ -1,36 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; -import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.griduser.GridUserInterface; +import it.grid.storm.srm.types.TSpaceToken; /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidReleaseSpaceAttributesException extends Exception { - private boolean nullAuth = true; - private boolean nullToken = true; + private boolean nullAuth = true; + private boolean nullToken = true; - public InvalidReleaseSpaceAttributesException(GridUserInterface guser, - TSpaceToken token) { + public InvalidReleaseSpaceAttributesException(GridUserInterface guser, TSpaceToken token) { - nullAuth = (guser == null); - nullToken = (token == null); - } + nullAuth = (guser == null); + nullToken = (token == null); + } - public String toString() { + public String toString() { - return "null-Auth=" + nullAuth + " and null-token=" + nullToken; - } + return "null-Auth=" + nullAuth + " and null-token=" + nullToken; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceOutputDataAttributesException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceOutputDataAttributesException.java index d8805eb8..279e2f06 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceOutputDataAttributesException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReleaseSpaceOutputDataAttributesException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TReturnStatus; /** - * This class represents an Exception throws if SpaceResData is not well formed. 
- * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidReleaseSpaceOutputDataAttributesException extends Exception { - private boolean nullStatus = true; + private boolean nullStatus = true; - public InvalidReleaseSpaceOutputDataAttributesException(TReturnStatus status) { + public InvalidReleaseSpaceOutputDataAttributesException(TReturnStatus status) { - nullStatus = (status == null); - } + nullStatus = (status == null); + } - public String toString() { + public String toString() { - return " nullStatus = " + nullStatus; - } + return " nullStatus = " + nullStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceInputDataAttributesException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceInputDataAttributesException.java index 0b4b6c4a..bb6b389d 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceInputDataAttributesException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceInputDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; @@ -9,33 +8,34 @@ import it.grid.storm.srm.types.TSizeInBytes; /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidReserveSpaceInputDataAttributesException extends Exception { - private boolean nullAuth = true; - private boolean nullSpaceDes = true; - private boolean nullRetentionPolicyInfo = true; + private boolean nullAuth = true; + private boolean nullSpaceDes = true; + private boolean nullRetentionPolicyInfo = true; - public InvalidReserveSpaceInputDataAttributesException( - GridUserInterface guser, TSizeInBytes spaceDes, - TRetentionPolicyInfo retentionPolicyInfo) { + public InvalidReserveSpaceInputDataAttributesException( + GridUserInterface guser, TSizeInBytes spaceDes, TRetentionPolicyInfo retentionPolicyInfo) { - nullAuth = (guser == null); - nullSpaceDes = (spaceDes == null); - nullRetentionPolicyInfo = (retentionPolicyInfo == null); - } + nullAuth = (guser == null); + nullSpaceDes = (spaceDes == null); + nullRetentionPolicyInfo = (retentionPolicyInfo == null); + } - public String toString() { + public String toString() { - return "The Problem is: null-Auth= " + nullAuth + ", nullSpaceDesired= " - + nullSpaceDes + ", nullRetentionPolicyInfo= " + nullRetentionPolicyInfo; - } + return "The Problem is: null-Auth= " + + nullAuth + + ", nullSpaceDesired= " + + nullSpaceDes + + ", nullRetentionPolicyInfo= " + + nullRetentionPolicyInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceOutputDataAttributesException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceOutputDataAttributesException.java index 239041b2..3ad9f224 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceOutputDataAttributesException.java +++ 
b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidReserveSpaceOutputDataAttributesException.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.exception; @@ -9,35 +8,37 @@ import it.grid.storm.srm.types.TSpaceToken; /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - public class InvalidReserveSpaceOutputDataAttributesException extends Exception { - private boolean nullType = true; - private boolean negSpaceDes = true; - private boolean negSpaceGuar = true; - private boolean lifetime = true; - private boolean nullToken = true; - private boolean nullStatus = true; - - public InvalidReserveSpaceOutputDataAttributesException( - TSizeInBytes spaceTotal, TSpaceToken spaceToken, TReturnStatus status) { - - negSpaceGuar = (spaceTotal == null); - nullToken = (spaceToken == null); - nullStatus = (status == null); - } - - public String toString() { - - return "null-TotalSpace = " + negSpaceGuar + "- nullToken = " + nullToken - + "- nullStatus = " + nullStatus; - } + private boolean nullType = true; + private boolean negSpaceDes = true; + private boolean negSpaceGuar = true; + private boolean lifetime = true; + private boolean nullToken = true; + private boolean nullStatus = true; + + public InvalidReserveSpaceOutputDataAttributesException( + TSizeInBytes spaceTotal, TSpaceToken spaceToken, TReturnStatus status) { + + negSpaceGuar = (spaceTotal == null); + nullToken = (spaceToken == null); + nullStatus = (status == null); + } + + public String toString() { + + return "null-TotalSpace = " + + negSpaceGuar + + "- nullToken = " + + nullToken + + "- nullStatus = " + + nullStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmInputAttributeException.java index af463333..77127453 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmInputAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfSURLs; -import java.util.Vector; - public class InvalidRmInputAttributeException extends Exception { - private boolean nullSurlInfo = true; + private boolean nullSurlInfo = true; - public InvalidRmInputAttributeException(ArrayOfSURLs surl) { + public InvalidRmInputAttributeException(ArrayOfSURLs surl) { - nullSurlInfo = (surl == null); - } + nullSurlInfo = (surl == null); + } - public String toString() { + public String toString() { - return "nullSurlInfo = " + nullSurlInfo; - } + return "nullSurlInfo = " + nullSurlInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmOutputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmOutputAttributeException.java index bd2b06b4..06ea3ca1 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmOutputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmOutputAttributeException.java @@ -1,34 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. * + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; -import java.util.Vector; - public class InvalidRmOutputAttributeException extends Exception { - private boolean nullSurlStatus = true; + private boolean nullSurlStatus = true; - public InvalidRmOutputAttributeException(ArrayOfTSURLReturnStatus surlStatus) { + public InvalidRmOutputAttributeException(ArrayOfTSURLReturnStatus surlStatus) { - nullSurlStatus = (surlStatus == null); - } + nullSurlStatus = (surlStatus == null); + } - public String toString() { + public String toString() { - return "nullSurlStatusArray = " + nullSurlStatus; - } + return "nullSurlStatusArray = " + nullSurlStatus; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmdirInputAttributeException.java b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmdirInputAttributeException.java index 09a6a491..69fc24f9 100644 --- a/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmdirInputAttributeException.java +++ b/src/main/java/it/grid/storm/synchcall/data/exception/InvalidRmdirInputAttributeException.java @@ -1,32 +1,29 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents an Exception throws if SpaceResData is not well formed. - * * - * + * This class represents an Exception throws if SpaceResData is not well formed. 
* + * * @author Magnoni Luca * @author Cnaf - INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.synchcall.data.exception; import it.grid.storm.srm.types.TSURL; public class InvalidRmdirInputAttributeException extends Exception { - private boolean nullSurlInfo = true; + private boolean nullSurlInfo = true; - public InvalidRmdirInputAttributeException(TSURL surl) { + public InvalidRmdirInputAttributeException(TSURL surl) { - nullSurlInfo = (surl == null); - } + nullSurlInfo = (surl == null); + } - public String toString() { + public String toString() { - return "nullSurlInfo = " + nullSurlInfo; - } + return "nullSurlInfo = " + nullSurlInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceMetaDataInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceMetaDataInputData.java index 45943ced..507459fd 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceMetaDataInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceMetaDataInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -8,32 +7,30 @@ import it.grid.storm.synchcall.data.AbstractInputData; public class AnonymousGetSpaceMetaDataInputData extends AbstractInputData - implements GetSpaceMetaDataInputData { - - private final ArrayOfTSpaceToken spaceTokenArray; - - public AnonymousGetSpaceMetaDataInputData(ArrayOfTSpaceToken tokenArray) - throws IllegalArgumentException { - - if (tokenArray == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: tokenArray=" - + tokenArray); - } - this.spaceTokenArray = tokenArray; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.GetSpaceMetaDataInputData#getTokenArray - * () - */ - @Override - public ArrayOfTSpaceToken getSpaceTokenArray() { - - return spaceTokenArray; - } - + implements GetSpaceMetaDataInputData { + + private final ArrayOfTSpaceToken spaceTokenArray; + + public AnonymousGetSpaceMetaDataInputData(ArrayOfTSpaceToken tokenArray) + throws IllegalArgumentException { + + if (tokenArray == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: tokenArray=" + tokenArray); + } + this.spaceTokenArray = tokenArray; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.GetSpaceMetaDataInputData#getTokenArray + * () + */ + @Override + public ArrayOfTSpaceToken getSpaceTokenArray() { + + return spaceTokenArray; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceTokensInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceTokensInputData.java index f5ee0333..78131d1e 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceTokensInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousGetSpaceTokensInputData.java @@ -1,37 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; import it.grid.storm.synchcall.data.AbstractInputData; public class AnonymousGetSpaceTokensInputData extends AbstractInputData - implements GetSpaceTokensInputData { - - private final String spaceTokenAlias; - - public AnonymousGetSpaceTokensInputData(String spaceTokenAlias) { - - if (spaceTokenAlias == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: spaceTokenAlias=" - + spaceTokenAlias); - } - this.spaceTokenAlias = spaceTokenAlias; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.GetSpaceTokensInputData#getSpaceTokenAlias - * () - */ - @Override - public String getSpaceTokenAlias() { - - return spaceTokenAlias; - } - + implements GetSpaceTokensInputData { + + private final String spaceTokenAlias; + + public AnonymousGetSpaceTokensInputData(String spaceTokenAlias) { + + if (spaceTokenAlias == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: spaceTokenAlias=" + spaceTokenAlias); + } + this.spaceTokenAlias = spaceTokenAlias; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.GetSpaceTokensInputData#getSpaceTokenAlias + * () + */ + @Override + public String getSpaceTokenAlias() { + + return spaceTokenAlias; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReleaseSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReleaseSpaceInputData.java index bd3feda6..f2cffc24 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReleaseSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReleaseSpaceInputData.java @@ -1,53 +1,53 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousReleaseSpaceInputData extends AbstractInputData implements - ReleaseSpaceInputData { - - private final TSpaceToken spaceToken; - private final boolean forceFileRelease; - - public AnonymousReleaseSpaceInputData(TSpaceToken spaceToken, - Boolean forceFileRelease) throws IllegalArgumentException { - - if (spaceToken == null || forceFileRelease == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: spaceToken=" - + spaceToken + " forceFileRelease=" + forceFileRelease); - } - this.spaceToken = spaceToken; - this.forceFileRelease = forceFileRelease; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReleaseSpaceInputData#getSpaceToken() - */ - @Override - public TSpaceToken getSpaceToken() { - - return spaceToken; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReleaseSpaceInputData#isForceFileRelease - * () - */ - @Override - public boolean isForceFileRelease() { - - return forceFileRelease; - } - +public class AnonymousReleaseSpaceInputData extends AbstractInputData + implements ReleaseSpaceInputData { + + private final TSpaceToken spaceToken; + private final boolean forceFileRelease; + + public AnonymousReleaseSpaceInputData(TSpaceToken spaceToken, Boolean forceFileRelease) + throws IllegalArgumentException { + + if (spaceToken == null || forceFileRelease == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: spaceToken=" + + spaceToken + + " forceFileRelease=" + + forceFileRelease); + } + this.spaceToken = spaceToken; + this.forceFileRelease = forceFileRelease; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReleaseSpaceInputData#getSpaceToken() + */ + @Override + public TSpaceToken getSpaceToken() { + + return spaceToken; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReleaseSpaceInputData#isForceFileRelease + * () + */ + @Override + public boolean isForceFileRelease() { + + return forceFileRelease; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReserveSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReserveSpaceInputData.java index f9d0d68e..cce8db18 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReserveSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/AnonymousReserveSpaceInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -10,122 +9,123 @@ import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.synchcall.data.AbstractInputData; -public class AnonymousReserveSpaceInputData extends AbstractInputData implements - ReserveSpaceInputData { - - private final String spaceTokenAlias; - private final TRetentionPolicyInfo retentionPolicyInfo; - private final TSizeInBytes desiredSize; - private final TSizeInBytes guaranteedSize; - private TLifeTimeInSeconds spaceLifetime; - private final ArrayOfTExtraInfo storageSystemInfo; - - public AnonymousReserveSpaceInputData(String spaceTokenAlias, - TRetentionPolicyInfo retentionPolicyInfo, TSizeInBytes spaceDesired, - TSizeInBytes spaceGuaranteed, ArrayOfTExtraInfo storageSystemInfo) - throws IllegalArgumentException { - - if (spaceTokenAlias == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: spaceTokenAlias=" - + spaceTokenAlias); - } - this.spaceTokenAlias = spaceTokenAlias; - if (spaceDesired == null) { - this.desiredSize = TSizeInBytes.makeEmpty(); - } else { - this.desiredSize = spaceDesired; - } - if (spaceGuaranteed == null) { - this.guaranteedSize = TSizeInBytes.makeEmpty(); - } else { - this.guaranteedSize = spaceGuaranteed; - } - this.spaceLifetime = TLifeTimeInSeconds.makeEmpty(); - this.storageSystemInfo = storageSystemInfo; - this.retentionPolicyInfo = retentionPolicyInfo; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceTokenAlias - * () - */ - @Override - public String getSpaceTokenAlias() { - - return spaceTokenAlias; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getRetentionPolicyInfo - * () - */ - @Override - public TRetentionPolicyInfo getRetentionPolicyInfo() { - - return retentionPolicyInfo; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceDesired() - */ - @Override - public TSizeInBytes getDesiredSize() { - - return desiredSize; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceGuaranteed - * () - */ - @Override - public TSizeInBytes getGuaranteedSize() { - - return guaranteedSize; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceLifetime() - */ - @Override - public TLifeTimeInSeconds getSpaceLifetime() { - - return spaceLifetime; - } - - @Override - public void setSpaceLifetime(TLifeTimeInSeconds spaceLifetime) { - - this.spaceLifetime = spaceLifetime; - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getStorageSystemInfo - * () - */ - @Override - public ArrayOfTExtraInfo getStorageSystemInfo() { - - return storageSystemInfo; - } - +public class AnonymousReserveSpaceInputData extends AbstractInputData + implements ReserveSpaceInputData { + + private final String spaceTokenAlias; + private final TRetentionPolicyInfo retentionPolicyInfo; + private final TSizeInBytes desiredSize; + private final TSizeInBytes guaranteedSize; + private TLifeTimeInSeconds spaceLifetime; + private final ArrayOfTExtraInfo storageSystemInfo; + + public AnonymousReserveSpaceInputData( + String spaceTokenAlias, + TRetentionPolicyInfo retentionPolicyInfo, + TSizeInBytes spaceDesired, + TSizeInBytes spaceGuaranteed, + ArrayOfTExtraInfo storageSystemInfo) + throws IllegalArgumentException { + 
+ if (spaceTokenAlias == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: spaceTokenAlias=" + spaceTokenAlias); + } + this.spaceTokenAlias = spaceTokenAlias; + if (spaceDesired == null) { + this.desiredSize = TSizeInBytes.makeEmpty(); + } else { + this.desiredSize = spaceDesired; + } + if (spaceGuaranteed == null) { + this.guaranteedSize = TSizeInBytes.makeEmpty(); + } else { + this.guaranteedSize = spaceGuaranteed; + } + this.spaceLifetime = TLifeTimeInSeconds.makeEmpty(); + this.storageSystemInfo = storageSystemInfo; + this.retentionPolicyInfo = retentionPolicyInfo; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceTokenAlias + * () + */ + @Override + public String getSpaceTokenAlias() { + + return spaceTokenAlias; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getRetentionPolicyInfo + * () + */ + @Override + public TRetentionPolicyInfo getRetentionPolicyInfo() { + + return retentionPolicyInfo; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceDesired() + */ + @Override + public TSizeInBytes getDesiredSize() { + + return desiredSize; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceGuaranteed + * () + */ + @Override + public TSizeInBytes getGuaranteedSize() { + + return guaranteedSize; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getSpaceLifetime() + */ + @Override + public TLifeTimeInSeconds getSpaceLifetime() { + + return spaceLifetime; + } + + @Override + public void setSpaceLifetime(TLifeTimeInSeconds spaceLifetime) { + + this.spaceLifetime = spaceLifetime; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.synchcall.data.space.ReserveSpaceInputData#getStorageSystemInfo + * () + */ + @Override + public ArrayOfTExtraInfo getStorageSystemInfo() { + + return storageSystemInfo; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataInputData.java index b15684be..28b0011d 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -9,9 +8,6 @@ public interface GetSpaceMetaDataInputData extends InputData { - /** - * @return the tokenArray - */ - public ArrayOfTSpaceToken getSpaceTokenArray(); - + /** @return the tokenArray */ + public ArrayOfTSpaceToken getSpaceTokenArray(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataOutputData.java b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataOutputData.java index f249753a..1f8e1b34 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceMetaDataOutputData.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date @@ -15,90 +13,70 @@ */ package it.grid.storm.synchcall.data.space; -import java.io.Serializable; - import it.grid.storm.srm.types.ArrayOfTMetaDataSpace; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.exception.InvalidGetSpaceMetaDataOutputAttributeException; +import java.io.Serializable; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *
This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. + * * @author lucamag * @date May 29, 2008 - * */ - public class GetSpaceMetaDataOutputData implements OutputData, Serializable { - private TReturnStatus status = null; - private ArrayOfTMetaDataSpace metaDataArray = null; - - public GetSpaceMetaDataOutputData() { - - } - - public GetSpaceMetaDataOutputData(TReturnStatus status, - ArrayOfTMetaDataSpace metaDataArray) - throws InvalidGetSpaceMetaDataOutputAttributeException { - - boolean ok = status != null && metaDataArray != null; - - if (!ok) { - throw new InvalidGetSpaceMetaDataOutputAttributeException(status, - metaDataArray); - } - - this.status = status; - this.metaDataArray = metaDataArray; + private TReturnStatus status = null; + private ArrayOfTMetaDataSpace metaDataArray = null; - } + public GetSpaceMetaDataOutputData() {} - /** - * Method that returns GridUser specify in SRM request. - */ + public GetSpaceMetaDataOutputData(TReturnStatus status, ArrayOfTMetaDataSpace metaDataArray) + throws InvalidGetSpaceMetaDataOutputAttributeException { - public TReturnStatus getStatus() { + boolean ok = status != null && metaDataArray != null; - return status; - } + if (!ok) { + throw new InvalidGetSpaceMetaDataOutputAttributeException(status, metaDataArray); + } - /** - * - * - */ - public void setStatus(TReturnStatus status) { + this.status = status; + this.metaDataArray = metaDataArray; + } - this.status = status; + /** Method that returns GridUser specify in SRM request. */ + public TReturnStatus getStatus() { - } + return status; + } - /** - * Method return metaData. i n queue. - */ + /** */ + public void setStatus(TReturnStatus status) { - public ArrayOfTMetaDataSpace getMetaDataSpaceArray() { + this.status = status; + } - return metaDataArray; - } + /** Method return metaData. i n queue. */ + public ArrayOfTMetaDataSpace getMetaDataSpaceArray() { - public void setMetaDataSpaceArray(ArrayOfTMetaDataSpace metaDataArray) { + return metaDataArray; + } - this.metaDataArray = metaDataArray; - } + public void setMetaDataSpaceArray(ArrayOfTMetaDataSpace metaDataArray) { - // @Override - public boolean isSuccess() { + this.metaDataArray = metaDataArray; + } - // TODO Auto-generated method stub - return false; - } + // @Override + public boolean isSuccess() { + // TODO Auto-generated method stub + return false; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensInputData.java index 5167eba4..9faf453d 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -8,9 +7,6 @@ public interface GetSpaceTokensInputData extends InputData { - /** - * @return the spaceTokenAlias - */ - public String getSpaceTokenAlias(); - + /** @return the spaceTokenAlias */ + public String getSpaceTokenAlias(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensOutputData.java b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensOutputData.java index 67f1ea50..a5a7c71f 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/GetSpaceTokensOutputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -9,70 +8,56 @@ import it.grid.storm.synchcall.data.OutputData; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * GetSpaceTokens output data. - * - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *
GetSpaceTokens output data. + * * @author lucamag * @author Alberto Forti - * * @date May 29, 2008 - * */ public class GetSpaceTokensOutputData implements OutputData { - private TReturnStatus status = null; - private ArrayOfTSpaceToken arrayOfSpaceTokens = null; + private TReturnStatus status = null; + private ArrayOfTSpaceToken arrayOfSpaceTokens = null; - public GetSpaceTokensOutputData() { + public GetSpaceTokensOutputData() {} - } + public GetSpaceTokensOutputData(TReturnStatus status, ArrayOfTSpaceToken arrayOfSpaceTokens) { - public GetSpaceTokensOutputData(TReturnStatus status, - ArrayOfTSpaceToken arrayOfSpaceTokens) { + this.status = status; + this.arrayOfSpaceTokens = arrayOfSpaceTokens; + } - this.status = status; - this.arrayOfSpaceTokens = arrayOfSpaceTokens; - } + /** Returns the status. */ + public TReturnStatus getStatus() { - /** - * Returns the status. - */ - public TReturnStatus getStatus() { + return status; + } - return status; - } + /** Sets the status. */ + public void setStatus(TReturnStatus status) { - /** - * Sets the status. - */ - public void setStatus(TReturnStatus status) { + this.status = status; + } - this.status = status; - } + /** Returns arrayOfSpaceTokens. */ + public ArrayOfTSpaceToken getArrayOfSpaceTokens() { - /** - * Returns arrayOfSpaceTokens. - */ - public ArrayOfTSpaceToken getArrayOfSpaceTokens() { + return this.arrayOfSpaceTokens; + } - return this.arrayOfSpaceTokens; - } + /** Sets arrayOfSpaceTokens. */ + public void setArrayOfSpaceTokens(ArrayOfTSpaceToken arrayOfSpaceTokens) { - /** - * Sets arrayOfSpaceTokens. - */ - public void setArrayOfSpaceTokens(ArrayOfTSpaceToken arrayOfSpaceTokens) { + this.arrayOfSpaceTokens = arrayOfSpaceTokens; + } - this.arrayOfSpaceTokens = arrayOfSpaceTokens; - } + // @Override + public boolean isSuccess() { - // @Override - public boolean isSuccess() { - - // TODO Auto-generated method stub - return true; - } + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceMetaDataInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceMetaDataInputData.java index 403d9ad6..4b57eeff 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceMetaDataInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceMetaDataInputData.java @@ -1,58 +1,52 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; -import java.io.Serializable; - import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.ArrayOfTSpaceToken; import it.grid.storm.synchcall.data.IdentityInputData; +import java.io.Serializable; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *
This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. + * * @author lucamag * @date May 29, 2008 - * */ +public class IdentityGetSpaceMetaDataInputData extends AnonymousGetSpaceMetaDataInputData + implements Serializable, IdentityInputData { -public class IdentityGetSpaceMetaDataInputData extends - AnonymousGetSpaceMetaDataInputData implements Serializable, IdentityInputData { + /** */ + private static final long serialVersionUID = -7823169083758886055L; - /** - * - */ - private static final long serialVersionUID = -7823169083758886055L; - private final GridUserInterface auth; + private final GridUserInterface auth; - public IdentityGetSpaceMetaDataInputData(GridUserInterface auth, - ArrayOfTSpaceToken tokenArray) throws IllegalArgumentException { + public IdentityGetSpaceMetaDataInputData(GridUserInterface auth, ArrayOfTSpaceToken tokenArray) + throws IllegalArgumentException { - super(tokenArray); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + super(tokenArray); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - @Override - public GridUserInterface getUser() { + @Override + public GridUserInterface getUser() { - return auth; - } + return auth; + } - @Override - public String getPrincipal() { + @Override + public String getPrincipal() { - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceTokensInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceTokensInputData.java index 3fccb376..f1f2a882 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceTokensInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/IdentityGetSpaceTokensInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -8,42 +7,39 @@ import it.grid.storm.synchcall.data.IdentityInputData; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * GetSpaceTokens request input data. - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *
GetSpaceTokens request input data. + * * @author lucamag * @author Alberto Forti * @date May 29, 2008 - * */ +public class IdentityGetSpaceTokensInputData extends AnonymousGetSpaceTokensInputData + implements IdentityInputData { -public class IdentityGetSpaceTokensInputData extends - AnonymousGetSpaceTokensInputData implements IdentityInputData { + private final GridUserInterface auth; - private final GridUserInterface auth; + public IdentityGetSpaceTokensInputData(GridUserInterface auth, String spaceTokenAlias) { - public IdentityGetSpaceTokensInputData(GridUserInterface auth, - String spaceTokenAlias) { + super(spaceTokenAlias); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } - super(spaceTokenAlias); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } + @Override + public GridUserInterface getUser() { - @Override - public GridUserInterface getUser() { + return auth; + } - return auth; - } + @Override + public String getPrincipal() { - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/IdentityReleaseSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/IdentityReleaseSpaceInputData.java index 789be649..da8c3e15 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/IdentityReleaseSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/IdentityReleaseSpaceInputData.java @@ -1,52 +1,48 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; +import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TSpaceToken; import it.grid.storm.synchcall.data.IdentityInputData; -import it.grid.storm.griduser.GridUserInterface; /** - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. 
+ * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ +public class IdentityReleaseSpaceInputData extends AnonymousReleaseSpaceInputData + implements IdentityInputData { -public class IdentityReleaseSpaceInputData extends - AnonymousReleaseSpaceInputData implements IdentityInputData { - - private final GridUserInterface gUser; - - public IdentityReleaseSpaceInputData(GridUserInterface auth, - TSpaceToken spaceToken, Boolean forceFileRelease) - throws IllegalArgumentException { + private final GridUserInterface gUser; - super(spaceToken, forceFileRelease); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.gUser = auth; - } + public IdentityReleaseSpaceInputData( + GridUserInterface auth, TSpaceToken spaceToken, Boolean forceFileRelease) + throws IllegalArgumentException { - @Override - public GridUserInterface getUser() { + super(spaceToken, forceFileRelease); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.gUser = auth; + } - return this.gUser; - } + @Override + public GridUserInterface getUser() { - @Override - public String getPrincipal() { + return this.gUser; + } - return this.gUser.getDn(); - } + @Override + public String getPrincipal() { + return this.gUser.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/IdentityReserveSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/IdentityReserveSpaceInputData.java index eaa2509b..f7987fd8 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/IdentityReserveSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/IdentityReserveSpaceInputData.java @@ -1,58 +1,57 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; -import java.io.Serializable; - import it.grid.storm.griduser.GridUserInterface; -import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.srm.types.ArrayOfTExtraInfo; import it.grid.storm.srm.types.TRetentionPolicyInfo; +import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.synchcall.data.IdentityInputData; +import java.io.Serializable; /** - * This class represents the SpaceReservationData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. 
+ * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - -public class IdentityReserveSpaceInputData extends - AnonymousReserveSpaceInputData implements Serializable, IdentityInputData { - - private static final long serialVersionUID = 2840674835389671669L; - private final GridUserInterface auth; - - public IdentityReserveSpaceInputData(GridUserInterface auth, - String spaceTokenAlias, TRetentionPolicyInfo retentionPolicyInfo, - TSizeInBytes spaceDesired, TSizeInBytes spaceGuaranteed, - ArrayOfTExtraInfo storageSystemInfo) throws IllegalArgumentException { - - super(spaceTokenAlias, retentionPolicyInfo, spaceDesired, spaceGuaranteed, - storageSystemInfo); - if (auth == null) { - throw new IllegalArgumentException( - "Unable to create the object, invalid arguments: auth=" + auth); - } - this.auth = auth; - } - - @Override - public GridUserInterface getUser() { - - return auth; - } - - @Override - public String getPrincipal() { - - return this.auth.getDn(); - } +public class IdentityReserveSpaceInputData extends AnonymousReserveSpaceInputData + implements Serializable, IdentityInputData { + + private static final long serialVersionUID = 2840674835389671669L; + private final GridUserInterface auth; + + public IdentityReserveSpaceInputData( + GridUserInterface auth, + String spaceTokenAlias, + TRetentionPolicyInfo retentionPolicyInfo, + TSizeInBytes spaceDesired, + TSizeInBytes spaceGuaranteed, + ArrayOfTExtraInfo storageSystemInfo) + throws IllegalArgumentException { + + super(spaceTokenAlias, retentionPolicyInfo, spaceDesired, spaceGuaranteed, storageSystemInfo); + if (auth == null) { + throw new IllegalArgumentException( + "Unable to create the object, invalid arguments: auth=" + auth); + } + this.auth = auth; + } + + @Override + public GridUserInterface getUser() { + + return auth; + } + + @Override + public String getPrincipal() { + + return this.auth.getDn(); + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceInputData.java index b6448158..605ab1ff 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -9,14 +8,9 @@ public interface ReleaseSpaceInputData extends InputData { - /** - * @return the spaceToken - */ - public TSpaceToken getSpaceToken(); - - /** - * @return the forceFileRelease - */ - public boolean isForceFileRelease(); + /** @return the spaceToken */ + public TSpaceToken getSpaceToken(); + /** @return the forceFileRelease */ + public boolean isForceFileRelease(); } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceOutputData.java b/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceOutputData.java index bdab790e..63d79135 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/ReleaseSpaceOutputData.java @@ -1,65 +1,54 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; -import java.io.Serializable; - import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.exception.InvalidReleaseSpaceOutputDataAttributesException; +import java.io.Serializable; /** - * This class represents the SpaceReservationOutputData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationOutputData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date - * */ public class ReleaseSpaceOutputData implements Serializable, OutputData { - private TReturnStatus status = null; - - public ReleaseSpaceOutputData() { - - } + private TReturnStatus status = null; - public ReleaseSpaceOutputData(TReturnStatus status) - throws InvalidReleaseSpaceOutputDataAttributesException { + public ReleaseSpaceOutputData() {} - boolean ok = status != null; - if (!ok) { - throw new InvalidReleaseSpaceOutputDataAttributesException(status); - } - this.status = status; - } + public ReleaseSpaceOutputData(TReturnStatus status) + throws InvalidReleaseSpaceOutputDataAttributesException { - /** - * Method that return TReturnStatus status. - */ - public TReturnStatus getStatus() { + boolean ok = status != null; + if (!ok) { + throw new InvalidReleaseSpaceOutputDataAttributesException(status); + } + this.status = status; + } - return status; - } + /** Method that return TReturnStatus status. */ + public TReturnStatus getStatus() { - /** - * Method that set TReturnStatus status. - */ - public void setStatus(TReturnStatus newstatus) { + return status; + } - status = newstatus; - } + /** Method that set TReturnStatus status. */ + public void setStatus(TReturnStatus newstatus) { - // @Override - public boolean isSuccess() { + status = newstatus; + } - // TODO Auto-generated method stub - return true; - } + // @Override + public boolean isSuccess() { + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceInputData.java b/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceInputData.java index 0bb602e0..fcafa6b7 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceInputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceInputData.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.data.space; @@ -12,39 +11,24 @@ public interface ReserveSpaceInputData extends InputData { - /** - * @return the spaceTokenAlias - */ - public String getSpaceTokenAlias(); - - /** - * @return the retentionPolicyInfo - */ - public TRetentionPolicyInfo getRetentionPolicyInfo(); - - /** - * @return the spaceDesired - */ - public TSizeInBytes getDesiredSize(); - - /** - * @return the spaceGuaranteed - */ - public TSizeInBytes getGuaranteedSize(); - - /** - * @return the spaceLifetime - */ - public TLifeTimeInSeconds getSpaceLifetime(); - - /** - * @return the storageSystemInfo - */ - public ArrayOfTExtraInfo getStorageSystemInfo(); - - /** - * @param spaceLifetime - */ - void setSpaceLifetime(TLifeTimeInSeconds spaceLifetime); + /** @return the spaceTokenAlias */ + public String getSpaceTokenAlias(); + /** @return the retentionPolicyInfo */ + public TRetentionPolicyInfo getRetentionPolicyInfo(); + + /** @return the spaceDesired */ + public TSizeInBytes getDesiredSize(); + + /** @return the spaceGuaranteed */ + public TSizeInBytes getGuaranteedSize(); + + /** @return the spaceLifetime */ + public TLifeTimeInSeconds getSpaceLifetime(); + + /** @return the storageSystemInfo */ + public ArrayOfTExtraInfo getStorageSystemInfo(); + + /** @param spaceLifetime */ + void setSpaceLifetime(TLifeTimeInSeconds spaceLifetime); } diff --git a/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceOutputData.java b/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceOutputData.java index 21c975ac..18e384d1 100644 --- a/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceOutputData.java +++ b/src/main/java/it/grid/storm/synchcall/data/space/ReserveSpaceOutputData.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the SpaceReservationOutputData associated with the SRM - * request, that is it contains info about: UserID, spaceType, SizeDesired, - * SizeGuaranteed,ecc. Number of files progressing, Number of files finished, - * and whether the request is currently suspended. - * + * This class represents the SpaceReservationOutputData associated with the SRM request, that is it + * contains info about: UserID, spaceType, SizeDesired, SizeGuaranteed,ecc. Number of files + * progressing, Number of files finished, and whether the request is currently suspended. 
+ * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date @@ -15,127 +13,112 @@ */ package it.grid.storm.synchcall.data.space; -import java.io.Serializable; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import it.grid.storm.srm.types.*; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.exception.InvalidReserveSpaceOutputDataAttributesException; +import java.io.Serializable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ReserveSpaceOutputData implements Serializable, OutputData { - /** - * - */ - private static final long serialVersionUID = -9112229304313364826L; - - private static final Logger log = LoggerFactory.getLogger(ReserveSpaceOutputData.class); - - private TSizeInBytes spaceTotal = null; - private TSizeInBytes spaceGuaranteed = null; - private TLifeTimeInSeconds spaceLifetime = null; - private TSpaceToken spaceToken = null; - private TReturnStatus status = null; - private TRetentionPolicyInfo retentionPolicyInfo = null; + /** */ + private static final long serialVersionUID = -9112229304313364826L; + + private static final Logger log = LoggerFactory.getLogger(ReserveSpaceOutputData.class); - public ReserveSpaceOutputData(TReturnStatus status) { + private TSizeInBytes spaceTotal = null; + private TSizeInBytes spaceGuaranteed = null; + private TLifeTimeInSeconds spaceLifetime = null; + private TSpaceToken spaceToken = null; + private TReturnStatus status = null; + private TRetentionPolicyInfo retentionPolicyInfo = null; - this.status = status; - } + public ReserveSpaceOutputData(TReturnStatus status) { - public ReserveSpaceOutputData(TSizeInBytes spaceTotal, - TSizeInBytes spaceGuaranteed, TLifeTimeInSeconds spaceLifetime, - TSpaceToken spaceToken, TReturnStatus status) - throws InvalidReserveSpaceOutputDataAttributesException { + this.status = status; + } - boolean ok = status != null; + public ReserveSpaceOutputData( + TSizeInBytes spaceTotal, + TSizeInBytes spaceGuaranteed, + TLifeTimeInSeconds spaceLifetime, + TSpaceToken spaceToken, + TReturnStatus status) + throws InvalidReserveSpaceOutputDataAttributesException { - if (!ok) { - throw new InvalidReserveSpaceOutputDataAttributesException(spaceTotal, - spaceToken, status); - } + boolean ok = status != null; - this.spaceTotal = spaceTotal; - this.spaceGuaranteed = spaceGuaranteed; - this.spaceLifetime = spaceLifetime; - this.spaceToken = spaceToken; - this.status = status; - } + if (!ok) { + throw new InvalidReserveSpaceOutputDataAttributesException(spaceTotal, spaceToken, status); + } - /** - * Method that returns the number of files in the SRM request that are - * currently in progress. - */ - public TSpaceToken getSpaceToken() { + this.spaceTotal = spaceTotal; + this.spaceGuaranteed = spaceGuaranteed; + this.spaceLifetime = spaceLifetime; + this.spaceToken = spaceToken; + this.status = status; + } - return spaceToken; - } + /** Method that returns the number of files in the SRM request that are currently in progress. */ + public TSpaceToken getSpaceToken() { - /** - * Method that returns the number of files in the SRM request that are - * currently finished. - */ - public TSizeInBytes getGuaranteedSize() { + return spaceToken; + } - return spaceGuaranteed; - } + /** Method that returns the number of files in the SRM request that are currently finished. 
*/ + public TSizeInBytes getGuaranteedSize() { - public TSizeInBytes getTotalSize() { + return spaceGuaranteed; + } - return spaceTotal; - } + public TSizeInBytes getTotalSize() { - public TLifeTimeInSeconds getLifeTimeInSeconds() { + return spaceTotal; + } - return spaceLifetime; - } + public TLifeTimeInSeconds getLifeTimeInSeconds() { - /** - * Method that return TReturnStatus status. - */ - public TReturnStatus getStatus() { + return spaceLifetime; + } - return status; - } + /** Method that return TReturnStatus status. */ + public TReturnStatus getStatus() { - /** - * Method that returns TRetentionPolicyInfo. - */ - public TRetentionPolicyInfo getRetentionPolicyInfo() { + return status; + } - return retentionPolicyInfo; - } + /** Method that returns TRetentionPolicyInfo. */ + public TRetentionPolicyInfo getRetentionPolicyInfo() { - public void setRetentionPolicyInfo(TRetentionPolicyInfo retentionPolicyInfo) { + return retentionPolicyInfo; + } - this.retentionPolicyInfo = retentionPolicyInfo; - } + public void setRetentionPolicyInfo(TRetentionPolicyInfo retentionPolicyInfo) { - public void setStatus(TReturnStatus status) { + this.retentionPolicyInfo = retentionPolicyInfo; + } - this.status = status; - } + public void setStatus(TReturnStatus status) { - /** - * Print - */ - public void print() { + this.status = status; + } - log.info("****SRM_SR_OutputData******"); - log.info("TSizeInBytesTotal: " + spaceTotal); - log.info("TSizeInBytesGuar: " + spaceGuaranteed); - log.info("LifeTimeInSeconds: " + spaceLifetime); - log.info("TSpaceToken: " + spaceToken); - log.info("TReturnStatus: " + status); + /** Print */ + public void print() { - } + log.info("****SRM_SR_OutputData******"); + log.info("TSizeInBytesTotal: " + spaceTotal); + log.info("TSizeInBytesGuar: " + spaceGuaranteed); + log.info("LifeTimeInSeconds: " + spaceLifetime); + log.info("TSpaceToken: " + spaceToken); + log.info("TReturnStatus: " + status); + } - // @Override - public boolean isSuccess() { + // @Override + public boolean isSuccess() { - // TODO Auto-generated method stub - return true; - } + // TODO Auto-generated method stub + return true; + } } diff --git a/src/main/java/it/grid/storm/synchcall/surl/ExpiredTokenException.java b/src/main/java/it/grid/storm/synchcall/surl/ExpiredTokenException.java index eebfc751..9d92efb2 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/ExpiredTokenException.java +++ b/src/main/java/it/grid/storm/synchcall/surl/ExpiredTokenException.java @@ -1,29 +1,24 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; public class ExpiredTokenException extends RuntimeException { - /** - * - */ - private static final long serialVersionUID = 1016481239170741542L; + /** */ + private static final long serialVersionUID = 1016481239170741542L; - public ExpiredTokenException() { - } + public ExpiredTokenException() {} - public ExpiredTokenException(String message) { - super(message); - } + public ExpiredTokenException(String message) { + super(message); + } - public ExpiredTokenException(Throwable cause) { - super(cause); - } - - public ExpiredTokenException(String message, Throwable cause) { - super(message, cause); - } + public ExpiredTokenException(Throwable cause) { + super(cause); + } + public ExpiredTokenException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStore.java b/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStore.java index 23ef5196..bb4f5c46 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStore.java +++ b/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStore.java @@ -1,16 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.RemovalListener; +import com.google.common.cache.RemovalNotification; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import it.grid.storm.authz.AuthzException; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; @@ -19,25 +23,16 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.RemovalListener; -import com.google.common.cache.RemovalNotification; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - public enum SURLStatusStore implements SURLStatusStoreIF { - INSTANCE; static class Entry { - public static Entry from(TRequestToken token, GridUserInterface usr, - Map statuses) { + public static Entry from( + TRequestToken token, GridUserInterface usr, Map statuses) { return new Entry(token, usr, statuses); } @@ -57,23 +52,16 @@ private Entry(TRequestToken tok, GridUserInterface usr, Map statusStore; - final EnumSet interestingStatuses = EnumSet.of(TStatusCode.SRM_SPACE_AVAILABLE, - TStatusCode.SRM_FILE_BUSY, TStatusCode.SRM_FILE_PINNED, TStatusCode.SRM_REQUEST_QUEUED, - TStatusCode.SRM_REQUEST_INPROGRESS); + final EnumSet interestingStatuses = + EnumSet.of( + TStatusCode.SRM_SPACE_AVAILABLE, + TStatusCode.SRM_FILE_BUSY, + TStatusCode.SRM_FILE_PINNED, + TStatusCode.SRM_REQUEST_QUEUED, + TStatusCode.SRM_REQUEST_INPROGRESS); final ScheduledExecutorService cacheStatsLoggerService = Executors.newScheduledThreadPool(1); private SURLStatusStore() { - statusStore = CacheBuilder.newBuilder() - .maximumSize(1000) - 
.recordStats() - .expireAfterWrite(10L, TimeUnit.MINUTES) - .concurrencyLevel(1) - .removalListener(new RemovalListener() { - - @Override - public void onRemoval(RemovalNotification notification) { - - if (notification.wasEvicted()) { - logger.debug("SURLStatusStore: Entry {} evicted. Cause: {}", notification.getValue(), - notification.getCause()); + statusStore = + CacheBuilder.newBuilder() + .maximumSize(1000) + .recordStats() + .expireAfterWrite(10L, TimeUnit.MINUTES) + .concurrencyLevel(1) + .removalListener( + new RemovalListener() { + + @Override + public void onRemoval(RemovalNotification notification) { + + if (notification.wasEvicted()) { + logger.debug( + "SURLStatusStore: Entry {} evicted. Cause: {}", + notification.getValue(), + notification.getCause()); + } + } + }) + .build(); + + Runnable cacheStatsLogger = + new Runnable() { + + @Override + public void run() { + + logger.debug( + "SURLStatusStore status: size={}, stats={}", + statusStore.size(), + statusStore.stats()); } - } - }) - .build(); - - Runnable cacheStatsLogger = new Runnable() { - - @Override - public void run() { - - logger.debug("SURLStatusStore status: size={}, stats={}", statusStore.size(), - statusStore.stats()); - - } - }; + }; cacheStatsLoggerService.scheduleWithFixedDelay(cacheStatsLogger, 1L, 1L, TimeUnit.MINUTES); } @@ -152,7 +154,6 @@ public int abortAllRequestForSURL(TSURL surl) { if (e.getValue().surlStatuses.isEmpty()) { toBeRemoved.add(e.getKey()); - } } } @@ -171,8 +172,9 @@ public void authzCheck(Entry e, GridUserInterface user) { if (user != null && e.user != null) { if (!user.getDn().equals(e.user.getDn())) { - String errorMsg = String.format("User %s is not authorized to access request with token %s", - user.getDn(), e.token); + String errorMsg = + String.format( + "User %s is not authorized to access request with token %s", user.getDn(), e.token); throw new AuthzException(errorMsg); } @@ -184,13 +186,21 @@ public void authzCheck(Entry e, GridUserInterface user) { } @Override - public int checkedUpdate(GridUserInterface user, List surls, - TStatusCode requiredStatusCode, TStatusCode newStatusCode, String explanation) { + public int checkedUpdate( + GridUserInterface user, + List surls, + TStatusCode requiredStatusCode, + TStatusCode newStatusCode, + String explanation) { logger.debug( "checkedUpdate: user={}, surls={}, requiredStatusCode={}, " + "newStatusCode={}, explanation={}", - user, surls, requiredStatusCode, newStatusCode, explanation); + user, + surls, + requiredStatusCode, + newStatusCode, + explanation); int updateCount = 0; for (Map.Entry e : statusStore.asMap().entrySet()) { @@ -211,14 +221,15 @@ public int checkedUpdate(GridUserInterface user, List surls, logger.warn( "checkedUpdate: status not updated for surl {}. " + "inCacheStatus does not match requiredStatus. 
{} != {}", - s, inCacheStatus, requiredStatusCode); + s, + inCacheStatus, + requiredStatusCode); } else { entry.surlStatuses.put(s, new TReturnStatus(newStatusCode, explanation)); updateCount++; } - } } @@ -234,15 +245,22 @@ public int checkedUpdate(GridUserInterface user, List surls, } @Override - public int checkedUpdate(TRequestToken requestToken, List surls, - TStatusCode requiredStatusCode, TStatusCode newStatusCode, String explanation) - throws UnknownTokenException, ExpiredTokenException, - UnknownSurlException { + public int checkedUpdate( + TRequestToken requestToken, + List surls, + TStatusCode requiredStatusCode, + TStatusCode newStatusCode, + String explanation) + throws UnknownTokenException, ExpiredTokenException, UnknownSurlException { logger.debug( "checkedUpdate: token={}, surls={}, requiredStatusCode={}, " + "newStatusCode={}, explanation={}", - requestToken, surls, requiredStatusCode, newStatusCode, explanation); + requestToken, + surls, + requiredStatusCode, + newStatusCode, + explanation); Entry e = statusStore.getIfPresent(requestToken); @@ -265,7 +283,9 @@ public int checkedUpdate(TRequestToken requestToken, List surls, logger.warn( "checkedUpdate: status not updated for surl {}. " + "inCacheStatus does not match requiredStatus. {} != {}", - s, inCacheStatus, requiredStatusCode); + s, + inCacheStatus, + requiredStatusCode); } else { e.surlStatuses.put(s, new TReturnStatus(newStatusCode, explanation)); updateCount++; @@ -278,7 +298,6 @@ public int checkedUpdate(TRequestToken requestToken, List surls, } return updateCount; - } private boolean entryUserMatchesRequestUser(Entry e, GridUserInterface user) { @@ -291,8 +310,8 @@ private boolean entryUserMatchesRequestUser(Entry e, GridUserInterface user) { } @Override - public Map getPinnedSURLsForUser(GridUserInterface user, - TRequestToken token, List surls) { + public Map getPinnedSURLsForUser( + GridUserInterface user, TRequestToken token, List surls) { Map statusMap = Maps.newHashMap(); @@ -342,7 +361,6 @@ public Map getSurlPerTokenStatuses(TSURL surl) if (e.getValue().surlStatuses.containsKey(surl)) { statusMap.put(e.getValue().token, e.getValue().surlStatuses.get(surl)); } - } return statusMap; @@ -358,12 +376,11 @@ public Map getSurlStatuses(GridUserInterface user, TReques return e.surlStatuses; } return null; - } @Override - public Map getSurlStatuses(GridUserInterface user, TRequestToken token, - List surls) { + public Map getSurlStatuses( + GridUserInterface user, TRequestToken token, List surls) { return getSurlStatuses(user, token); } @@ -404,8 +421,8 @@ private boolean hasInterestingStatus(Map surlStatuses) { } @Override - public void store(TRequestToken requestToken, GridUserInterface user, - Map surlStatuses) { + public void store( + TRequestToken requestToken, GridUserInterface user, Map surlStatuses) { Entry e = Entry.from(requestToken, user, surlStatuses); @@ -447,7 +464,6 @@ public int update(TRequestToken requestToken, List surls, TReturnStatus s } return updateCount; - } @Override @@ -482,5 +498,4 @@ public int update(TRequestToken requestToken, TSURL surl, TReturnStatus status) return update(requestToken, Arrays.asList(surl), status); } - } diff --git a/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStoreIF.java b/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStoreIF.java index ecc675d2..ae9a590e 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStoreIF.java +++ b/src/main/java/it/grid/storm/synchcall/surl/SURLStatusStoreIF.java @@ -1,6 +1,5 @@ /** - * Copyright (c) 
Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; @@ -9,7 +8,6 @@ import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.srm.types.TSURL; import it.grid.storm.srm.types.TStatusCode; - import java.util.Collection; import java.util.List; import java.util.Map; @@ -18,32 +16,40 @@ public interface SURLStatusStoreIF { public int abortAllRequestForSURL(TSURL surl); - public int checkedUpdate(GridUserInterface user, List surls, - TStatusCode requiredStatusCode, TStatusCode newStatusCode, String explanation); - - public int checkedUpdate(TRequestToken requestToken, List surls, - TStatusCode requiredStatusCode, TStatusCode newStatusCode, String explanation) - throws UnknownTokenException, ExpiredTokenException, - UnknownSurlException; + public int checkedUpdate( + GridUserInterface user, + List surls, + TStatusCode requiredStatusCode, + TStatusCode newStatusCode, + String explanation); + + public int checkedUpdate( + TRequestToken requestToken, + List surls, + TStatusCode requiredStatusCode, + TStatusCode newStatusCode, + String explanation) + throws UnknownTokenException, ExpiredTokenException, UnknownSurlException; public Map getSurlPerTokenStatuses(TSURL surl) throws UnknownSurlException; public Map getSurlStatuses(GridUserInterface user, TRequestToken token); - public Map getSurlStatuses(GridUserInterface user, TRequestToken token, - List surls); + public Map getSurlStatuses( + GridUserInterface user, TRequestToken token, List surls); public Collection getSurlStatuses(GridUserInterface user, TSURL surl) throws UnknownSurlException; - public Map getPinnedSURLsForUser(GridUserInterface user, - TRequestToken token, List surls); + public Map getPinnedSURLsForUser( + GridUserInterface user, TRequestToken token, List surls); public boolean hasEntryForToken(TRequestToken requestToken); - public void store(TRequestToken requestToken, GridUserInterface user, - Map surlStatuses) throws TokenDuplicationException; + public void store( + TRequestToken requestToken, GridUserInterface user, Map surlStatuses) + throws TokenDuplicationException; public int update(TRequestToken requestToken, List surls, TReturnStatus status) throws UnknownTokenException, ExpiredTokenException, UnknownSurlException; @@ -52,5 +58,4 @@ public int update(TRequestToken requestToken, List surls, TReturnStatus s public int update(TRequestToken requestToken, TSURL surl, TReturnStatus status) throws UnknownTokenException, ExpiredTokenException, UnknownSurlException; - } diff --git a/src/main/java/it/grid/storm/synchcall/surl/TokenDuplicationException.java b/src/main/java/it/grid/storm/synchcall/surl/TokenDuplicationException.java index e0aa9ead..f77072cb 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/TokenDuplicationException.java +++ b/src/main/java/it/grid/storm/synchcall/surl/TokenDuplicationException.java @@ -1,34 +1,32 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; public class TokenDuplicationException extends Exception { - private static final long serialVersionUID = 324848807934745061L; + private static final long serialVersionUID = 324848807934745061L; - public TokenDuplicationException() { + public TokenDuplicationException() { - // TODO Auto-generated constructor stub - } + // TODO Auto-generated constructor stub + } - public TokenDuplicationException(String message) { + public TokenDuplicationException(String message) { - super(message); - // TODO Auto-generated constructor stub - } + super(message); + // TODO Auto-generated constructor stub + } - public TokenDuplicationException(Throwable cause) { + public TokenDuplicationException(Throwable cause) { - super(cause); - // TODO Auto-generated constructor stub - } + super(cause); + // TODO Auto-generated constructor stub + } - public TokenDuplicationException(String message, Throwable cause) { - - super(message, cause); - // TODO Auto-generated constructor stub - } + public TokenDuplicationException(String message, Throwable cause) { + super(message, cause); + // TODO Auto-generated constructor stub + } } diff --git a/src/main/java/it/grid/storm/synchcall/surl/UnknownSurlException.java b/src/main/java/it/grid/storm/synchcall/surl/UnknownSurlException.java index 753e98d0..1adc0797 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/UnknownSurlException.java +++ b/src/main/java/it/grid/storm/synchcall/surl/UnknownSurlException.java @@ -1,31 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; public class UnknownSurlException extends Exception { - /** - * - */ - private static final long serialVersionUID = 4930389310396856355L; + /** */ + private static final long serialVersionUID = 4930389310396856355L; - public UnknownSurlException() { - } + public UnknownSurlException() {} - public UnknownSurlException(String message) { - super(message); - } + public UnknownSurlException(String message) { + super(message); + } - public UnknownSurlException(Throwable cause) { + public UnknownSurlException(Throwable cause) { - super(cause); - } + super(cause); + } - public UnknownSurlException(String message, Throwable cause) { - - super(message, cause); - } + public UnknownSurlException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/synchcall/surl/UnknownTokenException.java b/src/main/java/it/grid/storm/synchcall/surl/UnknownTokenException.java index bf7133f9..34c2c99a 100644 --- a/src/main/java/it/grid/storm/synchcall/surl/UnknownTokenException.java +++ b/src/main/java/it/grid/storm/synchcall/surl/UnknownTokenException.java @@ -1,29 +1,26 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.synchcall.surl; public class UnknownTokenException extends RuntimeException { - private static final long serialVersionUID = -9056770694204136172L; + private static final long serialVersionUID = -9056770694204136172L; - public UnknownTokenException() { - } + public UnknownTokenException() {} - public UnknownTokenException(String message) { + public UnknownTokenException(String message) { - super(message); - } + super(message); + } - public UnknownTokenException(Throwable cause) { + public UnknownTokenException(Throwable cause) { - super(cause); - } + super(cause); + } - public UnknownTokenException(String message, Throwable cause) { - - super(message, cause); - } + public UnknownTokenException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/TapeRecallCatalog.java b/src/main/java/it/grid/storm/tape/recalltable/TapeRecallCatalog.java index 04345b8f..10e1650b 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/TapeRecallCatalog.java +++ b/src/main/java/it/grid/storm/tape/recalltable/TapeRecallCatalog.java @@ -1,17 +1,13 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable; import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.BOL; import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.PTG; import com.google.common.collect.Lists; - import it.grid.storm.asynch.Suspendedable; import it.grid.storm.catalogs.BoLPersistentChunkData; import it.grid.storm.catalogs.PersistentChunkData; @@ -22,10 +18,6 @@ import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -34,7 +26,8 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TapeRecallCatalog { @@ -81,7 +74,8 @@ public int getNumberTaskInProgress() throws DataAccessException { } catch (DataAccessException e) { log.error( "Unable to retrieve the number of tasks currently in progress. DataAccessException: {}", - e.getMessage(), e); + e.getMessage(), + e); throw e; } return result; @@ -100,7 +94,8 @@ public int getNumberTaskInProgress(String voName) throws DataAccessException { } catch (DataAccessException e) { log.error( "Unable to retrieve the number of tasks currently in progress. DataAccessException: {}", - e.getMessage(), e); + e.getMessage(), + e); throw e; } return result; @@ -117,8 +112,10 @@ public int getNumberTaskQueued() throws DataAccessException { try { result = tapeRecallDAO.getNumberQueued(); } catch (DataAccessException e) { - log.error("Unable to retrieve the number of tasks queued. DataAccessException: {}", - e.getMessage(), e); + log.error( + "Unable to retrieve the number of tasks queued. 
DataAccessException: {}", + e.getMessage(), + e); throw e; } return result; @@ -135,15 +132,16 @@ public int getNumberTaskQueued(String voName) throws DataAccessException { try { result = tapeRecallDAO.getNumberQueued(voName); } catch (DataAccessException e) { - log.error("Unable to retrieve the number of tasks queued. DataAccessException: {}", - e.getMessage(), e); + log.error( + "Unable to retrieve the number of tasks queued. DataAccessException: {}", + e.getMessage(), + e); throw e; } return result; } /** - * * Determines how many task rows have a queued state and their deferred start time is elapsed * * @return @throws DataAccessException @@ -156,7 +154,8 @@ public int getReadyForTakeOver() throws DataAccessException { } catch (DataAccessException e) { log.error( "Unable to retrieve the number of tasks ready for the take-over. DataAccessException: {}", - e.getMessage(), e); + e.getMessage(), + e); throw e; } return result; @@ -176,15 +175,14 @@ public int getReadyForTakeOver(String voName) throws DataAccessException { } catch (DataAccessException e) { log.error( "Unable to retrieve the number of tasks ready for the take-over. DataAccessException: {}", - e.getMessage(), e); + e.getMessage(), + e); throw e; } return result; } - /** - * @param taskId @param requestToken @return @throws DataAccessException - */ + /** @param taskId @param requestToken @return @throws DataAccessException */ public TapeRecallTO getTask(UUID taskId, String requestToken) throws DataAccessException { return tapeRecallDAO.getTask(taskId, requestToken); @@ -194,32 +192,26 @@ public TapeRecallTO getTask(UUID taskId, String requestToken) throws DataAccessE * Verifies that a recall task with the given taskId and request token exists on the database * * @param taskId @param requestToken @return true if the recall task exists @throws - * DataAccessException + * DataAccessException */ public boolean existsTask(UUID taskId, String requestToken) throws DataAccessException { return tapeRecallDAO.existsTask(taskId, requestToken); } - /** - * @param groupTaskId @return @throws DataAccessException - */ + /** @param groupTaskId @return @throws DataAccessException */ public List getGroupTasks(UUID groupTaskId) throws DataAccessException { return tapeRecallDAO.getGroupTasks(groupTaskId); } - /** - * @param groupTaskId @return @throws DataAccessException - */ + /** @param groupTaskId @return @throws DataAccessException */ public boolean existsGroupTask(UUID groupTaskId) throws DataAccessException { return tapeRecallDAO.existsGroupTask(groupTaskId); } - /** - * @param maxSize The max number of purged requests @return the number of purged requests - */ + /** @param maxSize The max number of purged requests @return the number of purged requests */ public int purgeCatalog(long expirationTime, int maxSize) { log.debug("purging '{}' completed tasks older than '{}' seconds ...", maxSize, expirationTime); @@ -248,9 +240,7 @@ public List takeoverNTasksWithDoubles(int numberOfTaks) { return taskList; } - /** - * @param numberOfTasks @return - */ + /** @param numberOfTasks @return */ public List getAllInProgressTasks(int numberOfTaks) { List taskList; @@ -268,9 +258,7 @@ public List getAllInProgressTasks(int numberOfTaks) { return taskList; } - /** - * @return - */ + /** @return */ public TapeRecallTO takeoverTask() { TapeRecallTO task = null; @@ -282,9 +270,7 @@ public TapeRecallTO takeoverTask() { return task; } - /** - * @param voName @return - */ + /** @param voName @return */ public TapeRecallTO takeoverTask(String voName) { 
TapeRecallTO task = null; @@ -296,9 +282,7 @@ public TapeRecallTO takeoverTask(String voName) { return task; } - /** - * @param numberOfTaks @param voName @return - */ + /** @param numberOfTaks @param voName @return */ public List takeoverTasks(int numberOfTaks, String voName) { List taskList = Lists.newArrayList(); @@ -314,7 +298,7 @@ public List takeoverTasks(int numberOfTaks, String voName) { * Method used by PtGChunk and BoLChunk to request the recall of a file * * @param chunk @param voName @param absoluteFileName @return the id of the recall task in charge - * of recall the file @throws DataAccessException + * of recall the file @throws DataAccessException */ public UUID insertTask(Suspendedable chunk, String voName, String absoluteFileName) throws DataAccessException { @@ -352,21 +336,24 @@ public UUID insertNewTask(TapeRecallTO task) throws DataAccessException { * setting the status and the group id to the one of the found row */ UUID groupTaskId = - tapeRecallDAO.insertCloneTask(task, new int[] {TapeRecallStatus.QUEUED.getStatusId(), - TapeRecallStatus.IN_PROGRESS.getStatusId()}, newGroupTaskId); + tapeRecallDAO.insertCloneTask( + task, + new int[] { + TapeRecallStatus.QUEUED.getStatusId(), TapeRecallStatus.IN_PROGRESS.getStatusId() + }, + newGroupTaskId); if (newGroupTaskId != groupTaskId) { log.debug( "Task with taskId {} of request with token {} has benn added to an existent group: {}", - task.getTaskId(), task.getRequestToken().getValue(), groupTaskId); + task.getTaskId(), + task.getRequestToken().getValue(), + groupTaskId); } return groupTaskId; } - /** - * - * @param chunkData @return @throws DataAccessException - */ + /** @param chunkData @return @throws DataAccessException */ private TapeRecallTO getTaskFromChunk(RequestData chunkData) throws DataAccessException { TapeRecallTO task = new TapeRecallTO(); @@ -408,9 +395,11 @@ public boolean changeGroupTaskStatus(UUID groupTaskId, TapeRecallStatus status, throws DataAccessException { synchronized (this) { - if (!tapeRecallDAO.setGroupTaskStatus(groupTaskId, status.getStatusId(), timestamp)) { - log.debug("Updating to status {} at {} didn't affect groupTask {}", status, timestamp, + log.debug( + "Updating to status {} at {} didn't affect groupTask {}", + status, + timestamp, groupTaskId); return false; } @@ -426,11 +415,9 @@ public boolean changeGroupTaskStatus(UUID groupTaskId, TapeRecallStatus status, } } - /** - * @param taskId @param recallTaskStatus @throws IllegalArgumentException - */ - private void updateChuncksStatus(Collection chunkBucket, - TapeRecallStatus recallTaskStatus) { + /** @param taskId @param recallTaskStatus @throws IllegalArgumentException */ + private void updateChuncksStatus( + Collection chunkBucket, TapeRecallStatus recallTaskStatus) { if (chunkBucket == null || chunkBucket.isEmpty() || recallTaskStatus == null) { log.error("Unable to perform the final status update. Provided invalid arguments"); @@ -441,5 +428,4 @@ private void updateChuncksStatus(Collection chunkBucket, chunk.completeRequest(recallTaskStatus); } } - } diff --git a/src/main/java/it/grid/storm/tape/recalltable/TapeRecallException.java b/src/main/java/it/grid/storm/tape/recalltable/TapeRecallException.java index 40f09783..713f5ba8 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/TapeRecallException.java +++ b/src/main/java/it/grid/storm/tape/recalltable/TapeRecallException.java @@ -1,29 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable; -/** - * @author ritz - * - */ +/** @author ritz */ public class TapeRecallException extends Exception { - /** - * - */ + /** */ private static final long serialVersionUID = -7422152723748929996L; - /** - * @param string - */ + /** @param string */ public TapeRecallException(String msg) { super(msg); } - } diff --git a/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusLogic.java b/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusLogic.java index 3474ae2d..12b42593 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusLogic.java +++ b/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusLogic.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; @@ -14,91 +13,105 @@ import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.tape.recalltable.TapeRecallCatalog; import it.grid.storm.tape.recalltable.TapeRecallException; - import java.util.Date; import java.util.UUID; - import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class PutTapeRecallStatusLogic { - private static final Logger log = LoggerFactory - .getLogger(PutTapeRecallStatusLogic.class); - - /** - * @param requestToken - * @param stori - * @return - * @throws TapeRecallException - */ - public static Response serveRequest(String requestToken, StoRI stori) - throws TapeRecallException { - - LocalFile localFile = stori.getLocalFile(); - boolean fileOnDisk; - - try { - fileOnDisk = localFile.isOnDisk(); - } catch (FSException e) { - log.error("Unable to test file {} presence on disk. FSException {}" , localFile.getAbsolutePath() , e.getMessage() , e); - throw new TapeRecallException("Error checking file existence"); - } - - if (!fileOnDisk) { - return Response.ok(false, TEXT_PLAIN_TYPE).status(200).build(); - } - - if (!stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { - // tape not enable for StoRI filesystem, nothing to do - return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); - } - - String pfn = localFile.getAbsolutePath(); - UUID taskId = TapeRecallTO.buildTaskIdFromFileName(pfn); - TapeRecallCatalog rtCat = new TapeRecallCatalog(); - boolean exists = false; - try { - exists = rtCat.existsTask(taskId, requestToken); - } catch (DataAccessException e) { - log.error("Error checking existence of a recall task for taskId={} requestToken={}. DataAccessException: {}" , taskId , requestToken , e.getMessage() , e); - throw new TapeRecallException("Error reading from tape recall table"); - } - if (!exists) { - // no recall tasks for this file, nothing to do - return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); - } - - TapeRecallTO task; - try { - task = rtCat.getTask(taskId, requestToken); - } catch (DataAccessException e) { - log.error("Unable to update task recall status because unable to retrieve groupTaskId for token {}. 
DataAccessException: {}", requestToken , e.getMessage(),e); - throw new TapeRecallException("Error reading from tape recall table"); - } - - if (TapeRecallStatus.getRecallTaskStatus(task.getStatusId()).equals(SUCCESS)) { - // status already updated, nothing to do - return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); - } - - UUID groupTaskId = task.getGroupTaskId(); - boolean updated; - try { - updated = rtCat.changeGroupTaskStatus(groupTaskId, SUCCESS, new Date()); - } catch (DataAccessException e) { - log.error("Unable to update task recall status for token {} with groupTaskId={}. DataAccessException : {}", requestToken , groupTaskId , e.getMessage() , e); - throw new TapeRecallException("Error updating tape recall table"); - } - if (updated) { - log.info("Task status set to SUCCESS. groupTaskId={} requestToken={} pfn={}" , groupTaskId , requestToken , pfn); - } - return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); - } + private static final Logger log = LoggerFactory.getLogger(PutTapeRecallStatusLogic.class); + + /** + * @param requestToken + * @param stori + * @return + * @throws TapeRecallException + */ + public static Response serveRequest(String requestToken, StoRI stori) throws TapeRecallException { + + LocalFile localFile = stori.getLocalFile(); + boolean fileOnDisk; + + try { + fileOnDisk = localFile.isOnDisk(); + } catch (FSException e) { + log.error( + "Unable to test file {} presence on disk. FSException {}", + localFile.getAbsolutePath(), + e.getMessage(), + e); + throw new TapeRecallException("Error checking file existence"); + } + + if (!fileOnDisk) { + return Response.ok(false, TEXT_PLAIN_TYPE).status(200).build(); + } + + if (!stori.getVirtualFileSystem().getStorageClassType().isTapeEnabled()) { + // tape not enable for StoRI filesystem, nothing to do + return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); + } + + String pfn = localFile.getAbsolutePath(); + UUID taskId = TapeRecallTO.buildTaskIdFromFileName(pfn); + TapeRecallCatalog rtCat = new TapeRecallCatalog(); + boolean exists = false; + try { + exists = rtCat.existsTask(taskId, requestToken); + } catch (DataAccessException e) { + log.error( + "Error checking existence of a recall task for taskId={} requestToken={}. DataAccessException: {}", + taskId, + requestToken, + e.getMessage(), + e); + throw new TapeRecallException("Error reading from tape recall table"); + } + if (!exists) { + // no recall tasks for this file, nothing to do + return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); + } + + TapeRecallTO task; + try { + task = rtCat.getTask(taskId, requestToken); + } catch (DataAccessException e) { + log.error( + "Unable to update task recall status because unable to retrieve groupTaskId for token {}. DataAccessException: {}", + requestToken, + e.getMessage(), + e); + throw new TapeRecallException("Error reading from tape recall table"); + } + + if (TapeRecallStatus.getRecallTaskStatus(task.getStatusId()).equals(SUCCESS)) { + // status already updated, nothing to do + return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); + } + + UUID groupTaskId = task.getGroupTaskId(); + boolean updated; + try { + updated = rtCat.changeGroupTaskStatus(groupTaskId, SUCCESS, new Date()); + } catch (DataAccessException e) { + log.error( + "Unable to update task recall status for token {} with groupTaskId={}. 
DataAccessException : {}", + requestToken, + groupTaskId, + e.getMessage(), + e); + throw new TapeRecallException("Error updating tape recall table"); + } + if (updated) { + log.info( + "Task status set to SUCCESS. groupTaskId={} requestToken={} pfn={}", + groupTaskId, + requestToken, + pfn); + } + return Response.ok(true, TEXT_PLAIN_TYPE).status(200).build(); + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusValidator.java b/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusValidator.java index 807d5ac9..f6952552 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusValidator.java +++ b/src/main/java/it/grid/storm/tape/recalltable/model/PutTapeRecallStatusValidator.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; @@ -10,130 +9,126 @@ import it.grid.storm.srm.types.TSURL; import it.grid.storm.util.SURLValidator; import it.grid.storm.util.TokenValidator; - import java.util.StringTokenizer; - import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PutTapeRecallStatusValidator implements RequestValidator { - private static final Logger log = LoggerFactory - .getLogger(PutTapeRecallStatusValidator.class); - - private String requestToken = null; - private StoRI stori = null; - private String inputString = null; - private Response validationResponse = null; + private static final Logger log = LoggerFactory.getLogger(PutTapeRecallStatusValidator.class); - public PutTapeRecallStatusValidator(String inputString) { + private String requestToken = null; + private StoRI stori = null; + private String inputString = null; + private Response validationResponse = null; - this.inputString = inputString; - } + public PutTapeRecallStatusValidator(String inputString) { - /** - * Parse and validate input. - *

- * If this method returns true the input data can be retrieved - * with the methods: {@link #getRequestToken()} and {@link #getStoRI()}. - *

- * If this method returns false the response can be retrieved - * with the method {@link #getResponse()}. - * - * @return true for successful validation process, - * false otherwise. - */ - public boolean validate() { + this.inputString = inputString; + } - StringTokenizer tokenizer = new StringTokenizer(inputString, "\n"); + /** + * Parse and validate input. + * + *

If this method returns true the input data can be retrieved with the methods: + * {@link #getRequestToken()} and {@link #getStoRI()}. + * + *

If this method returns false the response can be retrieved with the method + * {@link #getResponse()}. + * + * @return true for successful validation process, false otherwise. + */ + public boolean validate() { - if (tokenizer.countTokens() != 2) { + StringTokenizer tokenizer = new StringTokenizer(inputString, "\n"); - log.trace("putTaskStatus() - input error"); + if (tokenizer.countTokens() != 2) { - validationResponse = Response.status(400).build(); - return false; + log.trace("putTaskStatus() - input error"); - } + validationResponse = Response.status(400).build(); + return false; + } - String requestTokenInput = tokenizer.nextToken(); - String surlInput = tokenizer.nextToken(); + String requestTokenInput = tokenizer.nextToken(); + String surlInput = tokenizer.nextToken(); - if ((!requestTokenInput.startsWith("requestToken=")) - || (!surlInput.startsWith("surl="))) { + if ((!requestTokenInput.startsWith("requestToken=")) || (!surlInput.startsWith("surl="))) { - log.trace("putTaskStatus() - input error"); + log.trace("putTaskStatus() - input error"); - validationResponse = Response.status(400).build(); - return false; + validationResponse = Response.status(400).build(); + return false; + } - } + requestToken = requestTokenInput.substring(requestTokenInput.indexOf('=') + 1); + String surlString = surlInput.substring(surlInput.indexOf('=') + 1); - requestToken = requestTokenInput - .substring(requestTokenInput.indexOf('=') + 1); - String surlString = surlInput.substring(surlInput.indexOf('=') + 1); + if ((requestToken.length() == 0) || (surlString.length() == 0)) { - if ((requestToken.length() == 0) || (surlString.length() == 0)) { + log.trace("putTaskStatus() - input error"); - log.trace("putTaskStatus() - input error"); + validationResponse = Response.status(400).build(); + return false; + } - validationResponse = Response.status(400).build(); - return false; + if (!TokenValidator.valid(requestToken)) { + validationResponse = + Response.status(400).entity("Invalid token: " + requestToken + " \n\n").build(); + return false; + } - } + if (!validateSurl(surlString)) { + return false; + } - if(!TokenValidator.valid(requestToken)){ - validationResponse = Response.status(400).entity("Invalid token: " + requestToken +" \n\n").build(); - return false; - } - - if (!validateSurl(surlString)) { - return false; - } + return true; + } - return true; - } + public String getRequestToken() { - public String getRequestToken() { + return requestToken; + } - return requestToken; - } + public StoRI getStoRI() { - public StoRI getStoRI() { + return stori; + } - return stori; - } + public Response getResponse() { - public Response getResponse() { + return validationResponse; + } - return validationResponse; - } + private boolean validateSurl(String surlString) { - private boolean validateSurl(String surlString) { + TSURL surl; - TSURL surl; + if (!SURLValidator.valid(surlString)) { + validationResponse = + Response.status(400).entity("Invalid surl: " + surlString + "\n\n").build(); + return false; + } - if(!SURLValidator.valid(surlString)){ - validationResponse = Response.status(400).entity("Invalid surl: " + surlString + "\n\n").build(); - return false; - } - - try { + try { - surl = TSURL.makeFromStringValidate(surlString); + surl = TSURL.makeFromStringValidate(surlString); - } catch (InvalidTSURLAttributesException e) { - validationResponse = Response.status(400).build(); - return false; - } - try { - stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); - } catch (Exception e) { - 
log.warn("Unable to build a stori for surl {} UnapprochableSurlException: {}" , surl , e.getMessage(),e); - return false; - } - return true; - } + } catch (InvalidTSURLAttributesException e) { + validationResponse = Response.status(400).build(); + return false; + } + try { + stori = NamespaceDirector.getNamespace().resolveStoRIbySURL(surl); + } catch (Exception e) { + log.warn( + "Unable to build a stori for surl {} UnapprochableSurlException: {}", + surl, + e.getMessage(), + e); + return false; + } + return true; + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/model/RequestValidator.java b/src/main/java/it/grid/storm/tape/recalltable/model/RequestValidator.java index e5a85d15..c3476a34 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/model/RequestValidator.java +++ b/src/main/java/it/grid/storm/tape/recalltable/model/RequestValidator.java @@ -1,11 +1,10 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; @FunctionalInterface public interface RequestValidator { - public boolean validate(); + public boolean validate(); } diff --git a/src/main/java/it/grid/storm/tape/recalltable/model/TapeRecallStatus.java b/src/main/java/it/grid/storm/tape/recalltable/model/TapeRecallStatus.java index 7cf139f8..d145345f 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/model/TapeRecallStatus.java +++ b/src/main/java/it/grid/storm/tape/recalltable/model/TapeRecallStatus.java @@ -1,73 +1,76 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; import com.google.common.base.Preconditions; public enum TapeRecallStatus { + SUCCESS, + QUEUED, + IN_PROGRESS, + ERROR, + ABORTED, + UNDEFINED; - SUCCESS, QUEUED, IN_PROGRESS, ERROR, ABORTED, UNDEFINED; + public static TapeRecallStatus getRecallTaskStatus(int statusId) { - public static TapeRecallStatus getRecallTaskStatus(int statusId) { + Preconditions.checkArgument(statusId < TapeRecallStatus.values().length); + return TapeRecallStatus.values()[statusId]; + } - Preconditions.checkArgument(statusId < TapeRecallStatus.values().length); - return TapeRecallStatus.values()[statusId]; - } + public int getStatusId() { - public int getStatusId() { + return ordinal(); + } - return ordinal(); - } + /** + * @param tapeRecallStatus + * @return + */ + public boolean isFinalStatus() { - /** - * @param tapeRecallStatus - * @return - */ - public boolean isFinalStatus() { + return equals(SUCCESS) || equals(ERROR) || equals(ABORTED); + } - return equals(SUCCESS) || equals(ERROR) || equals(ABORTED); - } + /** + * Returns true if the provided status id refers to a status that does not allows state + * transitions + * + * @param statusId + * @return + */ + public static boolean isFinalStatus(int statusId) { - /** - * Returns true if the provided status id refers to a status that does not allows state - * transitions - * - * @param statusId - * @return - */ - public static boolean isFinalStatus(int statusId) { + return getRecallTaskStatus(statusId).isFinalStatus(); + } - return getRecallTaskStatus(statusId).isFinalStatus(); - } + /** + * @param otherStatusId + * @return + */ + public boolean precedes(int otherStatusId) { - /** - * @param otherStatusId - * @return - */ - public boolean precedes(int otherStatusId) { + return precedes(getRecallTaskStatus(otherStatusId)); + } - return precedes(getRecallTaskStatus(otherStatusId)); - } + /** + * Determines if there is a sequence of transitions that can bring from this status to the given + * status parameter + * + *

NOTE: valid transitions are : queued -> inProgress inProgress -> \ + * + * @param otherStatus + * @return + */ + public boolean precedes(TapeRecallStatus otherStatus) { - /** - * Determines if there is a sequence of transitions that can bring from this status to the given - * status parameter - * - * NOTE: valid transitions are : queued -> inProgress inProgress -> \ - * - * @param otherStatus - * @return - */ - public boolean precedes(TapeRecallStatus otherStatus) { - - if (equals(otherStatus) || equals(UNDEFINED) || otherStatus.equals(UNDEFINED)) { - return false; - } - if (equals(QUEUED)) { - return true; - } - return equals(IN_PROGRESS) && otherStatus.isFinalStatus(); - } + if (equals(otherStatus) || equals(UNDEFINED) || otherStatus.equals(UNDEFINED)) { + return false; + } + if (equals(QUEUED)) { + return true; + } + return equals(IN_PROGRESS) && otherStatus.isFinalStatus(); + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidator.java b/src/main/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidator.java index 93d7b379..18b5ccb0 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidator.java +++ b/src/main/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidator.java @@ -1,73 +1,69 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; import it.grid.storm.tape.recalltable.resources.TaskInsertRequest; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.Iterator; import java.util.Set; - import javax.validation.ConstraintViolation; import javax.validation.Validation; import javax.validation.Validator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Custom validation class. Jersey validation works but don't show the validation message. The * exception mapper cannot be implemented cause of a Jersey bug: * https://java.net/jira/browse/JERSEY-3153 This class can be used to validate a request object. - **/ + */ public class TaskInsertRequestValidator implements RequestValidator { - private static final Logger log = LoggerFactory.getLogger(TaskInsertRequestValidator.class); - - private Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); + private static final Logger log = LoggerFactory.getLogger(TaskInsertRequestValidator.class); - private TaskInsertRequest request; - private String errorMessage; + private Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); - public TaskInsertRequestValidator(TaskInsertRequest request) { - this.request = request; - this.errorMessage = ""; - } + private TaskInsertRequest request; + private String errorMessage; - /** - * Validate request object. - * - * @return @true if request is valid, @false otherwise and an error message can be retrieved - * with @getErrorMessage. 
- */ - @Override - public boolean validate() { + public TaskInsertRequestValidator(TaskInsertRequest request) { + this.request = request; + this.errorMessage = ""; + } - log.debug("Validating {}", request); - Set> constraintViolations = - validator.validate(request); - if (constraintViolations.isEmpty()) { - log.debug("Request {} is valid", request); - return true; - } - log.debug("Request is invalid, {} violation(s) found: {}", constraintViolations.size(), - constraintViolations); - StringBuilder b = new StringBuilder(); - Iterator> itr = constraintViolations.iterator(); - while (itr.hasNext()) { - ConstraintViolation constraint = itr.next(); - b.append(constraint.getMessage()); - if (itr.hasNext()) { - b.append(", "); - } - } - errorMessage = b.toString(); - return false; - } + /** + * Validate request object. + * + * @return @true if request is valid, @false otherwise and an error message can be retrieved + * with @getErrorMessage. + */ + @Override + public boolean validate() { - public String getErrorMessage() { - return errorMessage; - } + log.debug("Validating {}", request); + Set> constraintViolations = validator.validate(request); + if (constraintViolations.isEmpty()) { + log.debug("Request {} is valid", request); + return true; + } + log.debug( + "Request is invalid, {} violation(s) found: {}", + constraintViolations.size(), + constraintViolations); + StringBuilder b = new StringBuilder(); + Iterator> itr = constraintViolations.iterator(); + while (itr.hasNext()) { + ConstraintViolation constraint = itr.next(); + b.append(constraint.getMessage()); + if (itr.hasNext()) { + b.append(", "); + } + } + errorMessage = b.toString(); + return false; + } + public String getErrorMessage() { + return errorMessage; + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/persistence/PropertiesDB.java b/src/main/java/it/grid/storm/tape/recalltable/persistence/PropertiesDB.java index 33e24d9f..9b595550 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/persistence/PropertiesDB.java +++ b/src/main/java/it/grid/storm/tape/recalltable/persistence/PropertiesDB.java @@ -1,223 +1,204 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable.persistence; import it.grid.storm.config.Configuration; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.srm.types.TRequestToken; - import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedHashMap; import java.util.List; import java.util.Properties; import java.util.UUID; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author zappi - * - */ +/** @author zappi */ public class PropertiesDB { - private static final Logger log = LoggerFactory.getLogger(PropertiesDB.class); - private static Configuration config = Configuration.getInstance(); - private final String dataFileName = "recall-table.txt"; - private final String propertiesDBName; - - public PropertiesDB() { - - String configurationDir = PropertiesDB.config.configurationDir(); - char sep = File.separatorChar; - propertiesDBName = configurationDir + sep + "etc" + sep + "db" + sep - + dataFileName; - log.debug("Properties RecallTable-DB = {}" , propertiesDBName); - } - - public PropertiesDB(boolean test) { - - String configurationDir; - if (test) { - configurationDir = System.getProperty("user.dir"); - } else { - configurationDir = PropertiesDB.config.configurationDir(); - } - char sep = File.separatorChar; - propertiesDBName = configurationDir + sep + "etc" + sep + "db" + sep - + dataFileName; - // log.debug("Properties RecallTable-DB = " + propertiesDBName); - File tasksDBfile = new File(propertiesDBName); - boolean success = false; - try { - success = tasksDBfile.createNewFile(); - } catch (IOException e) { - log.error("Error while trying to check : {}" , propertiesDBName,e); - } - if (success) { - log.debug("TaskDB = '{}' exists ? 
{}" , propertiesDBName , success); - } - } - - // *************** PERSISTENCE METHODS **************** - - /** - * - * @param task - * @throws FileNotFoundException - * @throws IOException - * @throws DataAccessException - */ - public void addRecallTask(TapeRecallTO task) throws FileNotFoundException, - IOException, DataAccessException { - - Properties properties = new Properties(); - properties.load(new FileInputStream(propertiesDBName)); - - // Retrieve the Request-Token (unique-key) - TRequestToken taskToken = task.getRequestToken(); - if (taskToken == null) { - log.error("You are trying to store a Task without a task-id."); - throw new DataAccessException( - "You are trying to store a Task without a task-id."); - } - // Build the String related to Task-id - String taskStr = task.toString(); - // Insert the new property entry - properties.setProperty(taskToken.getValue(), taskStr); - // Store the properties into disk - properties.store(new FileOutputStream(propertiesDBName), null); - } - - public void setRecallTask(List listTasks) - throws FileNotFoundException, IOException, DataAccessException { - - Properties properties = new Properties(); - properties.load(new FileInputStream(propertiesDBName)); - - TRequestToken taskToken = null; - String taskStr = null; - for (TapeRecallTO TapeRecallTO : listTasks) { - // Retrieve the Task-id (unique-key) - taskToken = TapeRecallTO.getRequestToken(); - if (taskToken == null) { - log.error("You are trying to store a Task without a RequestToken."); - throw new DataAccessException( - "You are trying to store a Task without a Request-Token."); - } - // Build the String related to Task-id - taskStr = TapeRecallTO.toString(); - // Insert the new property entry - properties.setProperty(taskToken.getValue(), taskStr); - taskToken = null; - } - // Store the properties into disk - properties.store(new FileOutputStream(propertiesDBName), null); - } - - // public List getRecallTask(UUID taskId) throws - // FileNotFoundException, IOException, DataAccessException { - // ArrayList result = new ArrayList(); - // Properties properties = new Properties(); - // properties.load(new FileInputStream(propertiesDBName)); - // - // for (Object values : properties.values()) { - // String v = (String)values; - // TapeRecallTO task = TapeRecallBuilder.build(v); - // if (task.getTaskId().equals(taskId)) { - // result.add(task); - // } - // } - // if (result.isEmpty()) { - // log.error("Unable to retrieve the task with ID = " + taskId); - // throw new DataAccessException("Unable to find the task with ID = " + - // taskId); - // } - // return result; - // } - - public void updateRecallTask(TapeRecallTO task) throws FileNotFoundException, - IOException, DataAccessException { - - Properties properties = new Properties(); - properties.load(new FileInputStream(propertiesDBName)); - - UUID taskId = task.getTaskId(); - - // Check if the Task exists within the Properties DB - boolean taskExist = properties.containsKey(taskId.toString()); - if (!(taskExist)) { - log.error("Unable to find the task with ID = {}" , taskId); - throw new DataAccessException("Unable to find the task with ID = " - + taskId); - } else { - // Build the String related to Task-id - String taskStr = task.toString(); - // Insert the new property entry - properties.setProperty(taskId.toString(), taskStr); - log.debug("Removed tasks '{}'" , taskId); - } - - // Store the properties into disk - properties.store(new FileOutputStream(propertiesDBName), null); - } - - public void deleteRecallTask(UUID taskId) throws 
FileNotFoundException, - IOException, DataAccessException { - - Properties properties = new Properties(); - properties.load(new FileInputStream(propertiesDBName)); - - // Retrieve the Task from taskId - String task = properties.getProperty(taskId.toString()); - if (task == null) { - log.error("Unable to find the task with ID = {}" , taskId); - throw new DataAccessException("Unable to find the task with ID = " - + taskId); - } else { - properties.remove(taskId); - log.debug("Removed tasks '{}'" , taskId); - } - - // Store the properties into disk - properties.store(new FileOutputStream(propertiesDBName), null); - } - - // public LinkedHashMap getAll() throws - // FileNotFoundException, IOException, DataAccessException { - // - // LinkedHashMap tasksDBmem = new - // LinkedHashMap(); - // ArrayList tasksList = new ArrayList(); - // Properties properties = new Properties(); - // properties.load(new FileInputStream(propertiesDBName)); - // Collection values = properties.values(); - // for (Object element : values) { - // String line = (String) element; - // TapeRecallTO task = TapeRecallBuilder.build(line); - // tasksList.add(task); - // } - // TapeRecallTO[] tasksArray = tasksList.toArray(new - // TapeRecallTO[tasksList.size()]); - // Arrays.sort(tasksArray); - // // Create the ordered LinkedHashMap - // for (TapeRecallTO element : tasksArray) { - // tasksDBmem.put(element.getRequestToken(), element); - // } - // return tasksDBmem; - // } + private static final Logger log = LoggerFactory.getLogger(PropertiesDB.class); + private static Configuration config = Configuration.getInstance(); + private final String dataFileName = "recall-table.txt"; + private final String propertiesDBName; + + public PropertiesDB() { + + String configurationDir = PropertiesDB.config.configurationDir(); + char sep = File.separatorChar; + propertiesDBName = configurationDir + sep + "etc" + sep + "db" + sep + dataFileName; + log.debug("Properties RecallTable-DB = {}", propertiesDBName); + } + + public PropertiesDB(boolean test) { + + String configurationDir; + if (test) { + configurationDir = System.getProperty("user.dir"); + } else { + configurationDir = PropertiesDB.config.configurationDir(); + } + char sep = File.separatorChar; + propertiesDBName = configurationDir + sep + "etc" + sep + "db" + sep + dataFileName; + // log.debug("Properties RecallTable-DB = " + propertiesDBName); + File tasksDBfile = new File(propertiesDBName); + boolean success = false; + try { + success = tasksDBfile.createNewFile(); + } catch (IOException e) { + log.error("Error while trying to check : {}", propertiesDBName, e); + } + if (success) { + log.debug("TaskDB = '{}' exists ? 
{}", propertiesDBName, success); + } + } + + // *************** PERSISTENCE METHODS **************** + + /** + * @param task + * @throws FileNotFoundException + * @throws IOException + * @throws DataAccessException + */ + public void addRecallTask(TapeRecallTO task) + throws FileNotFoundException, IOException, DataAccessException { + + Properties properties = new Properties(); + properties.load(new FileInputStream(propertiesDBName)); + + // Retrieve the Request-Token (unique-key) + TRequestToken taskToken = task.getRequestToken(); + if (taskToken == null) { + log.error("You are trying to store a Task without a task-id."); + throw new DataAccessException("You are trying to store a Task without a task-id."); + } + // Build the String related to Task-id + String taskStr = task.toString(); + // Insert the new property entry + properties.setProperty(taskToken.getValue(), taskStr); + // Store the properties into disk + properties.store(new FileOutputStream(propertiesDBName), null); + } + + public void setRecallTask(List listTasks) + throws FileNotFoundException, IOException, DataAccessException { + + Properties properties = new Properties(); + properties.load(new FileInputStream(propertiesDBName)); + + TRequestToken taskToken = null; + String taskStr = null; + for (TapeRecallTO TapeRecallTO : listTasks) { + // Retrieve the Task-id (unique-key) + taskToken = TapeRecallTO.getRequestToken(); + if (taskToken == null) { + log.error("You are trying to store a Task without a RequestToken."); + throw new DataAccessException("You are trying to store a Task without a Request-Token."); + } + // Build the String related to Task-id + taskStr = TapeRecallTO.toString(); + // Insert the new property entry + properties.setProperty(taskToken.getValue(), taskStr); + taskToken = null; + } + // Store the properties into disk + properties.store(new FileOutputStream(propertiesDBName), null); + } + + // public List getRecallTask(UUID taskId) throws + // FileNotFoundException, IOException, DataAccessException { + // ArrayList result = new ArrayList(); + // Properties properties = new Properties(); + // properties.load(new FileInputStream(propertiesDBName)); + // + // for (Object values : properties.values()) { + // String v = (String)values; + // TapeRecallTO task = TapeRecallBuilder.build(v); + // if (task.getTaskId().equals(taskId)) { + // result.add(task); + // } + // } + // if (result.isEmpty()) { + // log.error("Unable to retrieve the task with ID = " + taskId); + // throw new DataAccessException("Unable to find the task with ID = " + + // taskId); + // } + // return result; + // } + + public void updateRecallTask(TapeRecallTO task) + throws FileNotFoundException, IOException, DataAccessException { + + Properties properties = new Properties(); + properties.load(new FileInputStream(propertiesDBName)); + + UUID taskId = task.getTaskId(); + + // Check if the Task exists within the Properties DB + boolean taskExist = properties.containsKey(taskId.toString()); + if (!(taskExist)) { + log.error("Unable to find the task with ID = {}", taskId); + throw new DataAccessException("Unable to find the task with ID = " + taskId); + } else { + // Build the String related to Task-id + String taskStr = task.toString(); + // Insert the new property entry + properties.setProperty(taskId.toString(), taskStr); + log.debug("Removed tasks '{}'", taskId); + } + + // Store the properties into disk + properties.store(new FileOutputStream(propertiesDBName), null); + } + + public void deleteRecallTask(UUID taskId) + throws 
FileNotFoundException, IOException, DataAccessException { + + Properties properties = new Properties(); + properties.load(new FileInputStream(propertiesDBName)); + + // Retrieve the Task from taskId + String task = properties.getProperty(taskId.toString()); + if (task == null) { + log.error("Unable to find the task with ID = {}", taskId); + throw new DataAccessException("Unable to find the task with ID = " + taskId); + } else { + properties.remove(taskId); + log.debug("Removed tasks '{}'", taskId); + } + + // Store the properties into disk + properties.store(new FileOutputStream(propertiesDBName), null); + } + + // public LinkedHashMap getAll() throws + // FileNotFoundException, IOException, DataAccessException { + // + // LinkedHashMap tasksDBmem = new + // LinkedHashMap(); + // ArrayList tasksList = new ArrayList(); + // Properties properties = new Properties(); + // properties.load(new FileInputStream(propertiesDBName)); + // Collection values = properties.values(); + // for (Object element : values) { + // String line = (String) element; + // TapeRecallTO task = TapeRecallBuilder.build(line); + // tasksList.add(task); + // } + // TapeRecallTO[] tasksArray = tasksList.toArray(new + // TapeRecallTO[tasksList.size()]); + // Arrays.sort(tasksArray); + // // Create the ordered LinkedHashMap + // for (TapeRecallTO element : tasksArray) { + // tasksDBmem.put(element.getRequestToken(), element); + // } + // return tasksDBmem; + // } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/persistence/TapeRecallBuilder.java b/src/main/java/it/grid/storm/tape/recalltable/persistence/TapeRecallBuilder.java index e0a83131..3bf2cfd0 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/persistence/TapeRecallBuilder.java +++ b/src/main/java/it/grid/storm/tape/recalltable/persistence/TapeRecallBuilder.java @@ -1,36 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable.persistence; -/** - * @author zappi - * - */ +/** @author zappi */ public class TapeRecallBuilder { - /** - * { "filename":""; "dn":""; "fqans":["fqan":"", - * "fqan":""]; "vo-name":"" } - **/ + /** + * { "filename":""; "dn":""; "fqans":["fqan":"", "fqan":""]; + * "vo-name":"" } + */ + public static final String TASK_START = "{"; - public static final String TASK_START = "{"; - public static final String TASK_END = "}"; - public static final String ELEMENT_SEP = " # "; - public static final String FN_PREFIX = "filename"; - public static final String DN_PREFIX = "dn"; - public static final String FQANS_PREFIX = "fqans"; - public static final String FQANS_ARRAY_START = "["; - public static final String FQANS_ARRAY_END = "]"; - public static final String FQAN_PREFIX = "fqan"; - public static final String FQAN_SEP = ","; - public static final String VONAME_PREFIX = "vo-name"; - public static final String USERID_PREFIX = "userId"; - public static final String EQUAL_CHAR = "="; + public static final String TASK_END = "}"; + public static final String ELEMENT_SEP = " # "; + public static final String FN_PREFIX = "filename"; + public static final String DN_PREFIX = "dn"; + public static final String FQANS_PREFIX = "fqans"; + public static final String FQANS_ARRAY_START = "["; + public static final String FQANS_ARRAY_END = "]"; + public static final String FQAN_PREFIX = "fqan"; + public static final String FQAN_SEP = ","; + public static final String VONAME_PREFIX = "vo-name"; + public static final String USERID_PREFIX = "userId"; + public static final String EQUAL_CHAR = "="; - private TapeRecallBuilder() {} + private TapeRecallBuilder() {} } diff --git a/src/main/java/it/grid/storm/tape/recalltable/providers/TapeRecallTOListMessageBodyWriter.java b/src/main/java/it/grid/storm/tape/recalltable/providers/TapeRecallTOListMessageBodyWriter.java index 96e82eb2..bab27f85 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/providers/TapeRecallTOListMessageBodyWriter.java +++ b/src/main/java/it/grid/storm/tape/recalltable/providers/TapeRecallTOListMessageBodyWriter.java @@ -1,19 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.providers; import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.tape.recalltable.persistence.TapeRecallBuilder; - import java.io.IOException; import java.io.OutputStream; import java.lang.annotation.Annotation; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.List; - import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; @@ -22,58 +19,63 @@ @Provider @Produces("text/plain") -public class TapeRecallTOListMessageBodyWriter implements - MessageBodyWriter> { +public class TapeRecallTOListMessageBodyWriter implements MessageBodyWriter> { - @Override - public long getSize(List tasks, Class type, - Type genericType, Annotation[] annotations, MediaType mediaType) { + @Override + public long getSize( + List tasks, + Class type, + Type genericType, + Annotation[] annotations, + MediaType mediaType) { - return -1; - } + return -1; + } - @Override - public boolean isWriteable(Class type, Type genericType, - Annotation[] annotations, MediaType mediaType) { + @Override + public boolean isWriteable( + Class type, Type genericType, Annotation[] annotations, MediaType mediaType) { - boolean isWritable; + boolean isWritable; - if (List.class.isAssignableFrom(type) - && genericType instanceof ParameterizedType) { + if (List.class.isAssignableFrom(type) && genericType instanceof ParameterizedType) { - ParameterizedType parameterizedType = (ParameterizedType) genericType; - Type[] actualTypeArgs = (parameterizedType.getActualTypeArguments()); + ParameterizedType parameterizedType = (ParameterizedType) genericType; + Type[] actualTypeArgs = (parameterizedType.getActualTypeArguments()); - isWritable = (actualTypeArgs.length == 1 && actualTypeArgs[0] - .equals(TapeRecallTO.class)); + isWritable = (actualTypeArgs.length == 1 && actualTypeArgs[0].equals(TapeRecallTO.class)); - } else { + } else { - isWritable = false; - } + isWritable = false; + } - return isWritable; - } + return isWritable; + } - @Override - public void writeTo(List tasks, Class type, - Type genericType, Annotation[] annotations, MediaType mediaType, - MultivaluedMap httpHeaders, OutputStream entityStream) - throws IOException { + @Override + public void writeTo( + List tasks, + Class type, + Type genericType, + Annotation[] annotations, + MediaType mediaType, + MultivaluedMap httpHeaders, + OutputStream entityStream) + throws IOException { - StringBuilder sb = new StringBuilder(); + StringBuilder sb = new StringBuilder(); - sb.append("{"); + sb.append("{"); - for (TapeRecallTO t : tasks) { - - sb.append(t.toGEMSS()); - sb.append(TapeRecallBuilder.ELEMENT_SEP); - } + for (TapeRecallTO t : tasks) { - sb.append("}"); + sb.append(t.toGEMSS()); + sb.append(TapeRecallBuilder.ELEMENT_SEP); + } - entityStream.write(sb.toString().getBytes()); - } + sb.append("}"); + entityStream.write(sb.toString().getBytes()); + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequest.java b/src/main/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequest.java index d222be92..5514e658 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequest.java +++ b/src/main/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequest.java @@ -1,126 +1,138 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.resources; -import javax.validation.constraints.DecimalMax; -import javax.validation.constraints.DecimalMin; -import javax.validation.constraints.NotNull; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; +import javax.validation.constraints.DecimalMax; +import javax.validation.constraints.DecimalMin; +import javax.validation.constraints.NotNull; @JsonInclude(Include.NON_NULL) public class TaskInsertRequest { - public static final String MAX_RETRY_ATTEMPTS = "4"; - - @NotNull(message = "Request must contain a STFN") - private String stfn; - @NotNull(message = "Request must contain a userId") - private String userId; - - @DecimalMin(value = "0", message = "Retry attempts must be more or equal than zero.") - @DecimalMax(value = MAX_RETRY_ATTEMPTS, - message = "Retry attempts must be less or equal than " + MAX_RETRY_ATTEMPTS + ".") - private int retryAttempts; - - private String voName; - private Integer pinLifetime; - - @JsonCreator - public TaskInsertRequest(@JsonProperty("stfn") String stfn, - @JsonProperty("userId") String userId, @JsonProperty("retryAttempts") int retryAttempts, - @JsonProperty("voName") String voName, @JsonProperty("pinLifetime") Integer pinLifetime) { - - this.stfn = stfn; - this.retryAttempts = retryAttempts; - this.voName = voName; - this.pinLifetime = pinLifetime; - this.userId = userId; - } - - public TaskInsertRequest(Builder builder) { - this.stfn = builder.stfn; - this.retryAttempts = builder.retryAttempts; - this.voName = builder.voName; - this.pinLifetime = builder.pinLifetime; - this.userId = builder.userId; - } - - public String getStfn() { - return stfn; - } - - public String getUserId() { - return userId; - } - - public int getRetryAttempts() { - return retryAttempts; - } - - public String getVoName() { - return voName; - } - - public Integer getPinLifetime() { - return this.pinLifetime; - } - - public static Builder builder() { - return new Builder(); - } - - @Override - public String toString() { - return "TaskInsertRequest [stfn=" + stfn + ", userId=" + userId + ", retryAttempts=" - + retryAttempts + ", voName=" + voName + ", pinLifetime=" + pinLifetime + "]"; - } - - public static class Builder { - - private String stfn; - private String userId; - private int retryAttempts; - - private String voName; - private Integer pinLifetime; - - public Builder() { - this.retryAttempts = 0; - } - - public Builder stfn(String stfn) { - this.stfn = stfn; - return this; - } - - public Builder userId(String userId) { - this.userId = userId; - return this; - } - - public Builder retryAttempts(int retryAttempts) { - this.retryAttempts = retryAttempts; - return this; - } - - public Builder voName(String voName) { - this.voName = voName; - return this; - } - - public Builder pinLifetime(int pinLifetime) { - this.pinLifetime = pinLifetime; - return this; - } - - public TaskInsertRequest build() { - return new TaskInsertRequest(this); - } - } + public static final String MAX_RETRY_ATTEMPTS = "4"; + + @NotNull(message = "Request must contain a STFN") + private String stfn; + + @NotNull(message = "Request must contain a userId") + private String userId; + + @DecimalMin(value = "0", message = "Retry attempts must be more or 
equal than zero.") + @DecimalMax( + value = MAX_RETRY_ATTEMPTS, + message = "Retry attempts must be less or equal than " + MAX_RETRY_ATTEMPTS + ".") + private int retryAttempts; + + private String voName; + private Integer pinLifetime; + + @JsonCreator + public TaskInsertRequest( + @JsonProperty("stfn") String stfn, + @JsonProperty("userId") String userId, + @JsonProperty("retryAttempts") int retryAttempts, + @JsonProperty("voName") String voName, + @JsonProperty("pinLifetime") Integer pinLifetime) { + + this.stfn = stfn; + this.retryAttempts = retryAttempts; + this.voName = voName; + this.pinLifetime = pinLifetime; + this.userId = userId; + } + + public TaskInsertRequest(Builder builder) { + this.stfn = builder.stfn; + this.retryAttempts = builder.retryAttempts; + this.voName = builder.voName; + this.pinLifetime = builder.pinLifetime; + this.userId = builder.userId; + } + + public String getStfn() { + return stfn; + } + + public String getUserId() { + return userId; + } + + public int getRetryAttempts() { + return retryAttempts; + } + + public String getVoName() { + return voName; + } + + public Integer getPinLifetime() { + return this.pinLifetime; + } + + public static Builder builder() { + return new Builder(); + } + + @Override + public String toString() { + return "TaskInsertRequest [stfn=" + + stfn + + ", userId=" + + userId + + ", retryAttempts=" + + retryAttempts + + ", voName=" + + voName + + ", pinLifetime=" + + pinLifetime + + "]"; + } + + public static class Builder { + + private String stfn; + private String userId; + private int retryAttempts; + + private String voName; + private Integer pinLifetime; + + public Builder() { + this.retryAttempts = 0; + } + + public Builder stfn(String stfn) { + this.stfn = stfn; + return this; + } + + public Builder userId(String userId) { + this.userId = userId; + return this; + } + + public Builder retryAttempts(int retryAttempts) { + this.retryAttempts = retryAttempts; + return this; + } + + public Builder voName(String voName) { + this.voName = voName; + return this; + } + + public Builder pinLifetime(int pinLifetime) { + this.pinLifetime = pinLifetime; + return this; + } + + public TaskInsertRequest build() { + return new TaskInsertRequest(this); + } + } } diff --git a/src/main/java/it/grid/storm/tape/recalltable/resources/TaskResource.java b/src/main/java/it/grid/storm/tape/recalltable/resources/TaskResource.java index 60c8eff0..c0e996b2 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/resources/TaskResource.java +++ b/src/main/java/it/grid/storm/tape/recalltable/resources/TaskResource.java @@ -1,10 +1,7 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable.resources; import static it.grid.storm.persistence.model.TapeRecallTO.RecallTaskType.RCLL; @@ -30,10 +27,6 @@ import it.grid.storm.tape.recalltable.model.PutTapeRecallStatusValidator; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; import it.grid.storm.tape.recalltable.model.TaskInsertRequestValidator; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -42,7 +35,6 @@ import java.util.Date; import java.util.List; import java.util.UUID; - import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -54,6 +46,8 @@ import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.GenericEntity; import javax.ws.rs.core.Response; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @author Riccardo Zappi @@ -86,10 +80,10 @@ public TaskResource(ResourceService service, TapeRecallCatalog recallCatalog) { /** * Get recall tasks that are currently in progress. - * + * * @param maxResults the maximum number of result to be returned * @return a Response with a 200 code containing a list of the tasks currently in progress or with - * a 500 if something went wrong + * a 500 if something went wrong */ @GET public Response getTasks(@QueryParam("maxResults") Integer maxResults) { @@ -103,24 +97,24 @@ public Response getTasks(@QueryParam("maxResults") Integer maxResults) { /** * This method takes a request token and a SURL encoded as a string as follows - * + * *
    *  requestToken= surl=
    * 
- * + * * This method checks that the requested SURL has been recalled and if so updates the request * status to the proper final status. - * - * This method returns a 200 response status and a string containing either true or false. It + * + *

This method returns a 200 response status and a string containing either true or false. It * returns true if the file is present on the filesystem, false otherwise (this may happen when * querying the status of a surl for which the recall operation is still in progress on a tape * enabled storage area). - * - * This method returns a 500 response in case of errors - * - * The StoRM Frontend calls this method whenever a ptg or bol status request is submitted and the - * related ptg or bol status is marked as in progress in StoRM database. (for both tape enabled - * and disk only SA). + * + *

This method returns a 500 response in case of errors + * + *

The StoRM Frontend calls this method whenever a ptg or bol status request is submitted and + * the related ptg or bol status is marked as in progress in StoRM database. (for both tape + * enabled and disk only SA). */ @PUT @Consumes("text/plain") @@ -158,13 +152,12 @@ public Response putTaskStatus(InputStream input) { /** * Updates the status or retry value of a recall task. Called by GEMSS after a recall tasks is * finished. - * */ @PUT @Path("/{groupTaskId}") @Consumes("text/plain") - public void putNewTaskStatusOrRetryValue(@PathParam("groupTaskId") UUID groupTaskId, - InputStream input) throws TapeRecallException { + public void putNewTaskStatusOrRetryValue( + @PathParam("groupTaskId") UUID groupTaskId, InputStream input) throws TapeRecallException { log.debug("Requested to change recall table value for taskId {}", groupTaskId); @@ -191,11 +184,18 @@ public void putNewTaskStatusOrRetryValue(@PathParam("groupTaskId") UUID groupTas } catch (DataAccessException e) { - log.error("Unable to retrieve Recall Group Task with ID = '{}' DataAccessException: {}", - groupTaskId, e.getMessage(), e); - - throw new TapeRecallException("Unable to retrieve recall group task " + "with ID = '" - + groupTaskId + "' " + e.getMessage()); + log.error( + "Unable to retrieve Recall Group Task with ID = '{}' DataAccessException: {}", + groupTaskId, + e.getMessage(), + e); + + throw new TapeRecallException( + "Unable to retrieve recall group task " + + "with ID = '" + + groupTaskId + + "' " + + e.getMessage()); } String keyRetryValue = config.getRetryValueKey(); @@ -243,18 +243,25 @@ public void putNewTaskStatusOrRetryValue(@PathParam("groupTaskId") UUID groupTas try { - recallCatalog.changeGroupTaskStatus(groupTaskId, - TapeRecallStatus.getRecallTaskStatus(intValue), new Date()); + recallCatalog.changeGroupTaskStatus( + groupTaskId, TapeRecallStatus.getRecallTaskStatus(intValue), new Date()); } catch (DataAccessException e) { log.error( "Unable to change the status for group task id {} to status {} DataAccessException : {}", - groupTaskId, intValue, e.getMessage(), e); + groupTaskId, + intValue, + e.getMessage(), + e); throw new TapeRecallException( - "Unable to change the status for group task id " + groupTaskId + " to status " - + intValue + " . DataAccessException : " + e.getMessage()); + "Unable to change the status for group task id " + + groupTaskId + + " to status " + + intValue + + " . DataAccessException : " + + e.getMessage()); } } else { @@ -268,7 +275,7 @@ public void putNewTaskStatusOrRetryValue(@PathParam("groupTaskId") UUID groupTas /** * Creates a new recall task. 
- * + * * @author Enrico Vianello */ @POST @@ -305,12 +312,14 @@ public Response postNewTask(TaskInsertRequest request) { try { - voName = resource.getVirtualFileSystem() - .getApproachableRules() - .get(0) - .getSubjectRules() - .getVONameMatchingRule() - .getVOName(); + voName = + resource + .getVirtualFileSystem() + .getApproachableRules() + .get(0) + .getSubjectRules() + .getVONameMatchingRule() + .getVOName(); } catch (NamespaceException e) { @@ -319,9 +328,10 @@ public Response postNewTask(TaskInsertRequest request) { } if (request.getVoName() != null && !request.getVoName().equals(voName)) { - String message = String.format( - "The voName included in the request does not match the voName resolved for this request: %s != %s", - request.getVoName(), voName); + String message = + String.format( + "The voName included in the request does not match the voName resolved for this request: %s != %s", + request.getVoName(), voName); log.error(message); throw new WebApplicationException(message, BAD_REQUEST); } @@ -352,8 +362,9 @@ public Response postNewTask(TaskInsertRequest request) { throw new WebApplicationException(e, INTERNAL_SERVER_ERROR); } - String location = String.format("/recalltable/task/%s?requestToken=%s", groupTaskId, - task.getRequestToken().getValue()); + String location = + String.format( + "/recalltable/task/%s?requestToken=%s", groupTaskId, task.getRequestToken().getValue()); log.debug("Location: {}", location); return Response.created(URI.create(location)).build(); @@ -362,7 +373,8 @@ public Response postNewTask(TaskInsertRequest request) { @GET @Path("/{groupTaskId}") @Produces(APPLICATION_JSON) - public Response getGroupTaskInfo(@PathParam("groupTaskId") String groupTaskId, + public Response getGroupTaskInfo( + @PathParam("groupTaskId") String groupTaskId, @QueryParam("requestToken") String requestToken) { log.info("GET info for groupTaskId={} and requestToken={})", groupTaskId, requestToken); @@ -387,8 +399,8 @@ public Response getGroupTaskInfo(@PathParam("groupTaskId") String groupTaskId, } } if (task == null) { - throw new WebApplicationException("No task found for requestToken " + requestToken, - NOT_FOUND); + throw new WebApplicationException( + "No task found for requestToken " + requestToken, NOT_FOUND); } String jsonString = null; @@ -402,10 +414,7 @@ public Response getGroupTaskInfo(@PathParam("groupTaskId") String groupTaskId, return Response.ok(jsonString).build(); } - /** - * Utility method. - * - */ + /** Utility method. */ private String buildInputString(InputStream input) { BufferedReader reader = new BufferedReader(new InputStreamReader(input)); @@ -439,5 +448,4 @@ private String buildInputString(InputStream input) { return sb.toString(); } - } diff --git a/src/main/java/it/grid/storm/tape/recalltable/resources/TasksCardinality.java b/src/main/java/it/grid/storm/tape/recalltable/resources/TasksCardinality.java index 54a50359..4b68b175 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/resources/TasksCardinality.java +++ b/src/main/java/it/grid/storm/tape/recalltable/resources/TasksCardinality.java @@ -1,92 +1,83 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable.resources; import it.grid.storm.persistence.exceptions.DataAccessException; import it.grid.storm.tape.recalltable.TapeRecallCatalog; import it.grid.storm.tape.recalltable.TapeRecallException; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.Response; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * @author ritz - * - */ +/** @author ritz */ @Path("/recalltable/cardinality/tasks/") public class TasksCardinality { - private static final Logger log = LoggerFactory.getLogger(TasksCardinality.class); - - /** - * Get the number of tasks that are queued. - * - * @return - * @throws TapeRecallException - */ - @GET - @Path("/queued") - @Produces("text/plain") - public Response getNumberQueued() { - - int nQueued = 0; - - try { - - TapeRecallCatalog rtCat = new TapeRecallCatalog(); - nQueued = rtCat.getNumberTaskQueued(); - - } catch (DataAccessException e) { - - String errorStr = "Unable to use RecallTable DB."; - log.error(errorStr, e); - return Response.serverError().entity(errorStr).build(); - } - - if (nQueued > 0) { - log.info("Number of tasks queued = {}", nQueued); - } else { - log.trace("Number of tasks queued = {}", nQueued); - } - return Response.ok().entity(Integer.toString(nQueued)).build(); - } - - /** - * Get the number of tasks that are ready for take over. - * - * @return - */ - @GET - @Path("/readyTakeOver") - @Produces("text/plain") - public Response getReadyForTakeover() { - - int nReadyForTakeover = 0; - - try { - - TapeRecallCatalog rtCat = new TapeRecallCatalog(); - nReadyForTakeover = rtCat.getReadyForTakeOver(); - - } catch (DataAccessException e) { - - String errorStr = "Unable to use RecallTable DB."; - log.error(errorStr, e); - return Response.serverError().entity(errorStr).build(); - } - - log.debug("Number of tasks queued = {}", nReadyForTakeover); - return Response.ok().entity(Integer.toString(nReadyForTakeover)).build(); - } - -} \ No newline at end of file + private static final Logger log = LoggerFactory.getLogger(TasksCardinality.class); + + /** + * Get the number of tasks that are queued. + * + * @return + * @throws TapeRecallException + */ + @GET + @Path("/queued") + @Produces("text/plain") + public Response getNumberQueued() { + + int nQueued = 0; + + try { + + TapeRecallCatalog rtCat = new TapeRecallCatalog(); + nQueued = rtCat.getNumberTaskQueued(); + + } catch (DataAccessException e) { + + String errorStr = "Unable to use RecallTable DB."; + log.error(errorStr, e); + return Response.serverError().entity(errorStr).build(); + } + + if (nQueued > 0) { + log.info("Number of tasks queued = {}", nQueued); + } else { + log.trace("Number of tasks queued = {}", nQueued); + } + return Response.ok().entity(Integer.toString(nQueued)).build(); + } + + /** + * Get the number of tasks that are ready for take over. 
+ * + * @return + */ + @GET + @Path("/readyTakeOver") + @Produces("text/plain") + public Response getReadyForTakeover() { + + int nReadyForTakeover = 0; + + try { + + TapeRecallCatalog rtCat = new TapeRecallCatalog(); + nReadyForTakeover = rtCat.getReadyForTakeOver(); + + } catch (DataAccessException e) { + + String errorStr = "Unable to use RecallTable DB."; + log.error(errorStr, e); + return Response.serverError().entity(errorStr).build(); + } + + log.debug("Number of tasks queued = {}", nReadyForTakeover); + return Response.ok().entity(Integer.toString(nReadyForTakeover)).build(); + } +} diff --git a/src/main/java/it/grid/storm/tape/recalltable/resources/TasksResource.java b/src/main/java/it/grid/storm/tape/recalltable/resources/TasksResource.java index b235ab07..3e15ae45 100644 --- a/src/main/java/it/grid/storm/tape/recalltable/resources/TasksResource.java +++ b/src/main/java/it/grid/storm/tape/recalltable/resources/TasksResource.java @@ -1,24 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ +/** */ package it.grid.storm.tape.recalltable.resources; import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import it.grid.storm.config.Configuration; import it.grid.storm.persistence.model.TapeRecallTO; import it.grid.storm.tape.recalltable.TapeRecallCatalog; import it.grid.storm.tape.recalltable.TapeRecallException; import it.grid.storm.tape.recalltable.model.TapeRecallStatus; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -27,261 +19,260 @@ import java.util.HashMap; import java.util.List; import java.util.UUID; - import javax.ws.rs.Consumes; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.GenericEntity; import javax.ws.rs.core.Response; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author zappi - * - */ +/** @author zappi */ @Path("/recalltable/tasks") public class TasksResource { - private static final Logger log = LoggerFactory.getLogger(TasksResource.class); - - private static Configuration config = Configuration.getInstance(); - - /** - * Return recall tasks for being taken over. The status of the tasks that - * are returned is set to in progress. - * - * @param input a key value pair in which the value is the number - * of results to be returned in the - * @return the tasks ready to takeover - * @throws TapeRecallException - */ - @PUT - @Consumes("text/plain") - @Produces("text/plain") - public Response putTakeoverTasks(InputStream input) throws TapeRecallException { - - // retrieve the Input String - String inputStr = buildInputString(input); - - log.debug("@PUT (input string) = '{}'" , inputStr); - - // retrieve the number of tasks to takeover (default = 1) - int numbOfTask = 1; - - // retrieve value from Body param - String keyTakeover = config.getTaskoverKey(); - - int eqIndex = inputStr.indexOf('='); - - if (eqIndex > 0) { - - String value = inputStr.substring(eqIndex); - String key = inputStr.substring(0, eqIndex); - - if (key.equals(keyTakeover)) { - - try { - - // trim out the '\n' end. 
- numbOfTask = Integer.valueOf(value.substring(1, value.length() - 1)); - - } catch (NumberFormatException e) { - - throw new TapeRecallException("Unable to understand " + - "the number value = '" + value + "'"); - } - } - } - - // retrieve the tasks - List tasks = new TapeRecallCatalog().takeoverNTasksWithDoubles(numbOfTask); - - HashMap> groupTaskMap = buildGroupTaskMap(tasks); - - List groupTasks = Lists.newArrayList(); - - for (List groupTaskList : groupTaskMap.values()) { - - try { - - groupTasks.add(makeOne(groupTaskList)); - - } catch (IllegalArgumentException e) { - - log.error("Unable to makeOne the task list . IllegalArgumentException : {}" , e.getMessage() , e); - log.error("Erroneous task list (long output): {}" , groupTaskList.toString()); - log.error("Skip the erroneous task list and go on...Please contact StoRM support"); - } - } - - if (tasks.size() > groupTasks.size()) { - - log.debug("Taking over some multy-group tasks"); - } - - log.debug("Number of tasks recalled : <{}> over <{}> tasks requested" , groupTasks.size() , tasks.size()); - - // need a generic entity - GenericEntity> entity = - new GenericEntity>(tasks) {}; - - return Response.ok(entity).build(); - } - - /** - * Creates a map with the taskIds as keys and the list of tasks related to - * each taskId (key) as value - * - * @param tasks - * @return - */ - private HashMap> buildGroupTaskMap(List tasks) { - - HashMap> groupTaskMap = Maps.newHashMap(); - - for (TapeRecallTO task : tasks) { - - List taskList = - groupTaskMap.get(task.getGroupTaskId()); - - if (taskList == null) { - - taskList = Lists.newArrayList(); - groupTaskMap.put(task.getGroupTaskId(), taskList); - } - - taskList.add(task); - } - - return groupTaskMap; - } - - /** - * Given a list of tasks with the same taskId produces a single task merging - * the list members - * - * @param recallTasks - * @return - */ - private TapeRecallTO makeOne(List recallTasks) { - - TapeRecallTO taskTO = new TapeRecallTO(); - - UUID taskId = recallTasks.get(0).getTaskId(); - - // verify that all have the same task id - for (TapeRecallTO recallTask : recallTasks) { - - if (!recallTask.getTaskId().equals(taskId)) { - - log.error("Received a list of not omogeneous tasks, the taskid '{}' is not matched by : {}" , taskId , recallTask); - - throw new IllegalArgumentException( - "Received a list of not omogeneous tasks"); - } - } - - for (TapeRecallTO recallTask : recallTasks) { - - // set common fields from any of the tasks - taskTO.setTaskId(recallTask.getTaskId()); - taskTO.setGroupTaskId(recallTask.getGroupTaskId()); - taskTO.setRequestToken(recallTask.getRequestToken()); - taskTO.setRequestType(recallTask.getRequestType()); - taskTO.setFileName(recallTask.getFileName()); - taskTO.setUserID(recallTask.getUserID()); - taskTO.setVoName(recallTask.getVoName()); - taskTO.setStatus(TapeRecallStatus.QUEUED); - - break; - } - - /* - * merge task on recall related fields to have a pin that starts as soon as - * requested and last as long as needed - */ - - int maxRetryAttempt = 0; - - Date minInsertionInstant = null; - Date minDeferredRecallInstant = null; - Date maxPinExpirationInstant = null; - - for (TapeRecallTO recallTask : recallTasks) { - - if (recallTask.getRetryAttempt() > maxRetryAttempt) { - maxRetryAttempt = recallTask.getRetryAttempt(); - } - - if (minInsertionInstant == null - || recallTask.getInsertionInstant().before(minInsertionInstant)) { - - minInsertionInstant = recallTask.getInsertionInstant(); - } - - if (minDeferredRecallInstant == null - || 
recallTask.getDeferredRecallInstant().before(minDeferredRecallInstant)) { - - minDeferredRecallInstant = recallTask.getDeferredRecallInstant(); - } - - Date currentPinExpirationInstant = - new Date(recallTask.getDeferredRecallInstant().getTime() + (recallTask.getPinLifetime() * 1000)); - - if (maxPinExpirationInstant == null - || currentPinExpirationInstant.after(maxPinExpirationInstant)) { - - maxPinExpirationInstant = currentPinExpirationInstant; - } - } - - taskTO.setRetryAttempt(maxRetryAttempt); - taskTO.setInsertionInstant(minInsertionInstant); - taskTO.setDeferredRecallInstant(minDeferredRecallInstant); - - int pinLifeTime = (int) (maxPinExpirationInstant.getTime() - minDeferredRecallInstant.getTime()) / 1000; - - taskTO.setPinLifetime(pinLifeTime); - - return taskTO; - } - - /** - * Utility method. - * - */ - private String buildInputString(InputStream input) { - - BufferedReader reader = new BufferedReader(new InputStreamReader(input)); - - StringBuilder sb = new StringBuilder(); - - String line = null; - - try { - - while ((line = reader.readLine()) != null) { - - sb.append(line + "\n"); - } - - } catch (IOException e) { - - log.error(e.getMessage(), e); - - } finally { - - try { - - input.close(); - - } catch (IOException e) { - - log.error(e.getMessage(), e); - } - } - - return sb.toString(); - } - -} \ No newline at end of file + private static final Logger log = LoggerFactory.getLogger(TasksResource.class); + + private static Configuration config = Configuration.getInstance(); + + /** + * Return recall tasks for being taken over. The status of the tasks that are returned is set to + * in progress. + * + * @param input a key value pair in which the value is the number of results to be returned in the + * @return the tasks ready to takeover + * @throws TapeRecallException + */ + @PUT + @Consumes("text/plain") + @Produces("text/plain") + public Response putTakeoverTasks(InputStream input) throws TapeRecallException { + + // retrieve the Input String + String inputStr = buildInputString(input); + + log.debug("@PUT (input string) = '{}'", inputStr); + + // retrieve the number of tasks to takeover (default = 1) + int numbOfTask = 1; + + // retrieve value from Body param + String keyTakeover = config.getTaskoverKey(); + + int eqIndex = inputStr.indexOf('='); + + if (eqIndex > 0) { + + String value = inputStr.substring(eqIndex); + String key = inputStr.substring(0, eqIndex); + + if (key.equals(keyTakeover)) { + + try { + + // trim out the '\n' end. + numbOfTask = Integer.valueOf(value.substring(1, value.length() - 1)); + + } catch (NumberFormatException e) { + + throw new TapeRecallException( + "Unable to understand " + "the number value = '" + value + "'"); + } + } + } + + // retrieve the tasks + List tasks = new TapeRecallCatalog().takeoverNTasksWithDoubles(numbOfTask); + + HashMap> groupTaskMap = buildGroupTaskMap(tasks); + + List groupTasks = Lists.newArrayList(); + + for (List groupTaskList : groupTaskMap.values()) { + + try { + + groupTasks.add(makeOne(groupTaskList)); + + } catch (IllegalArgumentException e) { + + log.error( + "Unable to makeOne the task list . 
IllegalArgumentException : {}", e.getMessage(), e); + log.error("Erroneous task list (long output): {}", groupTaskList.toString()); + log.error("Skip the erroneous task list and go on...Please contact StoRM support"); + } + } + + if (tasks.size() > groupTasks.size()) { + + log.debug("Taking over some multy-group tasks"); + } + + log.debug( + "Number of tasks recalled : <{}> over <{}> tasks requested", + groupTasks.size(), + tasks.size()); + + // need a generic entity + GenericEntity> entity = new GenericEntity>(tasks) {}; + + return Response.ok(entity).build(); + } + + /** + * Creates a map with the taskIds as keys and the list of tasks related to each taskId (key) as + * value + * + * @param tasks + * @return + */ + private HashMap> buildGroupTaskMap(List tasks) { + + HashMap> groupTaskMap = Maps.newHashMap(); + + for (TapeRecallTO task : tasks) { + + List taskList = groupTaskMap.get(task.getGroupTaskId()); + + if (taskList == null) { + + taskList = Lists.newArrayList(); + groupTaskMap.put(task.getGroupTaskId(), taskList); + } + + taskList.add(task); + } + + return groupTaskMap; + } + + /** + * Given a list of tasks with the same taskId produces a single task merging the list members + * + * @param recallTasks + * @return + */ + private TapeRecallTO makeOne(List recallTasks) { + + TapeRecallTO taskTO = new TapeRecallTO(); + + UUID taskId = recallTasks.get(0).getTaskId(); + + // verify that all have the same task id + for (TapeRecallTO recallTask : recallTasks) { + + if (!recallTask.getTaskId().equals(taskId)) { + + log.error( + "Received a list of not omogeneous tasks, the taskid '{}' is not matched by : {}", + taskId, + recallTask); + + throw new IllegalArgumentException("Received a list of not omogeneous tasks"); + } + } + + for (TapeRecallTO recallTask : recallTasks) { + + // set common fields from any of the tasks + taskTO.setTaskId(recallTask.getTaskId()); + taskTO.setGroupTaskId(recallTask.getGroupTaskId()); + taskTO.setRequestToken(recallTask.getRequestToken()); + taskTO.setRequestType(recallTask.getRequestType()); + taskTO.setFileName(recallTask.getFileName()); + taskTO.setUserID(recallTask.getUserID()); + taskTO.setVoName(recallTask.getVoName()); + taskTO.setStatus(TapeRecallStatus.QUEUED); + + break; + } + + /* + * merge task on recall related fields to have a pin that starts as soon as + * requested and last as long as needed + */ + + int maxRetryAttempt = 0; + + Date minInsertionInstant = null; + Date minDeferredRecallInstant = null; + Date maxPinExpirationInstant = null; + + for (TapeRecallTO recallTask : recallTasks) { + + if (recallTask.getRetryAttempt() > maxRetryAttempt) { + maxRetryAttempt = recallTask.getRetryAttempt(); + } + + if (minInsertionInstant == null + || recallTask.getInsertionInstant().before(minInsertionInstant)) { + + minInsertionInstant = recallTask.getInsertionInstant(); + } + + if (minDeferredRecallInstant == null + || recallTask.getDeferredRecallInstant().before(minDeferredRecallInstant)) { + + minDeferredRecallInstant = recallTask.getDeferredRecallInstant(); + } + + Date currentPinExpirationInstant = + new Date( + recallTask.getDeferredRecallInstant().getTime() + + (recallTask.getPinLifetime() * 1000)); + + if (maxPinExpirationInstant == null + || currentPinExpirationInstant.after(maxPinExpirationInstant)) { + + maxPinExpirationInstant = currentPinExpirationInstant; + } + } + + taskTO.setRetryAttempt(maxRetryAttempt); + taskTO.setInsertionInstant(minInsertionInstant); + taskTO.setDeferredRecallInstant(minDeferredRecallInstant); + + int pinLifeTime = + 
(int) (maxPinExpirationInstant.getTime() - minDeferredRecallInstant.getTime()) / 1000; + + taskTO.setPinLifetime(pinLifeTime); + + return taskTO; + } + + /** Utility method. */ + private String buildInputString(InputStream input) { + + BufferedReader reader = new BufferedReader(new InputStreamReader(input)); + + StringBuilder sb = new StringBuilder(); + + String line = null; + + try { + + while ((line = reader.readLine()) != null) { + + sb.append(line + "\n"); + } + + } catch (IOException e) { + + log.error(e.getMessage(), e); + + } finally { + + try { + + input.close(); + + } catch (IOException e) { + + log.error(e.getMessage(), e); + } + } + + return sb.toString(); + } +} diff --git a/src/main/java/it/grid/storm/util/GPFSSizeHelper.java b/src/main/java/it/grid/storm/util/GPFSSizeHelper.java index adef51f4..3ed6781f 100644 --- a/src/main/java/it/grid/storm/util/GPFSSizeHelper.java +++ b/src/main/java/it/grid/storm/util/GPFSSizeHelper.java @@ -1,26 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.util; -/** - * @author baltico - * - */ +/** @author baltico */ public class GPFSSizeHelper { - private GPFSSizeHelper() { - - } - - public static long getBytesFromKIB(long kibiBytes) { + private GPFSSizeHelper() {} - if (kibiBytes < 0) { - throw new IllegalArgumentException("Invalid kibiBytes number: " - + kibiBytes); - } - return kibiBytes * 1024; - } + public static long getBytesFromKIB(long kibiBytes) { + if (kibiBytes < 0) { + throw new IllegalArgumentException("Invalid kibiBytes number: " + kibiBytes); + } + return kibiBytes * 1024; + } } diff --git a/src/main/java/it/grid/storm/util/SURLValidator.java b/src/main/java/it/grid/storm/util/SURLValidator.java index ead02b1a..9d1d02d0 100644 --- a/src/main/java/it/grid/storm/util/SURLValidator.java +++ b/src/main/java/it/grid/storm/util/SURLValidator.java @@ -1,24 +1,22 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.util; import java.util.regex.Matcher; import java.util.regex.Pattern; - public class SURLValidator { - - static final String SRM_URL_REGEXP = - "^srm://[A-Za-z0-9\\.\\-\\[\\]:]+(:\\d{1,4})?/(srm/managerv2\\?SFN=)?\\S*$"; - static final Pattern pattern = Pattern.compile(SRM_URL_REGEXP); - - static public boolean valid(String surl){ - - Matcher matcher = pattern.matcher(surl); + static final String SRM_URL_REGEXP = + "^srm://[A-Za-z0-9\\.\\-\\[\\]:]+(:\\d{1,4})?/(srm/managerv2\\?SFN=)?\\S*$"; + + static final Pattern pattern = Pattern.compile(SRM_URL_REGEXP); + + public static boolean valid(String surl) { + + Matcher matcher = pattern.matcher(surl); - return(matcher.matches()); - } + return (matcher.matches()); + } } diff --git a/src/main/java/it/grid/storm/util/TokenValidator.java b/src/main/java/it/grid/storm/util/TokenValidator.java index 21034748..2d7f16a3 100644 --- a/src/main/java/it/grid/storm/util/TokenValidator.java +++ b/src/main/java/it/grid/storm/util/TokenValidator.java @@ -1,22 +1,20 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.util; import java.util.regex.Matcher; import java.util.regex.Pattern; - public class TokenValidator { - - static final String UUID_REGEXP = - "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"; - - static final Pattern pattern = Pattern.compile(UUID_REGEXP); - - static public boolean valid(String token){ - Matcher matcher = pattern.matcher(token); - return(matcher.matches()); - } + + static final String UUID_REGEXP = + "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"; + + static final Pattern pattern = Pattern.compile(UUID_REGEXP); + + public static boolean valid(String token) { + Matcher matcher = pattern.matcher(token); + return (matcher.matches()); + } } diff --git a/src/main/java/it/grid/storm/util/VirtualFSHelper.java b/src/main/java/it/grid/storm/util/VirtualFSHelper.java index d44a1294..11467128 100644 --- a/src/main/java/it/grid/storm/util/VirtualFSHelper.java +++ b/src/main/java/it/grid/storm/util/VirtualFSHelper.java @@ -1,17 +1,14 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.util; -import java.util.List; - import com.google.common.collect.Lists; - import it.grid.storm.namespace.NamespaceDirector; import it.grid.storm.namespace.model.Capability; import it.grid.storm.namespace.model.Quota; import it.grid.storm.namespace.model.VirtualFS; +import java.util.List; public class VirtualFSHelper { @@ -46,11 +43,9 @@ public static List getGPFSQuotaEnabledFilesystems() { List allVFS = NamespaceDirector.getNamespace().getAllDefinedVFS(); for (VirtualFS vfs : allVFS) { - if (isGPFSQuotaEnabledForVFS(vfs)) - fss.add(vfs); + if (isGPFSQuotaEnabledForVFS(vfs)) fss.add(vfs); } return fss; - } } diff --git a/src/main/java/it/grid/storm/xmlrpc/StoRMXmlRpcException.java b/src/main/java/it/grid/storm/xmlrpc/StoRMXmlRpcException.java index 92026911..e86af010 100644 --- a/src/main/java/it/grid/storm/xmlrpc/StoRMXmlRpcException.java +++ b/src/main/java/it/grid/storm/xmlrpc/StoRMXmlRpcException.java @@ -1,37 +1,28 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public class StoRMXmlRpcException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1L; - - public StoRMXmlRpcException() { - - } + /** */ + private static final long serialVersionUID = 1L; - public StoRMXmlRpcException(String message) { + public StoRMXmlRpcException() {} - super(message); - } + public StoRMXmlRpcException(String message) { - public StoRMXmlRpcException(Throwable cause) { + super(message); + } - super(cause); - } + public StoRMXmlRpcException(Throwable cause) { - public StoRMXmlRpcException(String message, Throwable cause) { + super(cause); + } - super(message, cause); - } + public StoRMXmlRpcException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/XMLRPCExecutor.java b/src/main/java/it/grid/storm/xmlrpc/XMLRPCExecutor.java index f0ff3ddb..585956a6 100644 --- a/src/main/java/it/grid/storm/xmlrpc/XMLRPCExecutor.java +++ b/src/main/java/it/grid/storm/xmlrpc/XMLRPCExecutor.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc; @@ -16,152 +15,144 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ConveterFactory; - import java.util.ArrayList; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. - * + * * @author lucamag * @date May 27, 2008 */ - public class XMLRPCExecutor { - private static ArrayList bookKeepers = HealthDirector - .getHealthMonitor().getBookKeepers(); - - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(XMLRPCExecutor.class); - - /** - * @param type - * @param inputParam - * @return - */ - - public Map execute(OperationType type, Map inputParam) - throws StoRMXmlRpcException { - - long startTime = System.currentTimeMillis(); - long duration = System.nanoTime(); - log.debug("Executing a '{}' call" , type.toString()); - log.debug(" Structure size : {}" , inputParam.size()); - Converter converter = ConveterFactory.getConverter(type); - SynchcallDispatcher dispatcher = SynchcallDispatcherFactory.getDispatcher(); - - log.debug("Converting input data with Converter {}", converter.getClass().getName()); - InputData inputData = converter.convertToInputData(inputParam); - - log.debug("Dispatching request using SynchcallDispatcher {}" - , dispatcher.getClass().getName()); - OutputData outputData; - try { - outputData = dispatcher.processRequest(type, inputData); - } catch (IllegalArgumentException e) { - log - .error("Unable to process the request. Error from the SynchcallDispatcher. IllegalArgumentException: {}" - , e.getMessage(),e); - throw new StoRMXmlRpcException( - "Unable to process the request. IllegalArgumentException: " - + e.getMessage()); - } catch (CommandException e) { - log - .error("Unable to execute the request. Error from the SynchcallDispatcher. CommandException: {}" - , e.getMessage(),e); - throw new StoRMXmlRpcException( - "Unable to process the request. 
CommandException: " + e.getMessage()); - } - Map outputParam = converter.convertFromOutputData(outputData); - duration = System.nanoTime() - duration; - - logExecution(convertOperationType(type), - DataHelper.getRequestor(inputData), startTime, - TimeUnit.NANOSECONDS.toMillis(duration), - outputData.isSuccess()); - - return outputParam; - } - - /** - * Method used to book the execution of SYNCH operation - */ - private void logExecution(it.grid.storm.health.OperationType opType, - String dn, long startTime, long duration, boolean successResult) { - - LogEvent event = new LogEvent(opType, dn, startTime, duration, - successResult); - if (!(bookKeepers.isEmpty())) { - log.debug("Found # {} bookeepers." , bookKeepers.size()); - for (int i = 0; i < bookKeepers.size(); i++) { - bookKeepers.get(i).addLogEvent(event); - } - } - } - - /** - * TOREMOVE! this is a temporary code since two different class of - * OperationTYpe are defined. This is to convert the two kind of operation - * type, from the onw used here, enum based, to the one requested by the - * hearthbeat. - */ - private it.grid.storm.health.OperationType convertOperationType( - OperationType type) { - - switch (type) { - case PTG: - return it.grid.storm.health.OperationType.PTG; - case SPTG: - return it.grid.storm.health.OperationType.SPTG; - case PTP: - return it.grid.storm.health.OperationType.PTP; - case SPTP: - return it.grid.storm.health.OperationType.SPTP; - case COPY: - return it.grid.storm.health.OperationType.COPY; - case BOL: - return it.grid.storm.health.OperationType.BOL; - case AF: - return it.grid.storm.health.OperationType.AF; - case AR: - return it.grid.storm.health.OperationType.AR; - case EFL: - return it.grid.storm.health.OperationType.EFL; - case GSM: - return it.grid.storm.health.OperationType.GSM; - case GST: - return it.grid.storm.health.OperationType.GST; - case LS: - return it.grid.storm.health.OperationType.LS; - case MKD: - return it.grid.storm.health.OperationType.MKD; - case MV: - return it.grid.storm.health.OperationType.MV; - case PNG: - return it.grid.storm.health.OperationType.PNG; - case PD: - return it.grid.storm.health.OperationType.PD; - case RF: - return it.grid.storm.health.OperationType.RF; - case RESSP: - return it.grid.storm.health.OperationType.RS; - case RELSP: - return it.grid.storm.health.OperationType.RSP; - case RM: - return it.grid.storm.health.OperationType.RM; - case RMD: - return it.grid.storm.health.OperationType.RMD; - default: - return it.grid.storm.health.OperationType.UNDEF; - } - } - + private static ArrayList bookKeepers = + HealthDirector.getHealthMonitor().getBookKeepers(); + + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(XMLRPCExecutor.class); + + /** + * @param type + * @param inputParam + * @return + */ + public Map execute(OperationType type, Map inputParam) throws StoRMXmlRpcException { + + long startTime = System.currentTimeMillis(); + long duration = System.nanoTime(); + log.debug("Executing a '{}' call", type.toString()); + log.debug(" Structure size : {}", inputParam.size()); + Converter converter = ConveterFactory.getConverter(type); + SynchcallDispatcher dispatcher = SynchcallDispatcherFactory.getDispatcher(); + + log.debug("Converting input data with Converter {}", converter.getClass().getName()); + InputData inputData = converter.convertToInputData(inputParam); + + log.debug("Dispatching request using SynchcallDispatcher {}", dispatcher.getClass().getName()); + OutputData outputData; + try { + outputData = 
dispatcher.processRequest(type, inputData); + } catch (IllegalArgumentException e) { + log.error( + "Unable to process the request. Error from the SynchcallDispatcher. IllegalArgumentException: {}", + e.getMessage(), + e); + throw new StoRMXmlRpcException( + "Unable to process the request. IllegalArgumentException: " + e.getMessage()); + } catch (CommandException e) { + log.error( + "Unable to execute the request. Error from the SynchcallDispatcher. CommandException: {}", + e.getMessage(), + e); + throw new StoRMXmlRpcException( + "Unable to process the request. CommandException: " + e.getMessage()); + } + Map outputParam = converter.convertFromOutputData(outputData); + duration = System.nanoTime() - duration; + + logExecution( + convertOperationType(type), + DataHelper.getRequestor(inputData), + startTime, + TimeUnit.NANOSECONDS.toMillis(duration), + outputData.isSuccess()); + + return outputParam; + } + + /** Method used to book the execution of SYNCH operation */ + private void logExecution( + it.grid.storm.health.OperationType opType, + String dn, + long startTime, + long duration, + boolean successResult) { + + LogEvent event = new LogEvent(opType, dn, startTime, duration, successResult); + if (!(bookKeepers.isEmpty())) { + log.debug("Found # {} bookeepers.", bookKeepers.size()); + for (int i = 0; i < bookKeepers.size(); i++) { + bookKeepers.get(i).addLogEvent(event); + } + } + } + + /** + * TOREMOVE! this is a temporary code since two different class of OperationTYpe are defined. This + * is to convert the two kind of operation type, from the onw used here, enum based, to the one + * requested by the hearthbeat. + */ + private it.grid.storm.health.OperationType convertOperationType(OperationType type) { + + switch (type) { + case PTG: + return it.grid.storm.health.OperationType.PTG; + case SPTG: + return it.grid.storm.health.OperationType.SPTG; + case PTP: + return it.grid.storm.health.OperationType.PTP; + case SPTP: + return it.grid.storm.health.OperationType.SPTP; + case COPY: + return it.grid.storm.health.OperationType.COPY; + case BOL: + return it.grid.storm.health.OperationType.BOL; + case AF: + return it.grid.storm.health.OperationType.AF; + case AR: + return it.grid.storm.health.OperationType.AR; + case EFL: + return it.grid.storm.health.OperationType.EFL; + case GSM: + return it.grid.storm.health.OperationType.GSM; + case GST: + return it.grid.storm.health.OperationType.GST; + case LS: + return it.grid.storm.health.OperationType.LS; + case MKD: + return it.grid.storm.health.OperationType.MKD; + case MV: + return it.grid.storm.health.OperationType.MV; + case PNG: + return it.grid.storm.health.OperationType.PNG; + case PD: + return it.grid.storm.health.OperationType.PD; + case RF: + return it.grid.storm.health.OperationType.RF; + case RESSP: + return it.grid.storm.health.OperationType.RS; + case RELSP: + return it.grid.storm.health.OperationType.RSP; + case RM: + return it.grid.storm.health.OperationType.RM; + case RMD: + return it.grid.storm.health.OperationType.RMD; + default: + return it.grid.storm.health.OperationType.UNDEF; + } + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/XMLRPCHttpServer.java b/src/main/java/it/grid/storm/xmlrpc/XMLRPCHttpServer.java index e60f4ab5..6fb823ad 100644 --- a/src/main/java/it/grid/storm/xmlrpc/XMLRPCHttpServer.java +++ b/src/main/java/it/grid/storm/xmlrpc/XMLRPCHttpServer.java @@ -1,26 +1,27 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Synchronous Call xmlrpc Server . This class hava a - * set of Handler that manage the FE call invoking the right BackEnd manager. - * + * This class represents the Synchronous Call xmlrpc Server . This class hava a set of Handler that + * manage the FE call invoking the right BackEnd manager. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.xmlrpc; import static it.grid.storm.metrics.StormMetricRegistry.METRIC_REGISTRY; +import com.codahale.metrics.jetty8.InstrumentedHandler; +import it.grid.storm.config.Configuration; +import it.grid.storm.metrics.NamedInstrumentedSelectChannelConnector; +import it.grid.storm.metrics.NamedInstrumentedThreadPool; +import it.grid.storm.rest.JettyThread; import java.util.EnumSet; - import javax.servlet.DispatcherType; import javax.servlet.Filter; - import org.apache.xmlrpc.webserver.XmlRpcServlet; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.FilterHolder; @@ -29,28 +30,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.codahale.metrics.jetty8.InstrumentedHandler; - -import it.grid.storm.config.Configuration; -import it.grid.storm.metrics.NamedInstrumentedSelectChannelConnector; -import it.grid.storm.metrics.NamedInstrumentedThreadPool; -import it.grid.storm.rest.JettyThread; - public final class XMLRPCHttpServer { - /** - * Logger - */ + /** Logger */ private static final Logger LOG = LoggerFactory.getLogger(XMLRPCHttpServer.class); - /** - * The Jetty server hosting the Apache XML-RPC machinery - */ + /** The Jetty server hosting the Apache XML-RPC machinery */ private final Server server; - /** - * True if a web server has been started - */ + /** True if a web server has been started */ private boolean running = false; public static final int DEFAULT_MAX_THREAD_NUM = 256; @@ -67,7 +55,6 @@ public XMLRPCHttpServer(int port, int maxThreadNum, int maxQueueSize) server = buildWebServer(port, maxThreadNum, maxQueueSize); }; - private void configureThreadPool(Server s, int maxThreadNum, int maxQueueSize) { int threadNumber = maxThreadNum; @@ -89,14 +76,16 @@ private void configureThreadPool(Server s, int maxThreadNum, int maxQueueSize) { s.setThreadPool(tp); - LOG.info("Configured XMLRPC server threadpool: maxThreads={}, maxQueueSize={}", threadNumber, + LOG.info( + "Configured XMLRPC server threadpool: maxThreads={}, maxQueueSize={}", + threadNumber, queueSize); } - private void configureConnector(Server server, int port) { - NamedInstrumentedSelectChannelConnector connector = new NamedInstrumentedSelectChannelConnector( - "xmlrpc-connector", port, METRIC_REGISTRY.getRegistry()); + NamedInstrumentedSelectChannelConnector connector = + new NamedInstrumentedSelectChannelConnector( + "xmlrpc-connector", port, METRIC_REGISTRY.getRegistry()); server.addConnector(connector); } @@ -129,11 +118,11 @@ private void configureHandler(Server server) throws StoRMXmlRpcException { servletContextHandler.addFilter(filterHolder, "/*", EnumSet.of(DispatcherType.REQUEST)); } - InstrumentedHandler ih = new InstrumentedHandler(METRIC_REGISTRY.getRegistry(), - servletContextHandler, "xmlrpc-handler"); + InstrumentedHandler ih = + new InstrumentedHandler( + METRIC_REGISTRY.getRegistry(), servletContextHandler, "xmlrpc-handler"); server.setHandler(ih); - } private Server buildWebServer(int port, int 
maxThreadNum, int maxQueueSize) @@ -152,7 +141,7 @@ private Server buildWebServer(int port, int maxThreadNum, int maxQueueSize) /* * (non-Javadoc) - * + * * @see it.grid.storm.xmlrpc.XMLRPCServerInterface#createServer() */ public synchronized void start() { @@ -168,13 +157,9 @@ public synchronized void start() { LOG.info("Jetty server hosting the XML-RPM machinery is running"); } - } - /** - * @throws Exception - * - */ + /** @throws Exception */ public synchronized void stop() { LOG.info("Stopping Jetty server hosting the XML-RPC machinery"); @@ -191,13 +176,10 @@ public synchronized void stop() { return; } - } running = false; LOG.info("Jetty server hosting the XML-RPM machinery is not running"); - } - } diff --git a/src/main/java/it/grid/storm/xmlrpc/XMLRPCMethods.java b/src/main/java/it/grid/storm/xmlrpc/XMLRPCMethods.java index b27129a4..1085a590 100644 --- a/src/main/java/it/grid/storm/xmlrpc/XMLRPCMethods.java +++ b/src/main/java/it/grid/storm/xmlrpc/XMLRPCMethods.java @@ -1,17 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Synchronous Call xmlrpc Server . This class hava a - * set of Handler that manage the FE call invoking the right BackEnd manager. - * + * This class represents the Synchronous Call xmlrpc Server . This class hava a set of Handler that + * manage the FE call invoking the right BackEnd manager. + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.xmlrpc; import it.grid.storm.common.OperationType; @@ -20,160 +18,136 @@ public class XMLRPCMethods { - private final XMLRPCExecutor executor = new XMLRPCExecutor(); - - public XMLRPCMethods() { - - }; - - public Map ping(Map inputParam) throws StoRMXmlRpcException { + private final XMLRPCExecutor executor = new XMLRPCExecutor(); - return executor.execute(OperationType.PNG, inputParam); - } + public XMLRPCMethods() {}; - public Map putDone(Map inputParam) throws StoRMXmlRpcException { + public Map ping(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.PD, inputParam); - } + return executor.execute(OperationType.PNG, inputParam); + } - public Map releaseFiles(Map inputParam) throws StoRMXmlRpcException { + public Map putDone(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.RF, inputParam); - } + return executor.execute(OperationType.PD, inputParam); + } - public Map extendFileLifeTime(Map inputParam) throws StoRMXmlRpcException { + public Map releaseFiles(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.EFL, inputParam); - } + return executor.execute(OperationType.RF, inputParam); + } - public Map abortRequest(Map inputParam) throws StoRMXmlRpcException { + public Map extendFileLifeTime(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.AR, inputParam); - } + return executor.execute(OperationType.EFL, inputParam); + } - public Map abortFiles(HashMap inputParam) throws StoRMXmlRpcException { + public Map abortRequest(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.AF, inputParam); - } + return executor.execute(OperationType.AR, inputParam); + } - public Map reserveSpace(Map inputParam) throws StoRMXmlRpcException { + public Map abortFiles(HashMap inputParam) throws StoRMXmlRpcException { - 
return executor.execute(OperationType.RESSP, inputParam); - } + return executor.execute(OperationType.AF, inputParam); + } - /** - * GetSpaceMetaData - */ - public Map getSpaceMetaData(HashMap inputParam) throws StoRMXmlRpcException { + public Map reserveSpace(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.GSM, inputParam); - } + return executor.execute(OperationType.RESSP, inputParam); + } - /** - * GetSpaceTokens - * - * @param inputParam - * @return - */ - public Map getSpaceTokens(Map inputParam) throws StoRMXmlRpcException { + /** GetSpaceMetaData */ + public Map getSpaceMetaData(HashMap inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.GST, inputParam); - } + return executor.execute(OperationType.GSM, inputParam); + } - /** - * ReleaseSpace - */ + /** + * GetSpaceTokens + * + * @param inputParam + * @return + */ + public Map getSpaceTokens(Map inputParam) throws StoRMXmlRpcException { - public Map ReleaseSpace(Map inputParam) throws StoRMXmlRpcException { + return executor.execute(OperationType.GST, inputParam); + } - return executor.execute(OperationType.RELSP, inputParam); - } + /** ReleaseSpace */ + public Map ReleaseSpace(Map inputParam) throws StoRMXmlRpcException { - /** - * SrmLs request. This method catch an SrmLs request passed by StoRM FrontEnd - * trough xmlrpc communication. The Hastable is the default Java type used to - * represent structure passed by xmlrpc. - * - * @param Hastable - * output parameter structure returned. - * @param inputParameter - * input parameter structure received from xmlrpc call. - */ + return executor.execute(OperationType.RELSP, inputParam); + } - public Map ls(Map inputParam) throws StoRMXmlRpcException { + /** + * SrmLs request. This method catch an SrmLs request passed by StoRM FrontEnd trough xmlrpc + * communication. The Hastable is the default Java type used to represent structure passed by + * xmlrpc. + * + * @param Hastable output parameter structure returned. + * @param inputParameter input parameter structure received from xmlrpc call. + */ + public Map ls(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.LS, inputParam); - } + return executor.execute(OperationType.LS, inputParam); + } - /** - * SrmMkdir functionality. - */ + /** SrmMkdir functionality. */ + public Map mkdir(Map inputParam) throws StoRMXmlRpcException { - public Map mkdir(Map inputParam) throws StoRMXmlRpcException { + return executor.execute(OperationType.MKD, inputParam); + } - return executor.execute(OperationType.MKD, inputParam); - } + /** + * SrmRmdir functionality. + * + * @param inputParam + * @return + */ + public Map rmdir(Map inputParam) throws StoRMXmlRpcException { - /** - * SrmRmdir functionality. - * - * @param inputParam - * @return - */ - public Map rmdir(Map inputParam) throws StoRMXmlRpcException { + return executor.execute(OperationType.RMD, inputParam); + } - return executor.execute(OperationType.RMD, inputParam); - } + /** + * SrmRm functionality. + * + * @param inputParam + * @return + */ + public Map rm(Map inputParam) throws StoRMXmlRpcException { - /** - * SrmRm functionality. - * - * @param inputParam - * @return - */ - public Map rm(Map inputParam) throws StoRMXmlRpcException { + return executor.execute(OperationType.RM, inputParam); + } - return executor.execute(OperationType.RM, inputParam); - } + /** SrmMv functionality. */ + public Map mv(Map inputParam) throws StoRMXmlRpcException { - /** - * SrmMv functionality. 
- */ + return executor.execute(OperationType.MV, inputParam); + } - public Map mv(Map inputParam) throws StoRMXmlRpcException { + /** SrmPrepareToPut functionality. */ + public Map prepareToPut(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.MV, inputParam); - } + return executor.execute(OperationType.PTP, inputParam); + } - /** - * SrmPrepareToPut functionality. - */ - public Map prepareToPut(Map inputParam) throws StoRMXmlRpcException { + /** SrmPrepareToPutStatus functionality. */ + public Map prepareToPutStatus(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.PTP, inputParam); - } + return executor.execute(OperationType.SPTP, inputParam); + } - /** - * SrmPrepareToPutStatus functionality. - */ - public Map prepareToPutStatus(Map inputParam) throws StoRMXmlRpcException { + /** SrmPrepareToGet functionality. */ + public Map prepareToGet(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.SPTP, inputParam); - } - - /** - * SrmPrepareToGet functionality. - */ - public Map prepareToGet(Map inputParam) throws StoRMXmlRpcException { + return executor.execute(OperationType.PTG, inputParam); + } - return executor.execute(OperationType.PTG, inputParam); - } - - /** - * SrmPrepareToGetStatus functionality. - */ - public Map prepareToGetStatus(Map inputParam) throws StoRMXmlRpcException { + /** SrmPrepareToGetStatus functionality. */ + public Map prepareToGetStatus(Map inputParam) throws StoRMXmlRpcException { - return executor.execute(OperationType.SPTG, inputParam); - } + return executor.execute(OperationType.SPTG, inputParam); + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/XmlRpcTokenFilter.java b/src/main/java/it/grid/storm/xmlrpc/XmlRpcTokenFilter.java index 84b9bd5a..36ed95e3 100644 --- a/src/main/java/it/grid/storm/xmlrpc/XmlRpcTokenFilter.java +++ b/src/main/java/it/grid/storm/xmlrpc/XmlRpcTokenFilter.java @@ -1,13 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc; import java.io.IOException; import java.util.regex.Matcher; import java.util.regex.Pattern; - import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; @@ -16,121 +14,103 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Drop requests that does not contain the expected security token. - * + * * @author valerioventuri - * */ public class XmlRpcTokenFilter implements Filter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(XMLRPCHttpServer.class); - - /** - * - */ - public static final String STORM_HEADER_PATTERN_STRING = "^\\s*STORM/(\\S+).*$"; - - /** - * - */ - public static final Pattern STORM_HEADER_PATTERN = Pattern - .compile(STORM_HEADER_PATTERN_STRING); - - /** - * This is the string that has to be provided for requests to go through. - */ - private String secret; + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(XMLRPCHttpServer.class); - /** - * Constructor. 
- * - * @param secret - */ - public XmlRpcTokenFilter(String secret) { + /** */ + public static final String STORM_HEADER_PATTERN_STRING = "^\\s*STORM/(\\S+).*$"; - this.secret = secret; - } + /** */ + public static final Pattern STORM_HEADER_PATTERN = Pattern.compile(STORM_HEADER_PATTERN_STRING); - /** - * Pass over to the next filter if and only if the request contains a security - * token that match the expected. - * - */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, - FilterChain chain) throws IOException, ServletException { + /** This is the string that has to be provided for requests to go through. */ + private String secret; - HttpServletRequest httpRequest = (HttpServletRequest) request; + /** + * Constructor. + * + * @param secret + */ + public XmlRpcTokenFilter(String secret) { - String header = httpRequest.getHeader("User-Agent"); + this.secret = secret; + } - String token = parseStormToken(header); + /** + * Pass over to the next filter if and only if the request contains a security token that match + * the expected. + */ + @Override + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { - if (token == null) { + HttpServletRequest httpRequest = (HttpServletRequest) request; - log - .error("The XML-RPC request security token is missing. The calling service is probably misconfigured."); + String header = httpRequest.getHeader("User-Agent"); - ((HttpServletResponse) response).getWriter().write( - prepareXml("The request security token is missing")); + String token = parseStormToken(header); - return; + if (token == null) { - } + log.error( + "The XML-RPC request security token is missing. The calling service is probably misconfigured."); - if (!token.equals(this.secret)) { + ((HttpServletResponse) response) + .getWriter() + .write(prepareXml("The request security token is missing")); - log - .error("The XML-RPC request security token does not match. The calling service is probably misconfigured."); + return; + } - ((HttpServletResponse) response).getWriter().write( - prepareXml("The request security token does not match")); + if (!token.equals(this.secret)) { - return; - } + log.error( + "The XML-RPC request security token does not match. 
The calling service is probably misconfigured."); - chain.doFilter(request, response); - } + ((HttpServletResponse) response) + .getWriter() + .write(prepareXml("The request security token does not match")); - /** - * - */ - public static final String parseStormToken(String headerContent) { + return; + } - Matcher m = STORM_HEADER_PATTERN.matcher(headerContent); + chain.doFilter(request, response); + } - if (m.matches()) - return m.group(1); + /** */ + public static final String parseStormToken(String headerContent) { - return null; - } + Matcher m = STORM_HEADER_PATTERN.matcher(headerContent); - private String prepareXml(String message) { + if (m.matches()) return m.group(1); - return "" - + "" - + "faultCode0" - + "faultString" + message + "" - + ""; - } - - @Override - public void init(FilterConfig arg0) throws ServletException { + return null; + } - } + private String prepareXml(String message) { - @Override - public void destroy() { + return "" + + "" + + "faultCode0" + + "faultString" + + message + + "" + + ""; + } - } + @Override + public void init(FilterConfig arg0) throws ServletException {} + @Override + public void destroy() {} } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/Converter.java b/src/main/java/it/grid/storm/xmlrpc/converter/Converter.java index 5ef8f37f..7100c4f7 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/Converter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/Converter.java @@ -1,40 +1,30 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter; import it.grid.storm.synchcall.data.InputData; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.xmlrpc.StoRMXmlRpcException; - import java.util.Map; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * - * - * Authors: - * + * + *

Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it - * * @date = Oct 9, 2008 - * */ - public interface Converter { - /** - * This method return a RmInputData created from input Hashtable structure of - * an xmlrpc Rm v2.1 call. Rm Input Data can be used to invoke mkdir method of - * DirectoryFunctionsManager - */ - public abstract InputData convertToInputData(Map inputParam) - throws IllegalArgumentException, StoRMXmlRpcException; - - public abstract Map convertFromOutputData( - OutputData outputData) throws IllegalArgumentException; + /** + * This method return a RmInputData created from input Hashtable structure of an xmlrpc Rm v2.1 + * call. Rm Input Data can be used to invoke mkdir method of DirectoryFunctionsManager + */ + public abstract InputData convertToInputData(Map inputParam) + throws IllegalArgumentException, StoRMXmlRpcException; -} \ No newline at end of file + public abstract Map convertFromOutputData(OutputData outputData) + throws IllegalArgumentException; +} diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/ConveterFactory.java b/src/main/java/it/grid/storm/xmlrpc/converter/ConveterFactory.java index aa7791a3..3e177150 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/ConveterFactory.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/ConveterFactory.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter; @@ -9,10 +8,10 @@ import it.grid.storm.xmlrpc.converter.datatransfer.AbortFilesConverter; import it.grid.storm.xmlrpc.converter.datatransfer.AbortRequestConverter; import it.grid.storm.xmlrpc.converter.datatransfer.ExtendFileLifeTimeConverter; +import it.grid.storm.xmlrpc.converter.datatransfer.ManageFileTransferRequestConverter; import it.grid.storm.xmlrpc.converter.datatransfer.PrepareToGetRequestConverter; import it.grid.storm.xmlrpc.converter.datatransfer.PrepareToPutRequestConverter; import it.grid.storm.xmlrpc.converter.datatransfer.PutDoneConverter; -import it.grid.storm.xmlrpc.converter.datatransfer.ManageFileTransferRequestConverter; import it.grid.storm.xmlrpc.converter.directory.LsConverter; import it.grid.storm.xmlrpc.converter.directory.MkdirConverter; import it.grid.storm.xmlrpc.converter.directory.MvConverter; @@ -27,85 +26,79 @@ import org.slf4j.LoggerFactory; /** - * ************************************************************************ This - * file is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not - * use this file except in compliance with the License. You may obtain a copy of - * the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by - * applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS - * OF ANY KIND, either express or implied. See the License for the specific + * ************************************************************************ This file is part of the + * StoRM project. Copyright (c) 2008 INFN-CNAF. + * + *

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file + * except in compliance with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in + * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. - * ************************************************************************ This - * class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * Authors: - * + * ************************************************************************ This class is part of + * the StoRM project. Copyright (c) 2008 INFN-CNAF. + * + *

Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it * @date = Oct 9, 2008 */ - public class ConveterFactory { - private static final Logger log = LoggerFactory - .getLogger(ConveterFactory.class); - - /** - * @param type - * @return null if no converter is available for the provided OperationType - */ - public static Converter getConverter(OperationType type) - throws StoRMXmlRpcException { + private static final Logger log = LoggerFactory.getLogger(ConveterFactory.class); - switch (type) { - case RM: - return new RmConverter(); - case RMD: - return new RmdirConverter(); - case MKD: - return new MkdirConverter(); - case MV: - return new MvConverter(); - case LS: - return new LsConverter(); + /** + * @param type + * @return null if no converter is available for the provided OperationType + */ + public static Converter getConverter(OperationType type) throws StoRMXmlRpcException { - case PNG: - return new PingConverter(); + switch (type) { + case RM: + return new RmConverter(); + case RMD: + return new RmdirConverter(); + case MKD: + return new MkdirConverter(); + case MV: + return new MvConverter(); + case LS: + return new LsConverter(); - case GSM: - return new GetSpaceMetaDataConverter(); - case GST: - return new GetSpaceTokensConverter(); - case RESSP: - return new ReserveSpaceConverter(); - case RELSP: - return new ReleaseSpaceConverter(); + case PNG: + return new PingConverter(); - case PD: - return new PutDoneConverter(); - case RF: - return new ManageFileTransferRequestConverter(); - case EFL: - return new ExtendFileLifeTimeConverter(); - case AF: - return new AbortFilesConverter(); - case AR: - return new AbortRequestConverter(); + case GSM: + return new GetSpaceMetaDataConverter(); + case GST: + return new GetSpaceTokensConverter(); + case RESSP: + return new ReserveSpaceConverter(); + case RELSP: + return new ReleaseSpaceConverter(); - case PTP: - return new PrepareToPutRequestConverter(); - case SPTP: - return new ManageFileTransferRequestConverter(); - case PTG: - return new PrepareToGetRequestConverter(); - case SPTG: - return new ManageFileTransferRequestConverter(); - default: - log.error("No Converter available for OperationType {}" , type); - throw new StoRMXmlRpcException( - "No Converter available for OperationType " + type); - } - } + case PD: + return new PutDoneConverter(); + case RF: + return new ManageFileTransferRequestConverter(); + case EFL: + return new ExtendFileLifeTimeConverter(); + case AF: + return new AbortFilesConverter(); + case AR: + return new AbortRequestConverter(); + case PTP: + return new PrepareToPutRequestConverter(); + case SPTP: + return new ManageFileTransferRequestConverter(); + case PTG: + return new PrepareToGetRequestConverter(); + case SPTG: + return new ManageFileTransferRequestConverter(); + default: + log.error("No Converter available for OperationType {}", type); + throw new StoRMXmlRpcException("No Converter available for OperationType " + type); + } + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/ParameterDisplayHelper.java b/src/main/java/it/grid/storm/xmlrpc/converter/ParameterDisplayHelper.java index da47eef7..abf1bc65 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/ParameterDisplayHelper.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/ParameterDisplayHelper.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter; @@ -9,32 +8,30 @@ public class ParameterDisplayHelper { - private static final String sepBegin = "("; - private static final String sepEnd = ")"; - private static final String arrow = "->"; - - public static String display(Map map) { - - StringBuilder sb = new StringBuilder("["); - for (Object mapKey : map.keySet()) { - String mapKeyStr = mapKey.toString(); - sb.append(sepBegin).append(mapKeyStr); - if ((mapKeyStr.equals("userFQANS")) - || (mapKeyStr.equals(ArrayOfSURLs.ARRAY_OF_SURLS))) { - sb.append(arrow).append("["); - Object[] mapKeyValues = (Object[]) map.get(mapKey); - for (int i = 0; i < mapKeyValues.length - 1; i++) { - sb.append(mapKeyValues[i].toString()).append(","); - } - sb.append(mapKeyValues[mapKeyValues.length - 1]).append("]"); - - } else { - String mapKeyValue = "'" + (map.get(mapKey)).toString() + "'"; - sb.append(arrow).append(mapKeyValue).append("]"); - } - } - - return sb.append("]").toString(); - } - + private static final String sepBegin = "("; + private static final String sepEnd = ")"; + private static final String arrow = "->"; + + public static String display(Map map) { + + StringBuilder sb = new StringBuilder("["); + for (Object mapKey : map.keySet()) { + String mapKeyStr = mapKey.toString(); + sb.append(sepBegin).append(mapKeyStr); + if ((mapKeyStr.equals("userFQANS")) || (mapKeyStr.equals(ArrayOfSURLs.ARRAY_OF_SURLS))) { + sb.append(arrow).append("["); + Object[] mapKeyValues = (Object[]) map.get(mapKey); + for (int i = 0; i < mapKeyValues.length - 1; i++) { + sb.append(mapKeyValues[i].toString()).append(","); + } + sb.append(mapKeyValues[mapKeyValues.length - 1]).append("]"); + + } else { + String mapKeyValue = "'" + (map.get(mapKey)).toString() + "'"; + sb.append(arrow).append(mapKeyValue).append("]"); + } + } + + return sb.append("]").toString(); + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortFilesConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortFilesConverter.java index e79fd107..cfe9a94d 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortFilesConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortFilesConverter.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for AbortFiles function. This class - * receives input datas from xmlrpc call and converts these datas into a StoRM - * Type that can be used to invoke the AbortManager. - * + * This class represents the Type Converter for AbortFiles function. This class receives input datas + * from xmlrpc call and converts these datas into a StoRM Type that can be used to invoke the + * AbortManager. 
+ * * @author Magnoni Luca * @author CNAF-INFN Bologna * @date Jan 2007 @@ -30,87 +29,76 @@ import it.grid.storm.synchcall.data.datatransfer.AnonymousAbortFilesInputData; import it.grid.storm.synchcall.data.datatransfer.IdentityAbortFilesInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class AbortFilesConverter implements Converter { - private static final Logger log = LoggerFactory - .getLogger(AbortFilesConverter.class); - - public AbortFilesConverter() { - - } - - /** - * This method returns a AbortFilesInputData created from the input Hashtable - * structure of a xmlrpc srmAbortFiles() v2.2 call. - * - * @param inputParam - * Hashtable containing the input data - * @return AbortFilesInputData - */ - public InputData convertToInputData(Map inputParam) { - - GridUserInterface guser = GridUserManager.decode(inputParam); - - TRequestToken requestToken; - try { - requestToken = TRequestToken.decode(inputParam, - TRequestToken.PNAME_REQUESTOKEN); - log.debug("requestToken={}" , requestToken.toString()); - } catch (InvalidTRequestTokenAttributesException e) { - requestToken = null; - log.debug("requestToken=NULL",e); - } - - ArrayOfSURLs arrayOfSURLs; - try { - arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e) { - log.debug("Empty surlArray!"); - arrayOfSURLs = null; - } - - AbortFilesInputData inputData; - if (guser != null) { - inputData = new IdentityAbortFilesInputData(guser, requestToken, - arrayOfSURLs); - } else { - inputData = new AnonymousAbortFilesInputData(requestToken, arrayOfSURLs); - } - return inputData; - } - - public Map convertFromOutputData(OutputData data) { - - log.debug("AbortFilesOutputData - Creation of XMLRPC Output Structure!"); - - Map outputParam = new HashMap(); - AbortFilesOutputData outputData = AbortFilesOutputData - .make((AbortGeneralOutputData) data); - - // (1) returnStatus - TReturnStatus returnStatus = outputData.getReturnStatus(); - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - - // (2) arrayOfFileStatuses - ArrayOfTSURLReturnStatus arrayOfFileStatuses = outputData - .getArrayOfFileStatuses(); - if (arrayOfFileStatuses != null) { - arrayOfFileStatuses.encode(outputParam, - ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); - } - - log.debug("AbortFilesConverter - Sending: {}" , outputParam.toString()); - - // Return global structure. - return outputParam; - } + private static final Logger log = LoggerFactory.getLogger(AbortFilesConverter.class); + + public AbortFilesConverter() {} + + /** + * This method returns a AbortFilesInputData created from the input Hashtable structure of a + * xmlrpc srmAbortFiles() v2.2 call. 
+ * + * @param inputParam Hashtable containing the input data + * @return AbortFilesInputData + */ + public InputData convertToInputData(Map inputParam) { + + GridUserInterface guser = GridUserManager.decode(inputParam); + + TRequestToken requestToken; + try { + requestToken = TRequestToken.decode(inputParam, TRequestToken.PNAME_REQUESTOKEN); + log.debug("requestToken={}", requestToken.toString()); + } catch (InvalidTRequestTokenAttributesException e) { + requestToken = null; + log.debug("requestToken=NULL", e); + } + + ArrayOfSURLs arrayOfSURLs; + try { + arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e) { + log.debug("Empty surlArray!"); + arrayOfSURLs = null; + } + + AbortFilesInputData inputData; + if (guser != null) { + inputData = new IdentityAbortFilesInputData(guser, requestToken, arrayOfSURLs); + } else { + inputData = new AnonymousAbortFilesInputData(requestToken, arrayOfSURLs); + } + return inputData; + } + + public Map convertFromOutputData(OutputData data) { + + log.debug("AbortFilesOutputData - Creation of XMLRPC Output Structure!"); + + Map outputParam = new HashMap(); + AbortFilesOutputData outputData = AbortFilesOutputData.make((AbortGeneralOutputData) data); + + // (1) returnStatus + TReturnStatus returnStatus = outputData.getReturnStatus(); + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + + // (2) arrayOfFileStatuses + ArrayOfTSURLReturnStatus arrayOfFileStatuses = outputData.getArrayOfFileStatuses(); + if (arrayOfFileStatuses != null) { + arrayOfFileStatuses.encode(outputParam, ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); + } + + log.debug("AbortFilesConverter - Sending: {}", outputParam.toString()); + + // Return global structure. + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortRequestConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortRequestConverter.java index 0ec524de..cb1c0e70 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortRequestConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/AbortRequestConverter.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for AbortRequest function. This - * class receives input datas from xmlrpc call and converts these datas into a - * StoRM Type that can be used to invoke the AbortManager. - * + * This class represents the Type Converter for AbortRequest function. This class receives input + * datas from xmlrpc call and converts these datas into a StoRM Type that can be used to invoke the + * AbortManager. 
+ * * @author Magnoni Luca * @author CNAF-INFN Bologna * @date Jan 2007 @@ -27,69 +26,61 @@ import it.grid.storm.synchcall.data.datatransfer.AnonymousAbortRequestInputData; import it.grid.storm.synchcall.data.datatransfer.IdentityAbortRequestInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class AbortRequestConverter implements Converter { - private static final Logger log = LoggerFactory - .getLogger(AbortRequestConverter.class); - - public AbortRequestConverter() { - - } - - /** - * This method returns a AbortRequest data created from the input Hashtable - * structure of a xmlrpc srmAbortRequest() v2.2 call. - * - * @param inputParam - * Hashtable containing the input data - * @return AbortRequestInputData - */ - public InputData convertToInputData(Map inputParam) { - - GridUserInterface guser = GridUserManager.decode(inputParam); - - TRequestToken requestToken; - try { - requestToken = TRequestToken.decode(inputParam, - TRequestToken.PNAME_REQUESTOKEN); - log.debug("requestToken={}" , requestToken.toString()); - } catch (InvalidTRequestTokenAttributesException e) { - requestToken = null; - log.debug("requestToken=NULL",e); - } - AbortInputData inputData; - if (guser != null) { - inputData = new IdentityAbortRequestInputData(guser, requestToken); - } else { - inputData = new AnonymousAbortRequestInputData(requestToken); - } - return inputData; - } - - public Map convertFromOutputData(OutputData data) { - - log.debug("AbortRequestOutputData - Creation of XMLRPC Output Structure!"); - - Map outputParam = new HashMap(); - AbortRequestOutputData outputData = AbortRequestOutputData - .make((AbortGeneralOutputData) data); - - // (1) returnStatus - TReturnStatus returnStatus = outputData.getReturnStatus(); - - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - log.debug("AbortRequestConverter - Sending: {}" , outputParam.toString()); - - // Return global structure. - return outputParam; - } + private static final Logger log = LoggerFactory.getLogger(AbortRequestConverter.class); + + public AbortRequestConverter() {} + + /** + * This method returns a AbortRequest data created from the input Hashtable structure of a xmlrpc + * srmAbortRequest() v2.2 call. 
+ * + * @param inputParam Hashtable containing the input data + * @return AbortRequestInputData + */ + public InputData convertToInputData(Map inputParam) { + + GridUserInterface guser = GridUserManager.decode(inputParam); + + TRequestToken requestToken; + try { + requestToken = TRequestToken.decode(inputParam, TRequestToken.PNAME_REQUESTOKEN); + log.debug("requestToken={}", requestToken.toString()); + } catch (InvalidTRequestTokenAttributesException e) { + requestToken = null; + log.debug("requestToken=NULL", e); + } + AbortInputData inputData; + if (guser != null) { + inputData = new IdentityAbortRequestInputData(guser, requestToken); + } else { + inputData = new AnonymousAbortRequestInputData(requestToken); + } + return inputData; + } + + public Map convertFromOutputData(OutputData data) { + + log.debug("AbortRequestOutputData - Creation of XMLRPC Output Structure!"); + + Map outputParam = new HashMap(); + AbortRequestOutputData outputData = AbortRequestOutputData.make((AbortGeneralOutputData) data); + + // (1) returnStatus + TReturnStatus returnStatus = outputData.getReturnStatus(); + + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + log.debug("AbortRequestConverter - Sending: {}", outputParam.toString()); + + // Return global structure. + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ExtendFileLifeTimeConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ExtendFileLifeTimeConverter.java index 29aeff2b..3d85b4dc 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ExtendFileLifeTimeConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ExtendFileLifeTimeConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; @@ -17,118 +16,105 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.AnonymousExtendFileLifeTimeInputData; import it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeInputData; -import it.grid.storm.synchcall.data.datatransfer.IdentityExtendFileLifeTimeInputData; import it.grid.storm.synchcall.data.datatransfer.ExtendFileLifeTimeOutputData; +import it.grid.storm.synchcall.data.datatransfer.IdentityExtendFileLifeTimeInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Hashtable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * - * This class represents the Type Converter for ExtendFileLifeTime function. - * This class receives input datas from xmlrpc call and converts these datas - * into a StoRM Type that can be used to invoke the ExtendFileLifeTimeManager. - * - * Authors: - * + * + *

This class represents the Type Converter for ExtendFileLifeTime function. This class receives + * input datas from xmlrpc call and converts these datas into a StoRM Type that can be used to + * invoke the ExtendFileLifeTimeManager. + * + *

Authors: + * * @author lucamag luca.magnoniATcnaf.infn.it * @author Alberto Forti - * * @date = Oct 10, 2008 - * */ - public class ExtendFileLifeTimeConverter implements Converter { - private static final Logger log = LoggerFactory - .getLogger(ExtendFileLifeTimeConverter.class); - - public ExtendFileLifeTimeConverter() { - - } - - /** - * This method returns a ExtendFileLifeTimeInputData created from the input - * Hashtable structure of a xmlrpc srmExtendFileLifeTime() v2.2 call. - * - * @param inputParam - * Hashtable containing the input data - * @return ExtendFileLifeTimeInputData - */ - public InputData convertToInputData(Map inputParam) { - - GridUserInterface guser = GridUserManager.decode(inputParam); - - String authID = (String) inputParam.get("authorizationID"); - - TRequestToken requestToken; - try { - requestToken = TRequestToken.decode(inputParam, - TRequestToken.PNAME_REQUESTOKEN); - log.debug("requestToken={}" , requestToken.toString()); - } catch (InvalidTRequestTokenAttributesException e) { - requestToken = null; - log.error("requestToken=NULL",e); - } - - ArrayOfSURLs arrayOfSURLs; - try { - arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e) { - log.error("Empty surlArray!",e); - arrayOfSURLs = null; - } - - TLifeTimeInSeconds newFileLifetime = TLifeTimeInSeconds.decode(inputParam, - TLifeTimeInSeconds.PNAME_FILELIFETIME); - - TLifeTimeInSeconds newPinLifetime = TLifeTimeInSeconds.decode(inputParam, - TLifeTimeInSeconds.PNAME_PINLIFETIME); - - ExtendFileLifeTimeInputData inputData; - if (guser != null) { - inputData = new IdentityExtendFileLifeTimeInputData(guser, requestToken, - arrayOfSURLs, newFileLifetime, newPinLifetime); - } else { - inputData = new AnonymousExtendFileLifeTimeInputData(requestToken, - arrayOfSURLs, newFileLifetime, newPinLifetime); - } - return inputData; - } - - public Hashtable convertFromOutputData(OutputData data) { - - log - .debug("ExtendFileLifeTimeOutputData - Creation of XMLRPC Output Structure!"); - - Hashtable outputParam = new Hashtable(); - ExtendFileLifeTimeOutputData outputData = (ExtendFileLifeTimeOutputData) data; - - // (1) returnStatus - TReturnStatus returnStatus = outputData.getReturnStatus(); - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - - // (2) arrayOfFileStatuses - ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses = outputData - .getArrayOfFileStatuses(); - if (arrayOfFileStatuses != null) { - arrayOfFileStatuses.encode(outputParam, - ArrayOfTSURLLifetimeReturnStatus.PNAME_ARRAYOFFILESTATUSES); - } - - log.debug("ExtendFileLifeTimeConverter - Sending: {}" - , outputParam.toString()); - - // Return global structure. - return outputParam; - } + private static final Logger log = LoggerFactory.getLogger(ExtendFileLifeTimeConverter.class); + + public ExtendFileLifeTimeConverter() {} + + /** + * This method returns a ExtendFileLifeTimeInputData created from the input Hashtable structure of + * a xmlrpc srmExtendFileLifeTime() v2.2 call. 
+ * + * @param inputParam Hashtable containing the input data + * @return ExtendFileLifeTimeInputData + */ + public InputData convertToInputData(Map inputParam) { + + GridUserInterface guser = GridUserManager.decode(inputParam); + + String authID = (String) inputParam.get("authorizationID"); + + TRequestToken requestToken; + try { + requestToken = TRequestToken.decode(inputParam, TRequestToken.PNAME_REQUESTOKEN); + log.debug("requestToken={}", requestToken.toString()); + } catch (InvalidTRequestTokenAttributesException e) { + requestToken = null; + log.error("requestToken=NULL", e); + } + + ArrayOfSURLs arrayOfSURLs; + try { + arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e) { + log.error("Empty surlArray!", e); + arrayOfSURLs = null; + } + + TLifeTimeInSeconds newFileLifetime = + TLifeTimeInSeconds.decode(inputParam, TLifeTimeInSeconds.PNAME_FILELIFETIME); + + TLifeTimeInSeconds newPinLifetime = + TLifeTimeInSeconds.decode(inputParam, TLifeTimeInSeconds.PNAME_PINLIFETIME); + + ExtendFileLifeTimeInputData inputData; + if (guser != null) { + inputData = + new IdentityExtendFileLifeTimeInputData( + guser, requestToken, arrayOfSURLs, newFileLifetime, newPinLifetime); + } else { + inputData = + new AnonymousExtendFileLifeTimeInputData( + requestToken, arrayOfSURLs, newFileLifetime, newPinLifetime); + } + return inputData; + } + + public Hashtable convertFromOutputData(OutputData data) { + + log.debug("ExtendFileLifeTimeOutputData - Creation of XMLRPC Output Structure!"); + + Hashtable outputParam = new Hashtable(); + ExtendFileLifeTimeOutputData outputData = (ExtendFileLifeTimeOutputData) data; + + // (1) returnStatus + TReturnStatus returnStatus = outputData.getReturnStatus(); + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + + // (2) arrayOfFileStatuses + ArrayOfTSURLLifetimeReturnStatus arrayOfFileStatuses = outputData.getArrayOfFileStatuses(); + if (arrayOfFileStatuses != null) { + arrayOfFileStatuses.encode( + outputParam, ArrayOfTSURLLifetimeReturnStatus.PNAME_ARRAYOFFILESTATUSES); + } + + log.debug("ExtendFileLifeTimeConverter - Sending: {}", outputParam.toString()); + + // Return global structure. + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/FileTransferRequestInputConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/FileTransferRequestInputConverter.java index 8e92fe14..d1fa4386 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/FileTransferRequestInputConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/FileTransferRequestInputConverter.java @@ -1,13 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; -import java.util.Hashtable; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.griduser.GridUserManager; @@ -22,146 +17,153 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.AnonymousFileTransferInputData; import it.grid.storm.synchcall.data.datatransfer.FileTransferInputData; -import it.grid.storm.synchcall.data.datatransfer.IdentityFileTransferInputData; import it.grid.storm.synchcall.data.datatransfer.FileTransferOutputData; +import it.grid.storm.synchcall.data.datatransfer.IdentityFileTransferInputData; import it.grid.storm.xmlrpc.StoRMXmlRpcException; import it.grid.storm.xmlrpc.converter.Converter; +import java.util.Hashtable; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public abstract class FileTransferRequestInputConverter implements Converter { - static final Logger log = LoggerFactory - .getLogger(FileTransferRequestInputConverter.class); - - @Override - public InputData convertToInputData(Map inputParam) - throws IllegalArgumentException, StoRMXmlRpcException { - - TSURL surl = decodeSURL(inputParam); - if (surl == null) { - log.error("Missing mandatory parameter '{}' Unable to build FileTransferInputData" , TSURL.PNAME_SURL); - throw new IllegalArgumentException("Missing mandatory parameter \'" - + TSURL.PNAME_SURL + "\'"); - } - GridUserInterface user = decodeUser(inputParam); - TURLPrefix transferProtocols = decodeTransferProtocols(inputParam); - if (transferProtocols == null) { - log.error("Missing mandatory parameter '{}' Unable to build FileTransferInputData" , TURLPrefix.PNAME_TURL_PREFIX); - throw new IllegalArgumentException("Missing mandatory parameter \'" - + TURLPrefix.PNAME_TURL_PREFIX + "\'"); - } - - FileTransferInputData inputData; - try { - if (user != null) { - inputData = new IdentityFileTransferInputData(user, surl, - transferProtocols); - } else { - inputData = new AnonymousFileTransferInputData(surl, transferProtocols); - } - } catch (IllegalArgumentException e) { - log - .error("Unable to build FileTransferInputData. IllegalArgumentException: {}" - , e.getMessage(),e); - throw new StoRMXmlRpcException("Unable to build FileTransferInputData"); - } - TLifeTimeInSeconds desiredPinLifetime = decodeDesiredPinLifetime(inputParam); - if (desiredPinLifetime != null) { - inputData.setDesiredPinLifetime(desiredPinLifetime); - } - TSpaceToken targetSpaceToken = decodeTargetSpaceToken(inputParam); - if (targetSpaceToken != null) { - inputData.setTargetSpaceToken(targetSpaceToken); - } - return inputData; - } - - @Override - public Map convertFromOutputData(OutputData outputData) - throws IllegalArgumentException { - - if (outputData == null) { - log.error("Unable to build an output map. Null argument: outputData={}" - , outputData); - throw new IllegalArgumentException( - "Unable to build a valid output map, null argument"); - } - if (!(outputData instanceof FileTransferOutputData)) { - log.error("Unable to convert from OutputData. Wrong OutputData type: '{}'" - , outputData.getClass().getName()); - throw new IllegalArgumentException( - "Unable to convert from OutputData. 
Wrong OutputData type: \'" - + outputData.getClass().getName() + "\'"); - } - FileTransferOutputData ftOutputData = (FileTransferOutputData) outputData; - TSURL surl = ftOutputData.getSurl(); - TTURL turl = ftOutputData.getTurl(); - TReturnStatus status = ftOutputData.getStatus(); - TRequestToken requestToken = ftOutputData.getRequestToken(); - if (surl == null || surl.isEmpty() || surl.getSURLString().trim().isEmpty() - || turl == null || status == null || requestToken == null - || requestToken.getValue() == null || requestToken.getValue().isEmpty()) { - log - .error("Unable to build a valid output map. Missing mandatory values from FileTransferOutputData: {}" - , ftOutputData.toString()); - throw new IllegalArgumentException( - "Unable to build a valid output map from FileTransferOutputData"); - } - Hashtable outputParam = new Hashtable(); - surl.encode(outputParam, TSURL.PNAME_SURL); - turl.encode(outputParam, TTURL.PNAME_TURL); - status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - outputParam.put(TRequestToken.PNAME_REQUESTOKEN, requestToken.toString()); - log.debug("Built output Map: {}" , outputParam.toString()); - return outputParam; - } - - protected TSpaceToken decodeTargetSpaceToken(Map inputParam) { - - return TSpaceToken.decode(inputParam, TSpaceToken.PNAME_SPACETOKEN); - } - - protected TLifeTimeInSeconds decodeDesiredPinLifetime( - Map inputParam) { - - return TLifeTimeInSeconds.decode(inputParam, - TLifeTimeInSeconds.PNAME_PINLIFETIME); - } - - protected TURLPrefix decodeTransferProtocols(Map inputParam) - throws IllegalArgumentException { - - TURLPrefix transferProtocols = TURLPrefix.decode(inputParam, - TURLPrefix.PNAME_TURL_PREFIX); - if (transferProtocols == null) { - log.error("Missing mandatory parameter '{}' Unable to build FileTransferInputData" , TURLPrefix.PNAME_TURL_PREFIX); - throw new IllegalArgumentException("Missing mandatory parameter \'" - + TURLPrefix.PNAME_TURL_PREFIX + "\'"); - } - return transferProtocols; - } - - protected GridUserInterface decodeUser(Map inputParam) { - - return GridUserManager.decode(inputParam); - } - - protected TSURL decodeSURL(Map inputParam) - throws IllegalArgumentException { - - TSURL surl = null; - try { - surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); - } catch (InvalidTSURLAttributesException e) { - log.error("Unable to decode '{}' parameter as TSURL. 
InvalidTSURLAttributesException: {}" , TSURL.PNAME_SURL - , e.getMessage(),e); - throw new IllegalArgumentException("Unable to decode \'" - + TSURL.PNAME_SURL + "\' parameter as TSURL"); - } - return surl; - } - + static final Logger log = LoggerFactory.getLogger(FileTransferRequestInputConverter.class); + + @Override + public InputData convertToInputData(Map inputParam) + throws IllegalArgumentException, StoRMXmlRpcException { + + TSURL surl = decodeSURL(inputParam); + if (surl == null) { + log.error( + "Missing mandatory parameter '{}' Unable to build FileTransferInputData", + TSURL.PNAME_SURL); + throw new IllegalArgumentException( + "Missing mandatory parameter \'" + TSURL.PNAME_SURL + "\'"); + } + GridUserInterface user = decodeUser(inputParam); + TURLPrefix transferProtocols = decodeTransferProtocols(inputParam); + if (transferProtocols == null) { + log.error( + "Missing mandatory parameter '{}' Unable to build FileTransferInputData", + TURLPrefix.PNAME_TURL_PREFIX); + throw new IllegalArgumentException( + "Missing mandatory parameter \'" + TURLPrefix.PNAME_TURL_PREFIX + "\'"); + } + + FileTransferInputData inputData; + try { + if (user != null) { + inputData = new IdentityFileTransferInputData(user, surl, transferProtocols); + } else { + inputData = new AnonymousFileTransferInputData(surl, transferProtocols); + } + } catch (IllegalArgumentException e) { + log.error( + "Unable to build FileTransferInputData. IllegalArgumentException: {}", e.getMessage(), e); + throw new StoRMXmlRpcException("Unable to build FileTransferInputData"); + } + TLifeTimeInSeconds desiredPinLifetime = decodeDesiredPinLifetime(inputParam); + if (desiredPinLifetime != null) { + inputData.setDesiredPinLifetime(desiredPinLifetime); + } + TSpaceToken targetSpaceToken = decodeTargetSpaceToken(inputParam); + if (targetSpaceToken != null) { + inputData.setTargetSpaceToken(targetSpaceToken); + } + return inputData; + } + + @Override + public Map convertFromOutputData(OutputData outputData) + throws IllegalArgumentException { + + if (outputData == null) { + log.error("Unable to build an output map. Null argument: outputData={}", outputData); + throw new IllegalArgumentException("Unable to build a valid output map, null argument"); + } + if (!(outputData instanceof FileTransferOutputData)) { + log.error( + "Unable to convert from OutputData. Wrong OutputData type: '{}'", + outputData.getClass().getName()); + throw new IllegalArgumentException( + "Unable to convert from OutputData. Wrong OutputData type: \'" + + outputData.getClass().getName() + + "\'"); + } + FileTransferOutputData ftOutputData = (FileTransferOutputData) outputData; + TSURL surl = ftOutputData.getSurl(); + TTURL turl = ftOutputData.getTurl(); + TReturnStatus status = ftOutputData.getStatus(); + TRequestToken requestToken = ftOutputData.getRequestToken(); + if (surl == null + || surl.isEmpty() + || surl.getSURLString().trim().isEmpty() + || turl == null + || status == null + || requestToken == null + || requestToken.getValue() == null + || requestToken.getValue().isEmpty()) { + log.error( + "Unable to build a valid output map. 
Missing mandatory values from FileTransferOutputData: {}", + ftOutputData.toString()); + throw new IllegalArgumentException( + "Unable to build a valid output map from FileTransferOutputData"); + } + Hashtable outputParam = new Hashtable(); + surl.encode(outputParam, TSURL.PNAME_SURL); + turl.encode(outputParam, TTURL.PNAME_TURL); + status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + outputParam.put(TRequestToken.PNAME_REQUESTOKEN, requestToken.toString()); + log.debug("Built output Map: {}", outputParam.toString()); + return outputParam; + } + + protected TSpaceToken decodeTargetSpaceToken(Map inputParam) { + + return TSpaceToken.decode(inputParam, TSpaceToken.PNAME_SPACETOKEN); + } + + protected TLifeTimeInSeconds decodeDesiredPinLifetime(Map inputParam) { + + return TLifeTimeInSeconds.decode(inputParam, TLifeTimeInSeconds.PNAME_PINLIFETIME); + } + + protected TURLPrefix decodeTransferProtocols(Map inputParam) + throws IllegalArgumentException { + + TURLPrefix transferProtocols = TURLPrefix.decode(inputParam, TURLPrefix.PNAME_TURL_PREFIX); + if (transferProtocols == null) { + log.error( + "Missing mandatory parameter '{}' Unable to build FileTransferInputData", + TURLPrefix.PNAME_TURL_PREFIX); + throw new IllegalArgumentException( + "Missing mandatory parameter \'" + TURLPrefix.PNAME_TURL_PREFIX + "\'"); + } + return transferProtocols; + } + + protected GridUserInterface decodeUser(Map inputParam) { + + return GridUserManager.decode(inputParam); + } + + protected TSURL decodeSURL(Map inputParam) throws IllegalArgumentException { + + TSURL surl = null; + try { + surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); + } catch (InvalidTSURLAttributesException e) { + log.error( + "Unable to decode '{}' parameter as TSURL. InvalidTSURLAttributesException: {}", + TSURL.PNAME_SURL, + e.getMessage(), + e); + throw new IllegalArgumentException( + "Unable to decode \'" + TSURL.PNAME_SURL + "\' parameter as TSURL"); + } + return surl; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferConverter.java index b60fa58c..00872b8f 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferConverter.java @@ -1,56 +1,47 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; -import java.util.Hashtable; -import java.util.Map; -import org.slf4j.Logger; import it.grid.storm.srm.types.ArrayOfTSURLReturnStatus; import it.grid.storm.srm.types.TReturnStatus; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferOutputData; import it.grid.storm.xmlrpc.converter.Converter; +import java.util.Hashtable; +import java.util.Map; +import org.slf4j.Logger; -/** - * @author Michele Dibenedetto - * - */ +/** @author Michele Dibenedetto */ public abstract class ManageFileTransferConverter implements Converter { - public ManageFileTransferConverter() { - - super(); - } + public ManageFileTransferConverter() { - public Map convertFromOutputData(OutputData data) { + super(); + } - getLogger().debug( - "Started ReleaseFilesConverter - Creation of XMLRPC Output Structure!"); + public Map convertFromOutputData(OutputData data) { - Hashtable outputParam = new Hashtable(); - ManageFileTransferOutputData outputData = (ManageFileTransferOutputData) data; - /* (1) returnStatus */ - TReturnStatus returnStatus = outputData.getReturnStatus(); - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } + getLogger().debug("Started ReleaseFilesConverter - Creation of XMLRPC Output Structure!"); - /* (2) arrayOfFileStatuses */ - ArrayOfTSURLReturnStatus arrayOfFileStatuses = outputData - .getArrayOfFileStatuses(); - if (arrayOfFileStatuses != null) { - arrayOfFileStatuses.encode(outputParam, - ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); - } + Hashtable outputParam = new Hashtable(); + ManageFileTransferOutputData outputData = (ManageFileTransferOutputData) data; + /* (1) returnStatus */ + TReturnStatus returnStatus = outputData.getReturnStatus(); + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } - getLogger().debug( - "ReleaseFilesConverter - Sending: " + outputParam.toString()); + /* (2) arrayOfFileStatuses */ + ArrayOfTSURLReturnStatus arrayOfFileStatuses = outputData.getArrayOfFileStatuses(); + if (arrayOfFileStatuses != null) { + arrayOfFileStatuses.encode(outputParam, ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); + } - return outputParam; - } + getLogger().debug("ReleaseFilesConverter - Sending: " + outputParam.toString()); - protected abstract Logger getLogger(); + return outputParam; + } + protected abstract Logger getLogger(); } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferRequestConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferRequestConverter.java index bd20866a..8a8000d3 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferRequestConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/ManageFileTransferRequestConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; @@ -18,83 +17,78 @@ import it.grid.storm.synchcall.data.datatransfer.IdentityManageFileTransferRequestFilesInputData; import it.grid.storm.synchcall.data.datatransfer.IdentityReleaseRequestInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class ManageFileTransferRequestConverter extends - ManageFileTransferConverter implements Converter { - - static final Logger log = LoggerFactory - .getLogger(ManageFileTransferRequestConverter.class); +public class ManageFileTransferRequestConverter extends ManageFileTransferConverter + implements Converter { - /** - * This method returns a ReleaseFilesInputData created from the input - * Hashtable structure of an xmlrpc ReleaseFiles v2.2 call. - * - * @param inputParam - * Hashtable containing the input data - * @return ReleaseFilesInputData - */ - public InputData convertToInputData(Map inputParam) { + static final Logger log = LoggerFactory.getLogger(ManageFileTransferRequestConverter.class); - GridUserInterface guser = GridUserManager.decode(inputParam); + /** + * This method returns a ReleaseFilesInputData created from the input Hashtable structure of an + * xmlrpc ReleaseFiles v2.2 call. + * + * @param inputParam Hashtable containing the input data + * @return ReleaseFilesInputData + */ + public InputData convertToInputData(Map inputParam) { - /* (2) TRequestToken requestToken */ - TRequestToken requestToken = null; - try { - requestToken = TRequestToken.decode(inputParam, - TRequestToken.PNAME_REQUESTOKEN); - log.debug("requestToken={}" , requestToken.toString()); - } catch (InvalidTRequestTokenAttributesException e) { - log - .debug("No request token provided by user. InvalidTRequestTokenAttributesException: {}" - , e.getMessage(),e); - } + GridUserInterface guser = GridUserManager.decode(inputParam); - /* (3) anyURI[] arrayOfSURLs */ - ArrayOfSURLs arrayOfSURLs; - try { - arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e) { - log.debug("Empty surlArray!",e); - arrayOfSURLs = null; - } + /* (2) TRequestToken requestToken */ + TRequestToken requestToken = null; + try { + requestToken = TRequestToken.decode(inputParam, TRequestToken.PNAME_REQUESTOKEN); + log.debug("requestToken={}", requestToken.toString()); + } catch (InvalidTRequestTokenAttributesException e) { + log.debug( + "No request token provided by user. 
InvalidTRequestTokenAttributesException: {}", + e.getMessage(), + e); + } - InputData inputData; - if (guser != null) { - if (requestToken != null) { - if (arrayOfSURLs != null && arrayOfSURLs.size() > 0) { - inputData = new IdentityManageFileTransferRequestFilesInputData( - guser, requestToken, arrayOfSURLs); - } else { - inputData = new IdentityReleaseRequestInputData(guser, requestToken); - } - } else { - inputData = new IdentityManageFileTransferFilesInputData(guser, - arrayOfSURLs); - } - } else { - if (requestToken != null) { - if (arrayOfSURLs != null && arrayOfSURLs.size() > 0) { - inputData = new AnonymousManageFileTransferRequestFilesInputData( - requestToken, arrayOfSURLs); - } else { - inputData = new AnonymousReleaseRequestInputData(requestToken); - } - } else { - inputData = new AnonymousManageFileTransferFilesInputData(arrayOfSURLs); - } - } - return inputData; - } + /* (3) anyURI[] arrayOfSURLs */ + ArrayOfSURLs arrayOfSURLs; + try { + arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e) { + log.debug("Empty surlArray!", e); + arrayOfSURLs = null; + } - @Override - protected Logger getLogger() { + InputData inputData; + if (guser != null) { + if (requestToken != null) { + if (arrayOfSURLs != null && arrayOfSURLs.size() > 0) { + inputData = + new IdentityManageFileTransferRequestFilesInputData( + guser, requestToken, arrayOfSURLs); + } else { + inputData = new IdentityReleaseRequestInputData(guser, requestToken); + } + } else { + inputData = new IdentityManageFileTransferFilesInputData(guser, arrayOfSURLs); + } + } else { + if (requestToken != null) { + if (arrayOfSURLs != null && arrayOfSURLs.size() > 0) { + inputData = + new AnonymousManageFileTransferRequestFilesInputData(requestToken, arrayOfSURLs); + } else { + inputData = new AnonymousReleaseRequestInputData(requestToken); + } + } else { + inputData = new AnonymousManageFileTransferFilesInputData(arrayOfSURLs); + } + } + return inputData; + } - return log; - } + @Override + protected Logger getLogger() { + return log; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToGetRequestConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToGetRequestConverter.java index 4127bbef..4ea7f0bd 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToGetRequestConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToGetRequestConverter.java @@ -1,44 +1,38 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.srm.types.TLifeTimeInSeconds; import it.grid.storm.srm.types.TSizeInBytes; import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToGetOutputData; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ -public class PrepareToGetRequestConverter extends - FileTransferRequestInputConverter { - - private static final Logger log = LoggerFactory - .getLogger(PrepareToGetRequestConverter.class); +/** @author Michele Dibenedetto */ +public class PrepareToGetRequestConverter extends FileTransferRequestInputConverter { - @Override - public Map convertFromOutputData(OutputData outputData) { + private static final Logger log = LoggerFactory.getLogger(PrepareToGetRequestConverter.class); - if (!(outputData instanceof PrepareToGetOutputData)) { - log.error("Unable to convert from OutputData. Wrong OutputData type: '{}'" - , outputData.getClass().getName()); - throw new IllegalArgumentException( - "Unable to convert from OutputData. Wrong OutputData type: \'" - + outputData.getClass().getName() + "\'"); - } - Map outputParam = super.convertFromOutputData(outputData); - PrepareToGetOutputData ptgOutputData = (PrepareToGetOutputData) outputData; - ptgOutputData.getFileSize().encode(outputParam, TSizeInBytes.PNAME_SIZE); - ptgOutputData.getRemainingPinTime().encode(outputParam, - TLifeTimeInSeconds.PNAME_PINLIFETIME); - log.debug("Built output Map: {}" , outputParam.toString()); - return outputParam; - } + @Override + public Map convertFromOutputData(OutputData outputData) { + if (!(outputData instanceof PrepareToGetOutputData)) { + log.error( + "Unable to convert from OutputData. Wrong OutputData type: '{}'", + outputData.getClass().getName()); + throw new IllegalArgumentException( + "Unable to convert from OutputData. Wrong OutputData type: \'" + + outputData.getClass().getName() + + "\'"); + } + Map outputParam = super.convertFromOutputData(outputData); + PrepareToGetOutputData ptgOutputData = (PrepareToGetOutputData) outputData; + ptgOutputData.getFileSize().encode(outputParam, TSizeInBytes.PNAME_SIZE); + ptgOutputData.getRemainingPinTime().encode(outputParam, TLifeTimeInSeconds.PNAME_PINLIFETIME); + log.debug("Built output Map: {}", outputParam.toString()); + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToPutRequestConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToPutRequestConverter.java index 836c6a56..777fc6fc 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToPutRequestConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PrepareToPutRequestConverter.java @@ -1,12 +1,8 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.datatransfer; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import it.grid.storm.common.types.TURLPrefix; import it.grid.storm.griduser.GridUserInterface; import it.grid.storm.srm.types.TLifeTimeInSeconds; @@ -19,82 +15,77 @@ import it.grid.storm.synchcall.data.datatransfer.IdentityPrepareToPutInputData; import it.grid.storm.synchcall.data.datatransfer.PrepareToPutInputData; import it.grid.storm.xmlrpc.StoRMXmlRpcException; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * @author Michele Dibenedetto - * - */ -public class PrepareToPutRequestConverter extends - FileTransferRequestInputConverter { - - static final String OVERWRITE_MODE_PARAMETER_NAME = "overwriteMode"; - static final Logger log = LoggerFactory - .getLogger(PrepareToPutRequestConverter.class); +/** @author Michele Dibenedetto */ +public class PrepareToPutRequestConverter extends FileTransferRequestInputConverter { - @Override - public InputData convertToInputData(Map inputParam) - throws IllegalArgumentException, StoRMXmlRpcException { + static final String OVERWRITE_MODE_PARAMETER_NAME = "overwriteMode"; + static final Logger log = LoggerFactory.getLogger(PrepareToPutRequestConverter.class); - TSURL surl = decodeSURL(inputParam); - GridUserInterface user = decodeUser(inputParam); - TURLPrefix transferProtocols = decodeTransferProtocols(inputParam); + @Override + public InputData convertToInputData(Map inputParam) + throws IllegalArgumentException, StoRMXmlRpcException { - PrepareToPutInputData inputData; - try { - if (user != null) { - inputData = new IdentityPrepareToPutInputData(user, surl, - transferProtocols); - } else { - inputData = new AnonymousPrepareToPutInputData(surl, transferProtocols); - } - } catch (IllegalArgumentException e) { - log - .error("Unable to build PrepareToPutInputData. IllegalArgumentException: {}" - , e.getMessage(),e); - throw new StoRMXmlRpcException("Unable to build PrepareToPutInputData"); - } - TLifeTimeInSeconds desiredFileLifetime = TLifeTimeInSeconds.decode( - inputParam, TLifeTimeInSeconds.PNAME_FILELIFETIME); - if (desiredFileLifetime != null && !desiredFileLifetime.isEmpty()) { - inputData.setDesiredFileLifetime(desiredFileLifetime); + TSURL surl = decodeSURL(inputParam); + GridUserInterface user = decodeUser(inputParam); + TURLPrefix transferProtocols = decodeTransferProtocols(inputParam); - } + PrepareToPutInputData inputData; + try { + if (user != null) { + inputData = new IdentityPrepareToPutInputData(user, surl, transferProtocols); + } else { + inputData = new AnonymousPrepareToPutInputData(surl, transferProtocols); + } + } catch (IllegalArgumentException e) { + log.error( + "Unable to build PrepareToPutInputData. 
IllegalArgumentException: {}", e.getMessage(), e); + throw new StoRMXmlRpcException("Unable to build PrepareToPutInputData"); + } + TLifeTimeInSeconds desiredFileLifetime = + TLifeTimeInSeconds.decode(inputParam, TLifeTimeInSeconds.PNAME_FILELIFETIME); + if (desiredFileLifetime != null && !desiredFileLifetime.isEmpty()) { + inputData.setDesiredFileLifetime(desiredFileLifetime); + } - TLifeTimeInSeconds desiredPinLifetime = decodeDesiredPinLifetime(inputParam); - if (desiredPinLifetime != null) { - inputData.setDesiredPinLifetime(desiredPinLifetime); - } - TSpaceToken targetSpaceToken = decodeTargetSpaceToken(inputParam); - if (targetSpaceToken != null) { - inputData.setTargetSpaceToken(targetSpaceToken); - } - TSizeInBytes fileSize = TSizeInBytes.decode(inputParam, - TSizeInBytes.PNAME_SIZE); - if (fileSize != null) { - inputData.setFileSize(fileSize); - } + TLifeTimeInSeconds desiredPinLifetime = decodeDesiredPinLifetime(inputParam); + if (desiredPinLifetime != null) { + inputData.setDesiredPinLifetime(desiredPinLifetime); + } + TSpaceToken targetSpaceToken = decodeTargetSpaceToken(inputParam); + if (targetSpaceToken != null) { + inputData.setTargetSpaceToken(targetSpaceToken); + } + TSizeInBytes fileSize = TSizeInBytes.decode(inputParam, TSizeInBytes.PNAME_SIZE); + if (fileSize != null) { + inputData.setFileSize(fileSize); + } - String overwriteModeString = (String) inputParam - .get(OVERWRITE_MODE_PARAMETER_NAME); - if (overwriteModeString != null) { - TOverwriteMode overwriteMode; - try { - overwriteMode = TOverwriteMode.getTOverwriteMode(overwriteModeString); - } catch (IllegalArgumentException e) { - log.error("Unable to build TOverwriteMode from '{}'. IllegalArgumentException: {}" - , overwriteModeString - , e.getMessage() - , e); - throw new StoRMXmlRpcException("Unable to build PrepareToPutInputData"); - } - if (!overwriteMode.equals(TOverwriteMode.EMPTY)) { - inputData.setOverwriteMode(overwriteMode); - } else { - log - .warn("Unable to use the received '{}', interpreted as an empty value" , OVERWRITE_MODE_PARAMETER_NAME); - } - } - log.debug("PrepareToPutInputData Created!"); - return inputData; - } + String overwriteModeString = (String) inputParam.get(OVERWRITE_MODE_PARAMETER_NAME); + if (overwriteModeString != null) { + TOverwriteMode overwriteMode; + try { + overwriteMode = TOverwriteMode.getTOverwriteMode(overwriteModeString); + } catch (IllegalArgumentException e) { + log.error( + "Unable to build TOverwriteMode from '{}'. IllegalArgumentException: {}", + overwriteModeString, + e.getMessage(), + e); + throw new StoRMXmlRpcException("Unable to build PrepareToPutInputData"); + } + if (!overwriteMode.equals(TOverwriteMode.EMPTY)) { + inputData.setOverwriteMode(overwriteMode); + } else { + log.warn( + "Unable to use the received '{}', interpreted as an empty value", + OVERWRITE_MODE_PARAMETER_NAME); + } + } + log.debug("PrepareToPutInputData Created!"); + return inputData; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PutDoneConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PutDoneConverter.java index 109e735f..2762a373 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PutDoneConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/datatransfer/PutDoneConverter.java @@ -1,12 +1,11 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for PutDone function. This class - * receives input datas from xmlrpc call and converts these datas into a StoRM - * Type that can be used to invoke the PutDoneManager. - * + * This class represents the Type Converter for PutDone function. This class receives input datas + * from xmlrpc call and converts these datas into a StoRM Type that can be used to invoke the + * PutDoneManager. + * * @author Alberto Forti * @author CNAF -INFN Bologna * @date Aug 2006 @@ -25,69 +24,59 @@ import it.grid.storm.synchcall.data.datatransfer.IdentityPutDoneInputData; import it.grid.storm.synchcall.data.datatransfer.ManageFileTransferRequestFilesInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class PutDoneConverter extends ManageFileTransferConverter implements - Converter { - - private static final Logger log = LoggerFactory - .getLogger(PutDoneConverter.class); - - public PutDoneConverter() { - - } - - /** - * This method returns a PutDoneInputData created from the input Hashtable - * structure of an xmlrpc PutDone v2.2 call. - * - * @param inputParam - * Hashtable containing the input data - * @return PutDoneInputData - */ - public InputData convertToInputData(Map inputParam) { - - GridUserInterface guser = GridUserManager.decode(inputParam); - - TRequestToken requestToken; - try { - requestToken = TRequestToken.decode(inputParam, - TRequestToken.PNAME_REQUESTOKEN); - log.debug("requestToken={}" , requestToken.toString()); - } catch (InvalidTRequestTokenAttributesException e) { - requestToken = null; - log.debug("requestToken=NULL" , e); - } - - ArrayOfSURLs arrayOfSURLs; - try { - arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e) { - log.debug("Empty surlArray!",e); - arrayOfSURLs = null; - } - - ManageFileTransferRequestFilesInputData inputData; - if (guser != null) { - inputData = new IdentityPutDoneInputData(guser, requestToken, - arrayOfSURLs); - } else { - inputData = new AnonymousPutDoneInputData(requestToken, arrayOfSURLs); - } - - log.debug("PutDoneInputData Created!"); - - return inputData; - } - - @Override - protected Logger getLogger() { - - return log; - } - +public class PutDoneConverter extends ManageFileTransferConverter implements Converter { + + private static final Logger log = LoggerFactory.getLogger(PutDoneConverter.class); + + public PutDoneConverter() {} + + /** + * This method returns a PutDoneInputData created from the input Hashtable structure of an xmlrpc + * PutDone v2.2 call. 
+ * + * @param inputParam Hashtable containing the input data + * @return PutDoneInputData + */ + public InputData convertToInputData(Map inputParam) { + + GridUserInterface guser = GridUserManager.decode(inputParam); + + TRequestToken requestToken; + try { + requestToken = TRequestToken.decode(inputParam, TRequestToken.PNAME_REQUESTOKEN); + log.debug("requestToken={}", requestToken.toString()); + } catch (InvalidTRequestTokenAttributesException e) { + requestToken = null; + log.debug("requestToken=NULL", e); + } + + ArrayOfSURLs arrayOfSURLs; + try { + arrayOfSURLs = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e) { + log.debug("Empty surlArray!", e); + arrayOfSURLs = null; + } + + ManageFileTransferRequestFilesInputData inputData; + if (guser != null) { + inputData = new IdentityPutDoneInputData(guser, requestToken, arrayOfSURLs); + } else { + inputData = new AnonymousPutDoneInputData(requestToken, arrayOfSURLs); + } + + log.debug("PutDoneInputData Created!"); + + return inputData; + } + + @Override + protected Logger getLogger() { + + return log; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/directory/LsConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/directory/LsConverter.java index 2082682b..c991895e 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/directory/LsConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/directory/LsConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.directory; @@ -20,133 +19,122 @@ import it.grid.storm.synchcall.data.directory.LSOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.Hashtable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * * This class is part of the StoRM project. Copyright (c) 2008 INFN-CNAF. - *

- * - * This class represents the Type Converter for LS function . This class have - * get an input data from xmlrpc call anc convert it into a StoRM Type that can - * be used to invoke the LSManager - * - * - * Authors: - * + * + *

This class represents the Type Converter for the LS function. This class gets input data + * from an xmlrpc call and converts it into a StoRM Type that can be used to invoke the LSManager + * + *

Authors: + * * @author=lucamag luca.magnoniATcnaf.infn.it - * * @date = Oct 9, 2008 - * */ - public class LsConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory.getLogger(LsConverter.class); - - public LsConverter() { - - }; - - /** - * This method return a LSInputData created from input Hashtable structure of - * an xmlrpc spaceReservation v2.1 call. SpaceResData can be used to invoke LS - * method of Directory Functions Manager - */ - public InputData convertToInputData(Map inputParam) { - - log.debug("SrmLs: LSConverter :Call received :Creation of SpaceResData = {}" - , inputParam.size()); - log.debug("SrmLs: LSConverter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); - - // Member name definition for inputParam struct , from SRM V2.2 - String member_fullDL = new String("fullDetailedList"); - String member_allLR = new String("allLevelRecursive"); - String member_numOL = new String("numOfLevels"); - String member_offset = new String("offset"); - String member_count = new String("count"); - - /* Creation of VomsGridUser */ - GridUserInterface guser = GridUserManager.decode(inputParam); - - /* (2) anyURI[] arrayOfSURLs */ - ArrayOfSURLs surlArray = null; - try { - surlArray = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e2) { - log.debug("SrmLs: Empty surlArray found!",e2); - surlArray = null; - } - - TFileStorageType fileStorageType = TFileStorageType.decode(inputParam, - TFileStorageType.PNAME_FILESTORAGETYPE); - log.debug("fileType: {}" , fileStorageType); - - /* (5) fullDetailedList */ - Boolean fullDL = (Boolean) inputParam.get(member_fullDL); - log.debug("fullDetailedList: {}" , fullDL); - - /* (6) allLevelRecursive */ - Boolean allLR = (Boolean) inputParam.get(member_allLR); - log.debug("allLevelRecursive: {}" , allLR); - - /* (7) numOfLevels */ - Integer numOL = (Integer) inputParam.get(member_numOL); - log.debug("numOfLevels: {}" , numOL); - - /* (8) offset */ - Integer offset = (Integer) inputParam.get(member_offset); - log.debug("offset: {}" , offset); - - /* (9) count */ - Integer count = (Integer) inputParam.get(member_count); - log.debug("count: {}" , count); - - LSInputData inputData; - if (guser != null) { - inputData = new IdentityLSInputData(guser, surlArray, fileStorageType, - fullDL, allLR, numOL, offset, count); - } else { - inputData = new AnonymousLSInputData(surlArray, fileStorageType, fullDL, - allLR, numOL, offset, count); - } - return inputData; - } - - public Hashtable convertFromOutputData(OutputData data) { - - // Creation of new Hashtable to return - Hashtable outputParam = new Hashtable(); - LSOutputData outputData = (LSOutputData) data; - - /* (1) TReturnStatus */ - TReturnStatus globStatus = outputData.getStatus(); - if (globStatus != null) { - globStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - - /* (2) TRequestToken */ - TRequestToken requestToken = outputData.getRequestToken(); - if (requestToken != null) { - outputParam.put("requestToken", requestToken.toString()); - } - - /* (3) ArrayOfTMetaDataPathDetail details */ - ArrayOfTMetaDataPathDetail details = outputData.getDetails(); - if (details != null) { - details.encode(outputParam, ArrayOfTMetaDataPathDetail.PNAME_DETAILS); - } - - // Return global structure. 
- return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(LsConverter.class); + + public LsConverter() {}; + + /** + * This method return a LSInputData created from input Hashtable structure of an xmlrpc + * spaceReservation v2.1 call. SpaceResData can be used to invoke LS method of Directory Functions + * Manager + */ + public InputData convertToInputData(Map inputParam) { + + log.debug( + "SrmLs: LSConverter :Call received :Creation of SpaceResData = {}", inputParam.size()); + log.debug( + "SrmLs: LSConverter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); + + // Member name definition for inputParam struct , from SRM V2.2 + String member_fullDL = new String("fullDetailedList"); + String member_allLR = new String("allLevelRecursive"); + String member_numOL = new String("numOfLevels"); + String member_offset = new String("offset"); + String member_count = new String("count"); + + /* Creation of VomsGridUser */ + GridUserInterface guser = GridUserManager.decode(inputParam); + + /* (2) anyURI[] arrayOfSURLs */ + ArrayOfSURLs surlArray = null; + try { + surlArray = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e2) { + log.debug("SrmLs: Empty surlArray found!", e2); + surlArray = null; + } + + TFileStorageType fileStorageType = + TFileStorageType.decode(inputParam, TFileStorageType.PNAME_FILESTORAGETYPE); + log.debug("fileType: {}", fileStorageType); + + /* (5) fullDetailedList */ + Boolean fullDL = (Boolean) inputParam.get(member_fullDL); + log.debug("fullDetailedList: {}", fullDL); + + /* (6) allLevelRecursive */ + Boolean allLR = (Boolean) inputParam.get(member_allLR); + log.debug("allLevelRecursive: {}", allLR); + + /* (7) numOfLevels */ + Integer numOL = (Integer) inputParam.get(member_numOL); + log.debug("numOfLevels: {}", numOL); + + /* (8) offset */ + Integer offset = (Integer) inputParam.get(member_offset); + log.debug("offset: {}", offset); + + /* (9) count */ + Integer count = (Integer) inputParam.get(member_count); + log.debug("count: {}", count); + + LSInputData inputData; + if (guser != null) { + inputData = + new IdentityLSInputData( + guser, surlArray, fileStorageType, fullDL, allLR, numOL, offset, count); + } else { + inputData = + new AnonymousLSInputData(surlArray, fileStorageType, fullDL, allLR, numOL, offset, count); + } + return inputData; + } + + public Hashtable convertFromOutputData(OutputData data) { + + // Creation of new Hashtable to return + Hashtable outputParam = new Hashtable(); + LSOutputData outputData = (LSOutputData) data; + + /* (1) TReturnStatus */ + TReturnStatus globStatus = outputData.getStatus(); + if (globStatus != null) { + globStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + + /* (2) TRequestToken */ + TRequestToken requestToken = outputData.getRequestToken(); + if (requestToken != null) { + outputParam.put("requestToken", requestToken.toString()); + } + + /* (3) ArrayOfTMetaDataPathDetail details */ + ArrayOfTMetaDataPathDetail details = outputData.getDetails(); + if (details != null) { + details.encode(outputParam, ArrayOfTMetaDataPathDetail.PNAME_DETAILS); + } + + // Return global structure. 
+ return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/directory/MkdirConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/directory/MkdirConverter.java index f9d369cd..843ae882 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/directory/MkdirConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/directory/MkdirConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.directory; @@ -17,80 +16,68 @@ import it.grid.storm.synchcall.data.directory.MkdirOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * * @author lucamag * @date May 28, 2008 - * */ - public class MkdirConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(MkdirConverter.class); - - public MkdirConverter() { - - }; + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(MkdirConverter.class); - /** - * This method return a MkdirInputData created from input Hashtable structure - * of an xmlrpc Mkdir v2.1 call. Mkdir Input Data can be used to invoke mkdir - * method of DirectoryFunctionsManager - */ - public InputData convertToInputData(Map inputParam) { + public MkdirConverter() {}; - log - .debug("SrmMkdir: Converter :Call received :Creation of MkdirInputData = {}" - , inputParam.size()); - log.debug("SrmMkdir: Converter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); + /** + * This method return a MkdirInputData created from input Hashtable structure of an xmlrpc Mkdir + * v2.1 call. 
Mkdir Input Data can be used to invoke mkdir method of DirectoryFunctionsManager + */ + public InputData convertToInputData(Map inputParam) { - GridUserInterface guser = GridUserManager.decode(inputParam); + log.debug( + "SrmMkdir: Converter :Call received :Creation of MkdirInputData = {}", inputParam.size()); + log.debug( + "SrmMkdir: Converter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); - /* (2) directoryPath */ - TSURL surl = null; - try { - surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); - } catch (InvalidTSURLAttributesException e1) { - log.debug("SrmMkdir: ErrorCreating surl: {}" , e1.toString(),e1); - } + GridUserInterface guser = GridUserManager.decode(inputParam); - MkdirInputData inputData; - if (guser != null) { - inputData = new IdentityMkdirInputData(guser, surl); - } else { - inputData = new AnonymousMkdirInputData(surl); - } - return inputData; - } + /* (2) directoryPath */ + TSURL surl = null; + try { + surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); + } catch (InvalidTSURLAttributesException e1) { + log.debug("SrmMkdir: ErrorCreating surl: {}", e1.toString(), e1); + } - public Map convertFromOutputData(OutputData outputData) { + MkdirInputData inputData; + if (guser != null) { + inputData = new IdentityMkdirInputData(guser, surl); + } else { + inputData = new AnonymousMkdirInputData(surl); + } + return inputData; + } - log - .debug("SrmMkdir: Converter :Call received :Creation of XMLRPC Output Structure! "); + public Map convertFromOutputData(OutputData outputData) { - Map outputParam = new HashMap(); + log.debug("SrmMkdir: Converter :Call received :Creation of XMLRPC Output Structure! "); - MkdirOutputData odata = (MkdirOutputData) outputData; - TReturnStatus outputStatus = odata.getStatus(); + Map outputParam = new HashMap(); - outputStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + MkdirOutputData odata = (MkdirOutputData) outputData; + TReturnStatus outputStatus = odata.getStatus(); - // Return Output Structure - return outputParam; + outputStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } + // Return Output Structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/directory/MvConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/directory/MvConverter.java index e9ce994a..fb2af104 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/directory/MvConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/directory/MvConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.directory; @@ -17,87 +16,75 @@ import it.grid.storm.synchcall.data.directory.MvOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the Type Converter for SrmMv function . This class have - * get an input data from xmlrpc call anc convert it into a StoRM Type that can - * be used to invoke the MvExecutor. - * + * This class is part of the StoRM project. 
Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *

This class represents the Type Converter for SrmMv function . This class have get an input + * data from xmlrpc call anc convert it into a StoRM Type that can be used to invoke the MvExecutor. + * * @author lucamag * @date May 28, 2008 - * */ - public class MvConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory.getLogger(MvConverter.class); - - public MvConverter() { - - }; - - /** - * This method return a MvInputData created from input Map structure of an - * xmlrpc SrmMv v2.2 call. - */ - public InputData convertToInputData(Map inputParam) { - - log.debug("SrmMv: Converter :Call received :Creation of MvInputData = {}" - , inputParam.size()); - log.debug("SrmMv: Converter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); - - GridUserInterface guser = GridUserManager.decode(inputParam); - - /* (2) fromSURL */ - TSURL fromSURL = null; - try { - fromSURL = TSURL.decode(inputParam, TSURL.PNAME_FROMSURL); - } catch (InvalidTSURLAttributesException e1) { - log.debug("SrmMv: ErrorCreating surl: {}" , e1.toString(),e1); - } - - /* (3) toSURL */ - TSURL toSURL = null; - try { - toSURL = TSURL.decode(inputParam, TSURL.PNAME_TOSURL); - } catch (InvalidTSURLAttributesException e1) { - log.debug("SrmMv: ErrorCreating surl: {}" , e1.toString(),e1); - } - - MvInputData inputData; - if (guser != null) { - inputData = new IdentityMvInputData(guser, fromSURL, toSURL); - } else { - inputData = new AnonymousMvInputData(fromSURL, toSURL); - } - return inputData; - - } - - public Map convertFromOutputData(OutputData data) { - - log - .debug("SrmMv: Converter :Call received :Creation of XMLRPC Output Structure! "); - // Output structure to return to xmlrpc client - Map outputParam = new HashMap(); - MvOutputData outputData = (MvOutputData) data; - TReturnStatus status = outputData.getStatus(); - status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - - // Return Output Structure - return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(MvConverter.class); + + public MvConverter() {}; + + /** + * This method return a MvInputData created from input Map structure of an xmlrpc SrmMv v2.2 call. + */ + public InputData convertToInputData(Map inputParam) { + + log.debug("SrmMv: Converter :Call received :Creation of MvInputData = {}", inputParam.size()); + log.debug( + "SrmMv: Converter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); + + GridUserInterface guser = GridUserManager.decode(inputParam); + + /* (2) fromSURL */ + TSURL fromSURL = null; + try { + fromSURL = TSURL.decode(inputParam, TSURL.PNAME_FROMSURL); + } catch (InvalidTSURLAttributesException e1) { + log.debug("SrmMv: ErrorCreating surl: {}", e1.toString(), e1); + } + + /* (3) toSURL */ + TSURL toSURL = null; + try { + toSURL = TSURL.decode(inputParam, TSURL.PNAME_TOSURL); + } catch (InvalidTSURLAttributesException e1) { + log.debug("SrmMv: ErrorCreating surl: {}", e1.toString(), e1); + } + + MvInputData inputData; + if (guser != null) { + inputData = new IdentityMvInputData(guser, fromSURL, toSURL); + } else { + inputData = new AnonymousMvInputData(fromSURL, toSURL); + } + return inputData; + } + + public Map convertFromOutputData(OutputData data) { + + log.debug("SrmMv: Converter :Call received :Creation of XMLRPC Output Structure! 
"); + // Output structure to return to xmlrpc client + Map outputParam = new HashMap(); + MvOutputData outputData = (MvOutputData) data; + TReturnStatus status = outputData.getStatus(); + status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + + // Return Output Structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmConverter.java index b02ef366..164fad19 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.directory; @@ -18,89 +17,76 @@ import it.grid.storm.synchcall.data.directory.RmOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. - * - * This class represents the Type Converter for Rm function . This class have - * get an input data from xmlrpc call anc convert it into a StoRM Type that can - * be used to invoke the RmManager - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * + * + *

This class represents the Type Converter for the Rm function. This class gets input data + * from an xmlrpc call and converts it into a StoRM Type that can be used to invoke the RmManager + * + *

Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @date May 28, 2008 - * */ - public class RmConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory.getLogger(RmConverter.class); - - public RmConverter() { - - }; - - /** - * This method return a RmInputData created from input Hashtable structure of - * an xmlrpc Rm v2.1 call. Rm Input Data can be used to invoke mkdir method of - * DirectoryFunctionsManager - */ - public InputData convertToInputData(Map inputParam) { - - log.debug("RmConverter :Call received :Creation of RmdirInputData = {}" - , inputParam.size()); - log.debug("RmConverter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); - - GridUserInterface guser = GridUserManager.decode(inputParam); - - ArrayOfSURLs surlArray = null; - try { - surlArray = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); - } catch (InvalidArrayOfSURLsAttributeException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - RmInputData inputData; - if (guser != null) { - inputData = new IdentityRmInputData(guser, surlArray); - } else { - inputData = new AnonymousRmInputData(surlArray); - } - log.debug("RmInputData Created!"); - return inputData; - } - - public Map convertFromOutputData(OutputData outputData) { - - log - .debug("RmConverter :Call received :Creation of XMLRPC Output Structure! "); - // Output structure to return to xmlrpc client - Map outputParam = new HashMap(); - RmOutputData rmOutputData = (RmOutputData) outputData; - TReturnStatus status = rmOutputData.getStatus(); - if (status != null) { - status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - - ArrayOfTSURLReturnStatus surlArray = rmOutputData.getSurlStatus(); - if (surlArray != null) { - surlArray.encode(outputParam, - ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); - } - - // Return global structure. - return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(RmConverter.class); + + public RmConverter() {}; + + /** + * This method return a RmInputData created from input Hashtable structure of an xmlrpc Rm v2.1 + * call. Rm Input Data can be used to invoke mkdir method of DirectoryFunctionsManager + */ + public InputData convertToInputData(Map inputParam) { + + log.debug("RmConverter :Call received :Creation of RmdirInputData = {}", inputParam.size()); + log.debug( + "RmConverter: Input Structure toString: {}", ParameterDisplayHelper.display(inputParam)); + + GridUserInterface guser = GridUserManager.decode(inputParam); + + ArrayOfSURLs surlArray = null; + try { + surlArray = ArrayOfSURLs.decode(inputParam, ArrayOfSURLs.ARRAY_OF_SURLS); + } catch (InvalidArrayOfSURLsAttributeException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + + RmInputData inputData; + if (guser != null) { + inputData = new IdentityRmInputData(guser, surlArray); + } else { + inputData = new AnonymousRmInputData(surlArray); + } + log.debug("RmInputData Created!"); + return inputData; + } + + public Map convertFromOutputData(OutputData outputData) { + + log.debug("RmConverter :Call received :Creation of XMLRPC Output Structure! 
"); + // Output structure to return to xmlrpc client + Map outputParam = new HashMap(); + RmOutputData rmOutputData = (RmOutputData) outputData; + TReturnStatus status = rmOutputData.getStatus(); + if (status != null) { + status.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + + ArrayOfTSURLReturnStatus surlArray = rmOutputData.getSurlStatus(); + if (surlArray != null) { + surlArray.encode(outputParam, ArrayOfTSURLReturnStatus.PNAME_ARRAYOFFILESTATUSES); + } + + // Return global structure. + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmdirConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmdirConverter.java index 0e7cdf79..7a5dca1e 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmdirConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/directory/RmdirConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.directory; @@ -17,96 +16,86 @@ import it.grid.storm.synchcall.data.directory.RmdirOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is part of the StoRM project. - * - * This class represents the Type Converter for Rmdir function . This class have - * get an input data from xmlrpc call anc convert it into a StoRM Type that can - * be used to invoke the RmdirManager - * - * Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project - * + * + *

This class represents the Type Converter for the Rmdir function. This class gets input + * data from an xmlrpc call and converts it into a StoRM Type that can be used to invoke the + * RmdirManager + * + *

Copyright: Copyright (c) 2008 Company: INFN-CNAF and ICTP/EGRID project + * * @author lucamag * @date May 28, 2008 - * */ - public class RmdirConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(RmdirConverter.class); - - public RmdirConverter() { - - }; - - /** - * This method return a RmdirInputData created from input Hashtable structure - * of an xmlrpc Rmdir v2.1 call. Rmdir Input Data can be used to invoke mkdir - * method of DirectoryFunctionsManager - */ - public InputData convertToInputData(Map inputParam) { - - log - .debug("srmRmdir: Converter :Call received :Creation of RmdirInputData = {}" - , inputParam.size()); - log.debug("srmRmdir: Converter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); - - /* Creation of VomsGridUser */ - GridUserInterface guser = GridUserManager.decode(inputParam); - - /* (2) directoryPath */ - TSURL surl = null; - try { - surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); - } catch (InvalidTSURLAttributesException e1) { - log.debug("srmRm: ErrorCreating surl: {}" , e1.toString(),e1); - } - - /* (4) recursive */ - String member_recursive = new String("recursive"); - Boolean recursive = inputParam.get(member_recursive) == null ? false - : (Boolean) inputParam.get(member_recursive); - RmdirInputData inputData; - if (guser != null) { - inputData = new IdentityRmdirInputData(guser, surl, recursive); - } else { - inputData = new AnonymousRmdirInputData(surl, recursive); - } - return inputData; - - } - - /* - * (non-Javadoc) - * - * @see - * it.grid.storm.xmlrpc.converter.Converter#convertFromOutputData(it.grid. - * storm.synchcall.data.OutputData) - */ - public Map convertFromOutputData(OutputData outputData) { - - log - .debug("srmRm: RmdirConverter :Call received :Creation of XMLRPC Output Structure! "); - - // Output structure to return to xmlrpc client - Map outputParam = new HashMap(); - RmdirOutputData rmdirOutputData = (RmdirOutputData) outputData; - TReturnStatus outputStatus = rmdirOutputData.getStatus(); - outputStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - - // Return Output Structure - return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(RmdirConverter.class); + + public RmdirConverter() {}; + + /** + * This method return a RmdirInputData created from input Hashtable structure of an xmlrpc Rmdir + * v2.1 call. Rmdir Input Data can be used to invoke mkdir method of DirectoryFunctionsManager + */ + public InputData convertToInputData(Map inputParam) { + + log.debug( + "srmRmdir: Converter :Call received :Creation of RmdirInputData = {}", inputParam.size()); + log.debug( + "srmRmdir: Converter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); + + /* Creation of VomsGridUser */ + GridUserInterface guser = GridUserManager.decode(inputParam); + + /* (2) directoryPath */ + TSURL surl = null; + try { + surl = TSURL.decode(inputParam, TSURL.PNAME_SURL); + } catch (InvalidTSURLAttributesException e1) { + log.debug("srmRm: ErrorCreating surl: {}", e1.toString(), e1); + } + + /* (4) recursive */ + String member_recursive = new String("recursive"); + Boolean recursive = + inputParam.get(member_recursive) == null + ? 
false + : (Boolean) inputParam.get(member_recursive); + RmdirInputData inputData; + if (guser != null) { + inputData = new IdentityRmdirInputData(guser, surl, recursive); + } else { + inputData = new AnonymousRmdirInputData(surl, recursive); + } + return inputData; + } + + /* + * (non-Javadoc) + * + * @see + * it.grid.storm.xmlrpc.converter.Converter#convertFromOutputData(it.grid. + * storm.synchcall.data.OutputData) + */ + public Map convertFromOutputData(OutputData outputData) { + + log.debug("srmRm: RmdirConverter :Call received :Creation of XMLRPC Output Structure! "); + + // Output structure to return to xmlrpc client + Map outputParam = new HashMap(); + RmdirOutputData rmdirOutputData = (RmdirOutputData) outputData; + TReturnStatus outputStatus = rmdirOutputData.getStatus(); + outputStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + + // Return Output Structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/discovery/PingConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/discovery/PingConverter.java index dea6048e..01208009 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/discovery/PingConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/discovery/PingConverter.java @@ -1,17 +1,15 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for the Ping function. This class - * receives input data from xmlrpc call and fills the PingInputData class. - * + * This class represents the Type Converter for the Ping function. This class receives input data + * from xmlrpc call and fills the PingInputData class. + * * @author Alberto Forti * @author CNAF-INFN Bologna * @date Feb 2007 * @version 1.0 */ - package it.grid.storm.xmlrpc.converter.discovery; import it.grid.storm.griduser.GridUserInterface; @@ -24,57 +22,51 @@ import it.grid.storm.synchcall.data.discovery.PingInputData; import it.grid.storm.synchcall.data.discovery.PingOutputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Hashtable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PingConverter implements Converter { - private static final Logger log = LoggerFactory - .getLogger(PingConverter.class); - - public PingConverter() { + private static final Logger log = LoggerFactory.getLogger(PingConverter.class); - } + public PingConverter() {} - public InputData convertToInputData(Map inputParam) { + public InputData convertToInputData(Map inputParam) { - log.debug("Ping: input converter started. InputParam "); + log.debug("Ping: input converter started. 
InputParam "); - GridUserInterface requestor = GridUserManager.decode(inputParam); + GridUserInterface requestor = GridUserManager.decode(inputParam); - String authorizationID = (String) inputParam.get("authorizationID"); + String authorizationID = (String) inputParam.get("authorizationID"); - PingInputData inputData; - if (requestor != null) { - inputData = new IdentityPingInputData(requestor, authorizationID); - } else { - inputData = new AnonymousPingInputData(authorizationID); - } - log.debug("Ping: input converter has finished."); - return inputData; - } + PingInputData inputData; + if (requestor != null) { + inputData = new IdentityPingInputData(requestor, authorizationID); + } else { + inputData = new AnonymousPingInputData(authorizationID); + } + log.debug("Ping: input converter has finished."); + return inputData; + } - public Map convertFromOutputData(OutputData data) { + public Map convertFromOutputData(OutputData data) { - log.debug("Ping: output converter started."); - Hashtable outputParam = new Hashtable(); - PingOutputData outputData = (PingOutputData) data; - String versionInfo = outputData.getVersionInfo(); - if (versionInfo != null) { - outputParam.put("versionInfo", versionInfo); - } + log.debug("Ping: output converter started."); + Hashtable outputParam = new Hashtable(); + PingOutputData outputData = (PingOutputData) data; + String versionInfo = outputData.getVersionInfo(); + if (versionInfo != null) { + outputParam.put("versionInfo", versionInfo); + } - ArrayOfTExtraInfo extraInfoArray = outputData.getExtraInfoArray(); - if (extraInfoArray != null) { - extraInfoArray.encode(outputParam, - ArrayOfTExtraInfo.PNAME_STORAGESYSTEMINFO); - } + ArrayOfTExtraInfo extraInfoArray = outputData.getExtraInfoArray(); + if (extraInfoArray != null) { + extraInfoArray.encode(outputParam, ArrayOfTExtraInfo.PNAME_STORAGESYSTEMINFO); + } - log.debug("Ping: output converter has finished."); - return outputParam; - } + log.debug("Ping: output converter has finished."); + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceMetaDataConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceMetaDataConverter.java index 9452a57f..f28fc1a4 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceMetaDataConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceMetaDataConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.space; @@ -14,102 +13,88 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.space.AnonymousGetSpaceMetaDataInputData; import it.grid.storm.synchcall.data.space.GetSpaceMetaDataInputData; -import it.grid.storm.synchcall.data.space.IdentityGetSpaceMetaDataInputData; import it.grid.storm.synchcall.data.space.GetSpaceMetaDataOutputData; +import it.grid.storm.synchcall.data.space.IdentityGetSpaceMetaDataInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Hashtable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class is part of the StoRM project. Copyright: Copyright (c) 2008 - * Company: INFN-CNAF and ICTP/EGRID project - * - * This class represents the Type Converter for GetSpaceMetaData function . 
This - * class have get an input data from xmlrpc call anc convert it into a StoRM - * Type that can be used to invoke the GetSpaceMetaDataManager - * + * This class is part of the StoRM project. Copyright: Copyright (c) 2008 Company: INFN-CNAF and + * ICTP/EGRID project + * + *

This class represents the Type Converter for GetSpaceMetaData function . This class have get + * an input data from xmlrpc call anc convert it into a StoRM Type that can be used to invoke the + * GetSpaceMetaDataManager + * * @author lucamag * @date May 29, 2008 - * */ - public class GetSpaceMetaDataConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(GetSpaceMetaDataConverter.class); - - public GetSpaceMetaDataConverter() { - - }; - - /** - * This method return a SpaceResData created from input Hashtable structure of - * an xmlrpc spaceReservation v2.2 call. SpaceResData can be used to invoke - * SpaceResevation Manager - */ - public InputData convertToInputData(Map inputParam) { - - String memberName = null; - - /* Creation of VomsGridUser */ - GridUserInterface guser = GridUserManager.decode(inputParam); - - /* (1) authorizationID (never used) */ - String authID = (String) inputParam.get("authorizationID"); - - ArrayOfTSpaceToken arrayOfSpaceTokens; - try { - arrayOfSpaceTokens = ArrayOfTSpaceToken.decode(inputParam, - ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); - } catch (InvalidArrayOfTSpaceTokenAttributeException e) { - arrayOfSpaceTokens = null; - } - - GetSpaceMetaDataInputData inputData; - if (guser != null) { - inputData = new IdentityGetSpaceMetaDataInputData(guser, - arrayOfSpaceTokens); - } else { - inputData = new AnonymousGetSpaceMetaDataInputData(arrayOfSpaceTokens); - } - return inputData; - } - - public Map convertFromOutputData(OutputData data) { - - log - .debug("GetSpaceMetaDataConverter: Creation of XMLRPC Output Structure! "); - - // Creation of new Hashtable to return - Hashtable outputParam = new Hashtable(); - - // outputData - GetSpaceMetaDataOutputData outputData = (GetSpaceMetaDataOutputData) data; - - /* (1) returnStatus */ - TReturnStatus returnStatus = outputData.getStatus(); - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } - - /* (2) arrayOfSpaceDetails */ - ArrayOfTMetaDataSpace arrayOfSpaceDetails = outputData - .getMetaDataSpaceArray(); - if (arrayOfSpaceDetails != null) { - arrayOfSpaceDetails.encode(outputParam, - ArrayOfTMetaDataSpace.PNAME_ARRAYOFSPACEDETAILS); - } - - log.debug(outputParam.toString()); - - // Return output Parameter structure - return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(GetSpaceMetaDataConverter.class); + + public GetSpaceMetaDataConverter() {}; + + /** + * This method return a SpaceResData created from input Hashtable structure of an xmlrpc + * spaceReservation v2.2 call. 
SpaceResData can be used to invoke SpaceResevation Manager + */ + public InputData convertToInputData(Map inputParam) { + + String memberName = null; + + /* Creation of VomsGridUser */ + GridUserInterface guser = GridUserManager.decode(inputParam); + + /* (1) authorizationID (never used) */ + String authID = (String) inputParam.get("authorizationID"); + + ArrayOfTSpaceToken arrayOfSpaceTokens; + try { + arrayOfSpaceTokens = + ArrayOfTSpaceToken.decode(inputParam, ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); + } catch (InvalidArrayOfTSpaceTokenAttributeException e) { + arrayOfSpaceTokens = null; + } + + GetSpaceMetaDataInputData inputData; + if (guser != null) { + inputData = new IdentityGetSpaceMetaDataInputData(guser, arrayOfSpaceTokens); + } else { + inputData = new AnonymousGetSpaceMetaDataInputData(arrayOfSpaceTokens); + } + return inputData; + } + + public Map convertFromOutputData(OutputData data) { + + log.debug("GetSpaceMetaDataConverter: Creation of XMLRPC Output Structure! "); + + // Creation of new Hashtable to return + Hashtable outputParam = new Hashtable(); + + // outputData + GetSpaceMetaDataOutputData outputData = (GetSpaceMetaDataOutputData) data; + + /* (1) returnStatus */ + TReturnStatus returnStatus = outputData.getStatus(); + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } + + /* (2) arrayOfSpaceDetails */ + ArrayOfTMetaDataSpace arrayOfSpaceDetails = outputData.getMetaDataSpaceArray(); + if (arrayOfSpaceDetails != null) { + arrayOfSpaceDetails.encode(outputParam, ArrayOfTMetaDataSpace.PNAME_ARRAYOFSPACEDETAILS); + } + + log.debug(outputParam.toString()); + + // Return output Parameter structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceTokensConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceTokensConverter.java index c9bea45b..7d8b01aa 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceTokensConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/space/GetSpaceTokensConverter.java @@ -1,18 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for GetSpaceTokens function. This - * class gets input data from xmlrpc call and converts it into a StoRM Type that - * can be used to invoke the GetSpaceTokensExecutor - * + * This class represents the Type Converter for GetSpaceTokens function. 
This class gets input data + * from xmlrpc call and converts it into a StoRM Type that can be used to invoke the + * GetSpaceTokensExecutor + * * @author Alberto Forti * @author CNAF -INFN Bologna * @date November 2006 * @version 1.0 */ - package it.grid.storm.xmlrpc.converter.space; import it.grid.storm.griduser.GridUserInterface; @@ -23,77 +21,69 @@ import it.grid.storm.synchcall.data.OutputData; import it.grid.storm.synchcall.data.space.AnonymousGetSpaceTokensInputData; import it.grid.storm.synchcall.data.space.GetSpaceTokensInputData; -import it.grid.storm.synchcall.data.space.IdentityGetSpaceTokensInputData; import it.grid.storm.synchcall.data.space.GetSpaceTokensOutputData; +import it.grid.storm.synchcall.data.space.IdentityGetSpaceTokensInputData; import it.grid.storm.xmlrpc.converter.Converter; - import java.util.Hashtable; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GetSpaceTokensConverter implements Converter { - // Logger - private static final Logger log = LoggerFactory - .getLogger(GetSpaceTokensConverter.class); - - public GetSpaceTokensConverter() { + // Logger + private static final Logger log = LoggerFactory.getLogger(GetSpaceTokensConverter.class); - }; + public GetSpaceTokensConverter() {}; - /** - * Returns an instance of GetSpaceTokenInputData from a Hashtable structure - * created by a xmlrpc GetSpaceTokens v2.2 call. - */ - public InputData convertToInputData(Map inputParam) { + /** + * Returns an instance of GetSpaceTokenInputData from a Hashtable structure created by a xmlrpc + * GetSpaceTokens v2.2 call. + */ + public InputData convertToInputData(Map inputParam) { - String memberName = new String("authorizationID"); + String memberName = new String("authorizationID"); - GridUserInterface guser = GridUserManager.decode(inputParam); + GridUserInterface guser = GridUserManager.decode(inputParam); - /* (1) authorizationID (never used) */ - String authID = (String) inputParam.get(memberName); + /* (1) authorizationID (never used) */ + String authID = (String) inputParam.get(memberName); - memberName = new String("userSpaceTokenDescription"); - String userSpaceTokenDescription = (String) inputParam.get(memberName); - GetSpaceTokensInputData inputData; - if (guser != null) { - inputData = new IdentityGetSpaceTokensInputData(guser, - userSpaceTokenDescription); - } else { - inputData = new AnonymousGetSpaceTokensInputData( - userSpaceTokenDescription); - } - return inputData; - } + memberName = new String("userSpaceTokenDescription"); + String userSpaceTokenDescription = (String) inputParam.get(memberName); + GetSpaceTokensInputData inputData; + if (guser != null) { + inputData = new IdentityGetSpaceTokensInputData(guser, userSpaceTokenDescription); + } else { + inputData = new AnonymousGetSpaceTokensInputData(userSpaceTokenDescription); + } + return inputData; + } - public Map convertFromOutputData(OutputData data) { + public Map convertFromOutputData(OutputData data) { - log.debug("GetSpaceTokensConverter. Creation of XMLRPC Output Structure! "); + log.debug("GetSpaceTokensConverter. Creation of XMLRPC Output Structure! 
"); - // Creation of new Hashtable to return - Hashtable outputParam = new Hashtable(); + // Creation of new Hashtable to return + Hashtable outputParam = new Hashtable(); - GetSpaceTokensOutputData outputData = (GetSpaceTokensOutputData) data; + GetSpaceTokensOutputData outputData = (GetSpaceTokensOutputData) data; - /* (1) returnStatus */ - TReturnStatus returnStatus = outputData.getStatus(); - if (returnStatus != null) { - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - } + /* (1) returnStatus */ + TReturnStatus returnStatus = outputData.getStatus(); + if (returnStatus != null) { + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + } - /* (2) arrayOfSpaceTokens */ - ArrayOfTSpaceToken arrayOfSpaceTokens = outputData.getArrayOfSpaceTokens(); - if (arrayOfSpaceTokens != null) { - arrayOfSpaceTokens.encode(outputParam, - ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); - } + /* (2) arrayOfSpaceTokens */ + ArrayOfTSpaceToken arrayOfSpaceTokens = outputData.getArrayOfSpaceTokens(); + if (arrayOfSpaceTokens != null) { + arrayOfSpaceTokens.encode(outputParam, ArrayOfTSpaceToken.PNAME_ARRAYOFSPACETOKENS); + } - log.debug("Sending: {}" , outputParam.toString()); + log.debug("Sending: {}", outputParam.toString()); - // Return output Parameter structure - return outputParam; - } + // Return output Parameter structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/space/ReleaseSpaceConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/space/ReleaseSpaceConverter.java index 61df9e66..a2df0645 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/space/ReleaseSpaceConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/space/ReleaseSpaceConverter.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.xmlrpc.converter.space; @@ -16,85 +15,70 @@ import it.grid.storm.synchcall.data.space.ReleaseSpaceOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This class represents the Type Converter for ReleaseSpace function . This - * class have get an input data from xmlrpc call anc convert it into a StoRM - * Type that can be used to invoke the ReleaseSpaceManager - * + * This class represents the Type Converter for ReleaseSpace function . This class have get an input + * data from xmlrpc call anc convert it into a StoRM Type that can be used to invoke the + * ReleaseSpaceManager + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - public class ReleaseSpaceConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(ReleaseSpaceConverter.class); - - public ReleaseSpaceConverter() { - - }; - - /** - * This method return a ReleaseSpaceInputData created from input Hashtable - * structure of an xmlrpc releaseSpace v2.1 call. 
ReleaseSpaceInputData can be - * used to invoke ReleaseSpace Manager - */ + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(ReleaseSpaceConverter.class); - public InputData convertToInputData(Map inputParam) { + public ReleaseSpaceConverter() {}; - log - .debug("ReleaseSpaceConverter :Call received :Creation of SpaceResData = {}" - , inputParam.size()); - log.debug("ReleaseSpaceConverter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); + /** + * This method return a ReleaseSpaceInputData created from input Hashtable structure of an xmlrpc + * releaseSpace v2.1 call. ReleaseSpaceInputData can be used to invoke ReleaseSpace Manager + */ + public InputData convertToInputData(Map inputParam) { - GridUserInterface guser = GridUserManager.decode(inputParam); + log.debug( + "ReleaseSpaceConverter :Call received :Creation of SpaceResData = {}", inputParam.size()); + log.debug( + "ReleaseSpaceConverter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); - TSpaceToken spaceToken = TSpaceToken.decode(inputParam, - TSpaceToken.PNAME_SPACETOKEN); + GridUserInterface guser = GridUserManager.decode(inputParam); - Boolean force = (Boolean) inputParam.get("forceFileRelease"); - if (force == null) { - force = new Boolean(false); - } + TSpaceToken spaceToken = TSpaceToken.decode(inputParam, TSpaceToken.PNAME_SPACETOKEN); - ReleaseSpaceInputData inputData; - if (guser != null) { - inputData = new IdentityReleaseSpaceInputData(guser, spaceToken, - force.booleanValue()); - } else { - inputData = new AnonymousReleaseSpaceInputData(spaceToken, - force.booleanValue()); - } - return inputData; + Boolean force = (Boolean) inputParam.get("forceFileRelease"); + if (force == null) { + force = new Boolean(false); + } - } + ReleaseSpaceInputData inputData; + if (guser != null) { + inputData = new IdentityReleaseSpaceInputData(guser, spaceToken, force.booleanValue()); + } else { + inputData = new AnonymousReleaseSpaceInputData(spaceToken, force.booleanValue()); + } + return inputData; + } - public Map convertFromOutputData(OutputData data) { + public Map convertFromOutputData(OutputData data) { - log - .debug("releaseSpaceConverter :Call received :Creation of XMLRPC Output Structure! "); - ReleaseSpaceOutputData outputData = (ReleaseSpaceOutputData) data; + log.debug("releaseSpaceConverter :Call received :Creation of XMLRPC Output Structure! "); + ReleaseSpaceOutputData outputData = (ReleaseSpaceOutputData) data; - // Creation of new Hashtable to return - Map outputParam = new HashMap(); + // Creation of new Hashtable to return + Map outputParam = new HashMap(); - TReturnStatus returnStatus = outputData.getStatus(); - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + TReturnStatus returnStatus = outputData.getStatus(); + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - // Return output Parameter structure - return outputParam; - } + // Return output Parameter structure + return outputParam; + } } diff --git a/src/main/java/it/grid/storm/xmlrpc/converter/space/ReserveSpaceConverter.java b/src/main/java/it/grid/storm/xmlrpc/converter/space/ReserveSpaceConverter.java index 9d1c24e9..f007cf7c 100644 --- a/src/main/java/it/grid/storm/xmlrpc/converter/space/ReserveSpaceConverter.java +++ b/src/main/java/it/grid/storm/xmlrpc/converter/space/ReserveSpaceConverter.java @@ -1,18 +1,16 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
- * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ /** - * This class represents the Type Converter for space Reservation function . - * This class have get an input data from xmlrpc call anc convert it into a - * StoRM Type that can be used to invoke the space Reservation Manager - * + * This class represents the Type Converter for space Reservation function . This class have get an + * input data from xmlrpc call anc convert it into a StoRM Type that can be used to invoke the space + * Reservation Manager + * * @author Magnoni Luca * @author Cnaf -INFN Bologna * @date * @version 1.0 */ - package it.grid.storm.xmlrpc.converter.space; import it.grid.storm.griduser.GridUserInterface; @@ -32,157 +30,151 @@ import it.grid.storm.synchcall.data.space.ReserveSpaceOutputData; import it.grid.storm.xmlrpc.converter.Converter; import it.grid.storm.xmlrpc.converter.ParameterDisplayHelper; - import java.util.HashMap; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ReserveSpaceConverter implements Converter { - /** - * Logger - */ - private static final Logger log = LoggerFactory - .getLogger(ReserveSpaceConverter.class); - - public ReserveSpaceConverter() { - - }; - - /** - * This method return a SpaceResData created from input Hashtable structure of - * an xmlrpc spaceReservation v2.1 call. SpaceResData can be used to invoke - * SpaceResevation Manager - */ - public InputData convertToInputData(Map inputParam) { - - log - .debug("reserveSpaceConverter :Call received :Creation of SpaceResData = {}" - , inputParam.size()); - log.debug("reserveSpaceConverter: Input Structure toString: {}" - , ParameterDisplayHelper.display(inputParam)); - - String memberName = null; - - GridUserInterface guser = GridUserManager.decode(inputParam); - - memberName = new String("authorizationID"); - String authID = (String) inputParam.get(memberName); - - memberName = new String("userSpaceTokenDescription"); - String spaceAlias = (String) inputParam.get(memberName); - if (spaceAlias == null) { - spaceAlias = new String(""); - } - - TRetentionPolicyInfo retentionPolicyInfo = TRetentionPolicyInfo.decode( - inputParam, TRetentionPolicyInfo.PNAME_retentionPolicyInfo); - - TSizeInBytes desiredSizeOfTotalSpace = TSizeInBytes.decode(inputParam, - TSizeInBytes.PNAME_DESIREDSIZEOFTOTALSPACE); - - TSizeInBytes desiredSizeOfGuaranteedSpace = TSizeInBytes.decode(inputParam, - TSizeInBytes.PNAME_DESIREDSIZEOFGUARANTEEDSPACE); - - ArrayOfTExtraInfo storageSystemInfo; - try { - storageSystemInfo = ArrayOfTExtraInfo.decode(inputParam, - ArrayOfTExtraInfo.PNAME_STORAGESYSTEMINFO); - } catch (InvalidArrayOfTExtraInfoAttributeException e) { - storageSystemInfo = null; - } - - ReserveSpaceInputData inputData; - if (guser != null) { - inputData = new IdentityReserveSpaceInputData(guser, spaceAlias, - retentionPolicyInfo, desiredSizeOfTotalSpace, - desiredSizeOfGuaranteedSpace, storageSystemInfo); - } else { - inputData = new AnonymousReserveSpaceInputData(spaceAlias, - retentionPolicyInfo, desiredSizeOfTotalSpace, - desiredSizeOfGuaranteedSpace, storageSystemInfo); - } - TLifeTimeInSeconds desiredLifetimeOfReservedSpace = TLifeTimeInSeconds - .decode(inputParam, - TLifeTimeInSeconds.PNAME_DESIREDLIFETIMEOFRESERVEDSPACE); - if (desiredLifetimeOfReservedSpace != null - && !desiredLifetimeOfReservedSpace.isEmpty()) { - inputData.setSpaceLifetime(desiredLifetimeOfReservedSpace); - } - return inputData; - - } - - 
public Map convertFromOutputData(OutputData data) { - - log - .debug("reserveSpaceConverter :Call received :Creation of XMLRPC Output Structure! "); - - // Creation of new Hashtable to return - Map outputParam = new HashMap(); - - ReserveSpaceOutputData outputData = (ReserveSpaceOutputData) data; - - /* (1) returnStatus */ - TReturnStatus returnStatus = outputData.getStatus(); - returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); - - /* (2) requestToken */ - /* - * Actually we are not planning an asynchronous version of ReserveSpace (in - * theory not needed for StoRM). Therefor this parameter is not set. - */ - - /* (3) estimatedProcessingTime */ - // TODO: in the future (actually the FE is predisposed to decode this value - // as an int). - - /* (4) retentionPolocyInfo */ - TRetentionPolicyInfo retentionPolicyInfo = outputData - .getRetentionPolicyInfo(); - if (retentionPolicyInfo != null) { - retentionPolicyInfo.encode(outputParam, - TRetentionPolicyInfo.PNAME_retentionPolicyInfo); - } - - /* (5) sizeOfTotalReservedSpace */ - TSizeInBytes sizeOfTotalReservedSpace = outputData.getTotalSize(); - if (sizeOfTotalReservedSpace != null) { - if (!(sizeOfTotalReservedSpace.isEmpty())) { - sizeOfTotalReservedSpace.encode(outputParam, - TSizeInBytes.PNAME_SIZEOFTOTALRESERVEDSPACE); - } - } - - /* (6) sizeOfGuaranteedReservedSpace */ - TSizeInBytes sizeOfGuaranteedReservedSpace = outputData.getGuaranteedSize(); - if (sizeOfGuaranteedReservedSpace != null) { - if (!(sizeOfGuaranteedReservedSpace.isEmpty())) { - sizeOfGuaranteedReservedSpace.encode(outputParam, - TSizeInBytes.PNAME_SIZEOFGUARANTEEDRESERVEDSPACE); - } - } - - /* (7) lifetimeOfReservedSpace */ - TLifeTimeInSeconds lifetimeOfReservedSpace = outputData - .getLifeTimeInSeconds(); - if (lifetimeOfReservedSpace != null) { - if (!(lifetimeOfReservedSpace.isEmpty())) { - lifetimeOfReservedSpace.encode(outputParam, - TLifeTimeInSeconds.PNAME_LIFETIMEOFRESERVEDSPACE); - } - } - - /* (8) spaceToken */ - TSpaceToken spaceToken = outputData.getSpaceToken(); - if (spaceToken != null) { - spaceToken.encode(outputParam, TSpaceToken.PNAME_SPACETOKEN); - } - - log.debug(outputParam.toString()); - - return outputParam; - } + /** Logger */ + private static final Logger log = LoggerFactory.getLogger(ReserveSpaceConverter.class); + + public ReserveSpaceConverter() {}; + + /** + * This method return a SpaceResData created from input Hashtable structure of an xmlrpc + * spaceReservation v2.1 call. 
SpaceResData can be used to invoke SpaceResevation Manager + */ + public InputData convertToInputData(Map inputParam) { + + log.debug( + "reserveSpaceConverter :Call received :Creation of SpaceResData = {}", inputParam.size()); + log.debug( + "reserveSpaceConverter: Input Structure toString: {}", + ParameterDisplayHelper.display(inputParam)); + + String memberName = null; + + GridUserInterface guser = GridUserManager.decode(inputParam); + + memberName = new String("authorizationID"); + String authID = (String) inputParam.get(memberName); + + memberName = new String("userSpaceTokenDescription"); + String spaceAlias = (String) inputParam.get(memberName); + if (spaceAlias == null) { + spaceAlias = new String(""); + } + + TRetentionPolicyInfo retentionPolicyInfo = + TRetentionPolicyInfo.decode(inputParam, TRetentionPolicyInfo.PNAME_retentionPolicyInfo); + + TSizeInBytes desiredSizeOfTotalSpace = + TSizeInBytes.decode(inputParam, TSizeInBytes.PNAME_DESIREDSIZEOFTOTALSPACE); + + TSizeInBytes desiredSizeOfGuaranteedSpace = + TSizeInBytes.decode(inputParam, TSizeInBytes.PNAME_DESIREDSIZEOFGUARANTEEDSPACE); + + ArrayOfTExtraInfo storageSystemInfo; + try { + storageSystemInfo = + ArrayOfTExtraInfo.decode(inputParam, ArrayOfTExtraInfo.PNAME_STORAGESYSTEMINFO); + } catch (InvalidArrayOfTExtraInfoAttributeException e) { + storageSystemInfo = null; + } + + ReserveSpaceInputData inputData; + if (guser != null) { + inputData = + new IdentityReserveSpaceInputData( + guser, + spaceAlias, + retentionPolicyInfo, + desiredSizeOfTotalSpace, + desiredSizeOfGuaranteedSpace, + storageSystemInfo); + } else { + inputData = + new AnonymousReserveSpaceInputData( + spaceAlias, + retentionPolicyInfo, + desiredSizeOfTotalSpace, + desiredSizeOfGuaranteedSpace, + storageSystemInfo); + } + TLifeTimeInSeconds desiredLifetimeOfReservedSpace = + TLifeTimeInSeconds.decode( + inputParam, TLifeTimeInSeconds.PNAME_DESIREDLIFETIMEOFRESERVEDSPACE); + if (desiredLifetimeOfReservedSpace != null && !desiredLifetimeOfReservedSpace.isEmpty()) { + inputData.setSpaceLifetime(desiredLifetimeOfReservedSpace); + } + return inputData; + } + + public Map convertFromOutputData(OutputData data) { + + log.debug("reserveSpaceConverter :Call received :Creation of XMLRPC Output Structure! "); + + // Creation of new Hashtable to return + Map outputParam = new HashMap(); + + ReserveSpaceOutputData outputData = (ReserveSpaceOutputData) data; + + /* (1) returnStatus */ + TReturnStatus returnStatus = outputData.getStatus(); + returnStatus.encode(outputParam, TReturnStatus.PNAME_RETURNSTATUS); + + /* (2) requestToken */ + /* + * Actually we are not planning an asynchronous version of ReserveSpace (in + * theory not needed for StoRM). Therefor this parameter is not set. + */ + + /* (3) estimatedProcessingTime */ + // TODO: in the future (actually the FE is predisposed to decode this value + // as an int). 
+ + /* (4) retentionPolocyInfo */ + TRetentionPolicyInfo retentionPolicyInfo = outputData.getRetentionPolicyInfo(); + if (retentionPolicyInfo != null) { + retentionPolicyInfo.encode(outputParam, TRetentionPolicyInfo.PNAME_retentionPolicyInfo); + } + + /* (5) sizeOfTotalReservedSpace */ + TSizeInBytes sizeOfTotalReservedSpace = outputData.getTotalSize(); + if (sizeOfTotalReservedSpace != null) { + if (!(sizeOfTotalReservedSpace.isEmpty())) { + sizeOfTotalReservedSpace.encode(outputParam, TSizeInBytes.PNAME_SIZEOFTOTALRESERVEDSPACE); + } + } + + /* (6) sizeOfGuaranteedReservedSpace */ + TSizeInBytes sizeOfGuaranteedReservedSpace = outputData.getGuaranteedSize(); + if (sizeOfGuaranteedReservedSpace != null) { + if (!(sizeOfGuaranteedReservedSpace.isEmpty())) { + sizeOfGuaranteedReservedSpace.encode( + outputParam, TSizeInBytes.PNAME_SIZEOFGUARANTEEDRESERVEDSPACE); + } + } + + /* (7) lifetimeOfReservedSpace */ + TLifeTimeInSeconds lifetimeOfReservedSpace = outputData.getLifeTimeInSeconds(); + if (lifetimeOfReservedSpace != null) { + if (!(lifetimeOfReservedSpace.isEmpty())) { + lifetimeOfReservedSpace.encode( + outputParam, TLifeTimeInSeconds.PNAME_LIFETIMEOFRESERVEDSPACE); + } + } + + /* (8) spaceToken */ + TSpaceToken spaceToken = outputData.getSpaceToken(); + if (spaceToken != null) { + spaceToken.encode(outputParam, TSpaceToken.PNAME_SPACETOKEN); + } + + log.debug(outputParam.toString()); + + return outputParam; + } } diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml index d988f8cc..d8fa2850 100644 --- a/src/main/resources/logback.xml +++ b/src/main/resources/logback.xml @@ -1,8 +1,7 @@ diff --git a/src/main/resources/org/apache/xmlrpc/webserver/XmlRpcServlet.properties b/src/main/resources/org/apache/xmlrpc/webserver/XmlRpcServlet.properties index 03e536ca..a4ab9232 100644 --- a/src/main/resources/org/apache/xmlrpc/webserver/XmlRpcServlet.properties +++ b/src/main/resources/org/apache/xmlrpc/webserver/XmlRpcServlet.properties @@ -1,6 +1,5 @@ # -# Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). -# SPDX-License-Identifier: Apache-2.0 +# Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 # synchcall=it.grid.storm.xmlrpc.XMLRPCMethods \ No newline at end of file diff --git a/src/test/java/it/grid/storm/balancer/BalancerUtils.java b/src/test/java/it/grid/storm/balancer/BalancerUtils.java index b696be1d..601427a5 100644 --- a/src/test/java/it/grid/storm/balancer/BalancerUtils.java +++ b/src/test/java/it/grid/storm/balancer/BalancerUtils.java @@ -1,19 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer; -import org.mockito.Mockito; - import it.grid.storm.balancer.node.FTPNode; import it.grid.storm.balancer.node.HttpNode; import it.grid.storm.balancer.node.HttpsNode; +import org.mockito.Mockito; public class BalancerUtils { - protected Node getNode(Class c, int id, String hostname, int port, - boolean isResponsive) { + protected Node getNode( + Class c, int id, String hostname, int port, boolean isResponsive) { Node n = Mockito.mock(c); Mockito.when(n.getHostname()).thenReturn(hostname); @@ -23,8 +21,13 @@ protected Node getNode(Class c, int id, String hostname, int por return n; } - protected Node getNode(Class c, int id, String hostname, int port, - boolean isResponsive, int weight) { + protected Node getNode( + Class c, + int id, + String hostname, + int port, + boolean isResponsive, + int weight) { Node n = Mockito.mock(c); Mockito.when(n.getHostname()).thenReturn(hostname); @@ -82,5 +85,4 @@ protected Node getResponsiveHttpsNode(int id, String hostname, int port, int wei protected Node getUnresponsiveHttpsNode(int id, String hostname, int port, int weight) { return getNode(HttpsNode.class, id, hostname, port, false, weight); } - } diff --git a/src/test/java/it/grid/storm/balancer/cache/ResponsivenessCacheTest.java b/src/test/java/it/grid/storm/balancer/cache/ResponsivenessCacheTest.java index 9ff1f497..18c90a39 100644 --- a/src/test/java/it/grid/storm/balancer/cache/ResponsivenessCacheTest.java +++ b/src/test/java/it/grid/storm/balancer/cache/ResponsivenessCacheTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.cache; @@ -11,11 +10,10 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.junit.Before; -import org.junit.Test; - import it.grid.storm.balancer.BalancerUtils; import it.grid.storm.balancer.Node; +import org.junit.Before; +import org.junit.Test; public class ResponsivenessCacheTest extends BalancerUtils { @@ -64,5 +62,4 @@ public void testCaching() { assertEquals(UNRESPONSIVE, CACHE.getResponsiveness(ftp2)); assertTrue(CACHE.isCached(ftp2)); } - } diff --git a/src/test/java/it/grid/storm/balancer/strategy/BalancingStrategiesTests.java b/src/test/java/it/grid/storm/balancer/strategy/BalancingStrategiesTests.java index cd87ee35..208e58a4 100644 --- a/src/test/java/it/grid/storm/balancer/strategy/BalancingStrategiesTests.java +++ b/src/test/java/it/grid/storm/balancer/strategy/BalancingStrategiesTests.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; @@ -8,18 +7,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; - import com.google.common.collect.Lists; - import it.grid.storm.balancer.BalancerUtils; import it.grid.storm.balancer.BalancingStrategy; import it.grid.storm.balancer.Node; import it.grid.storm.balancer.cache.ResponsivenessCache; import it.grid.storm.balancer.exception.BalancingStrategyException; +import java.util.List; +import org.junit.Before; +import org.junit.Test; public class BalancingStrategiesTests extends BalancerUtils { @@ -78,7 +74,6 @@ public void randomTest() throws BalancingStrategyException { assertTrue(nodes.indexOf(rs.getNextElement()) != -1); assertTrue(nodes.indexOf(rs.getNextElement()) != -1); assertTrue(nodes.indexOf(rs.getNextElement()) != -1); - } @Test @@ -103,5 +98,4 @@ public void weightTest() throws BalancingStrategyException { assertEquals(https1, ws.getNextElement()); } - } diff --git a/src/test/java/it/grid/storm/balancer/strategy/CyclicCounterTest.java b/src/test/java/it/grid/storm/balancer/strategy/CyclicCounterTest.java index 6d02f0da..96af6d40 100644 --- a/src/test/java/it/grid/storm/balancer/strategy/CyclicCounterTest.java +++ b/src/test/java/it/grid/storm/balancer/strategy/CyclicCounterTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.balancer.strategy; @@ -24,5 +23,4 @@ public void basicTest() { public void errorOnInit() { new CyclicCounter(-4); } - } diff --git a/src/test/java/it/grid/storm/info/du/DiskUsageUtilsTest.java b/src/test/java/it/grid/storm/info/du/DiskUsageUtilsTest.java index ce273067..e0d49272 100644 --- a/src/test/java/it/grid/storm/info/du/DiskUsageUtilsTest.java +++ b/src/test/java/it/grid/storm/info/du/DiskUsageUtilsTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.info.du; @@ -8,16 +7,13 @@ import static it.grid.storm.space.ExitStatus.SUCCESS; import static org.junit.Assert.assertEquals; +import com.google.common.collect.Lists; +import it.grid.storm.space.DUResult; import java.time.Duration; import java.time.Instant; import java.util.List; - import org.junit.Test; -import com.google.common.collect.Lists; - -import it.grid.storm.space.DUResult; - public class DiskUsageUtilsTest { @Test @@ -35,7 +31,6 @@ public void testEmptyOutput() { assertEquals(result.getDetail(), "empty output"); assertEquals(result.getDurationInMillis(), fiveMinutes.toMillis()); assertEquals(result.getSizeInBytes(), -1L); - } @Test @@ -53,7 +48,6 @@ public void testSuccessfulOutput() { assertEquals(result.getDetail(), ""); assertEquals(result.getDurationInMillis(), fiveMinutes.toMillis()); assertEquals(result.getSizeInBytes(), 474839632L); - } @Test @@ -71,7 +65,6 @@ public void testParseLongErrorOutput() { assertEquals(result.getDetail(), "NumberFormatException on parsing du output"); assertEquals(result.getDurationInMillis(), fiveMinutes.toMillis()); assertEquals(result.getSizeInBytes(), -1L); - } @Test(expected = NullPointerException.class) @@ -84,5 +77,4 @@ public void testParseNullOutputError() { DiskUsageUtils.getResult(ABS_PATH, start, end, null); } - } diff --git a/src/test/java/it/grid/storm/namespace/model/SAInfoV13Test.java b/src/test/java/it/grid/storm/namespace/model/SAInfoV13Test.java index a5c078e0..2340af7a 100644 --- a/src/test/java/it/grid/storm/namespace/model/SAInfoV13Test.java +++ b/src/test/java/it/grid/storm/namespace/model/SAInfoV13Test.java @@ -1,22 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import static org.junit.Assert.assertEquals; -import java.io.IOException; - -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; - import it.grid.storm.namespace.remote.Constants.HttpPerms; +import java.io.IOException; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SAInfoV13Test { @@ -59,7 +55,4 @@ public void testRead() throws IOException { assertEquals(saInfoRead.getName(), saInfo.getName()); assertEquals(saInfoRead.getToken(), saInfo.getToken()); } - - - } diff --git a/src/test/java/it/grid/storm/namespace/model/SAInfoV14Test.java b/src/test/java/it/grid/storm/namespace/model/SAInfoV14Test.java index b6584577..65f58589 100644 --- a/src/test/java/it/grid/storm/namespace/model/SAInfoV14Test.java +++ b/src/test/java/it/grid/storm/namespace/model/SAInfoV14Test.java @@ -1,22 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.namespace.model; import static org.junit.Assert.assertEquals; -import java.io.IOException; - -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; - import it.grid.storm.namespace.remote.Constants.HttpPerms; +import java.io.IOException; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SAInfoV14Test { @@ -59,7 +55,4 @@ public void testRead() throws IOException { assertEquals(saInfoRead.getName(), saInfo.getName()); assertEquals(saInfoRead.getToken(), saInfo.getToken()); } - - - } diff --git a/src/test/java/it/grid/storm/rest/auth/RestTokenFilterTest.java b/src/test/java/it/grid/storm/rest/auth/RestTokenFilterTest.java index d3881b6e..7c2436a1 100644 --- a/src/test/java/it/grid/storm/rest/auth/RestTokenFilterTest.java +++ b/src/test/java/it/grid/storm/rest/auth/RestTokenFilterTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.auth; @@ -16,12 +15,10 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; - import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import org.eclipse.jetty.servlet.FilterHolder; import org.junit.After; import org.junit.Test; @@ -29,10 +26,10 @@ public class RestTokenFilterTest { - private final static String TOKEN = "abracadabra"; - private final static String WRONG_TOKEN = "alakazam"; + private static final String TOKEN = "abracadabra"; + private static final String WRONG_TOKEN = "alakazam"; - private final static String TMP_FILENAME = "tmp.txt"; + private static final String TMP_FILENAME = "tmp.txt"; private HttpServletRequest getMockRequest(String token) { HttpServletRequest request = Mockito.mock(HttpServletRequest.class); diff --git a/src/test/java/it/grid/storm/rest/metadata/MetadataTests.java b/src/test/java/it/grid/storm/rest/metadata/MetadataTests.java index b716b5ce..cdbd214f 100644 --- a/src/test/java/it/grid/storm/rest/metadata/MetadataTests.java +++ b/src/test/java/it/grid/storm/rest/metadata/MetadataTests.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata; @@ -10,14 +9,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; -import java.io.IOException; - -import javax.ws.rs.WebApplicationException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import it.grid.storm.filesystem.FSException; import it.grid.storm.filesystem.FilesystemError; import it.grid.storm.namespace.NamespaceException; @@ -29,6 +20,11 @@ import it.grid.storm.rest.metadata.model.VirtualFsMetadata; import it.grid.storm.rest.metadata.service.ResourceNotFoundException; import it.grid.storm.rest.metadata.service.StoriMetadataService; +import java.io.IOException; +import javax.ws.rs.WebApplicationException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class MetadataTests { @@ -48,33 +44,37 @@ private VirtualFS getVirtualFS(String name, String rootPath) throws NamespaceExc Mockito.when(vfs.getAliasName()).thenReturn(name); Mockito.when(vfs.getRootPath()).thenReturn(rootPath); StoRI stori = Mockito.mock(StoRI.class); - Mockito.when(vfs.createFile(Mockito.anyString(), Mockito.any(StoRIType.class), - Mockito.any(MappingRule.class))) - .thenReturn(stori); + Mockito.when( + vfs.createFile( + Mockito.anyString(), Mockito.any(StoRIType.class), Mockito.any(MappingRule.class))) + .thenReturn(stori); Mockito.when(stori.getAbsolutePath()).thenReturn(FILE_PATH); return vfs; } - private Metadata getMetadataServiceSuccess(StoriMetadata output) throws ResourceNotFoundException, - NamespaceException, IOException, SecurityException, FilesystemError, FSException { + private Metadata getMetadataServiceSuccess(StoriMetadata output) + throws ResourceNotFoundException, NamespaceException, IOException, SecurityException, + FilesystemError, FSException { StoriMetadataService service = Mockito.mock(StoriMetadataService.class); Mockito.when(service.getMetadata(Mockito.anyString())).thenReturn(output); return getMetadataServlet(service); } - private Metadata getMetadataServiceNotFound() throws ResourceNotFoundException, - NamespaceException, IOException, SecurityException, FilesystemError, FSException { + private Metadata getMetadataServiceNotFound() + throws ResourceNotFoundException, NamespaceException, IOException, SecurityException, + FilesystemError, FSException { StoriMetadataService service = Mockito.mock(StoriMetadataService.class); Mockito.when(service.getMetadata(Mockito.anyString())) - .thenThrow(new ResourceNotFoundException(FILE_PATH + " not exists")); + .thenThrow(new ResourceNotFoundException(FILE_PATH + " not exists")); return getMetadataServlet(service); } - private Metadata getMetadataServiceNamespaceException() throws ResourceNotFoundException, - NamespaceException, IOException, SecurityException, FilesystemError, FSException { + private Metadata getMetadataServiceNamespaceException() + throws ResourceNotFoundException, NamespaceException, IOException, SecurityException, + FilesystemError, FSException { StoriMetadataService service = Mockito.mock(StoriMetadataService.class); Mockito.when(service.getMetadata(Mockito.anyString())) - .thenThrow(new NamespaceException("Mocked namespace excpetion")); + .thenThrow(new NamespaceException("Mocked namespace excpetion")); return getMetadataServlet(service); } @@ -85,16 +85,21 @@ private Metadata getMetadataServlet(StoriMetadataService s) { @Before public void init() throws NamespaceException { vfs = getVirtualFS(VFS_NAME, VFS_ROOTPATH); - expected = 
StoriMetadata.builder() - .absolutePath(FILE_PATH) - .filesystem( - VirtualFsMetadata.builder().name(vfs.getAliasName()).root(vfs.getRootPath()).build()) - .build(); + expected = + StoriMetadata.builder() + .absolutePath(FILE_PATH) + .filesystem( + VirtualFsMetadata.builder() + .name(vfs.getAliasName()) + .root(vfs.getRootPath()) + .build()) + .build(); } @Test - public void testSuccess() throws NamespaceException, ResourceNotFoundException, IOException, - SecurityException, FilesystemError, FSException { + public void testSuccess() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceSuccess(expected); StoriMetadata response = servlet.getFileMetadata(STFN_PATH); @@ -104,8 +109,9 @@ public void testSuccess() throws NamespaceException, ResourceNotFoundException, } @Test - public void testSuccessWithWrongToken() throws NamespaceException, ResourceNotFoundException, - IOException, SecurityException, FilesystemError, FSException { + public void testSuccessWithWrongToken() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceSuccess(expected); StoriMetadata response = servlet.getFileMetadata(STFN_PATH); @@ -115,8 +121,9 @@ public void testSuccessWithWrongToken() throws NamespaceException, ResourceNotFo } @Test - public void testSuccessStfnNoSlash() throws NamespaceException, ResourceNotFoundException, - IOException, SecurityException, FilesystemError, FSException { + public void testSuccessStfnNoSlash() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceSuccess(expected); StoriMetadata response = servlet.getFileMetadata(STFN_NOSLASH_PATH); @@ -126,8 +133,9 @@ public void testSuccessStfnNoSlash() throws NamespaceException, ResourceNotFound } @Test - public void testMetadataNotFound() throws NamespaceException, ResourceNotFoundException, - IOException, SecurityException, FilesystemError, FSException { + public void testMetadataNotFound() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceNotFound(); try { servlet.getFileMetadata(STFN_PATH); @@ -138,8 +146,9 @@ public void testMetadataNotFound() throws NamespaceException, ResourceNotFoundEx } @Test - public void testMetadataNamespaceException() throws NamespaceException, ResourceNotFoundException, - IOException, SecurityException, FilesystemError, FSException { + public void testMetadataNamespaceException() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceNamespaceException(); try { servlet.getFileMetadata(STFN_PATH); @@ -150,8 +159,9 @@ public void testMetadataNamespaceException() throws NamespaceException, Resource } @Test - public void testMetadataBadRequest() throws NamespaceException, ResourceNotFoundException, - IOException, SecurityException, FilesystemError, FSException { + public void testMetadataBadRequest() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceSuccess(expected); try { servlet.getFileMetadata("/"); @@ -162,8 +172,9 @@ public void testMetadataBadRequest() throws NamespaceException, 
ResourceNotFound } @Test - public void testMetadataBadRequestEmptyStfn() throws NamespaceException, - ResourceNotFoundException, IOException, SecurityException, FilesystemError, FSException { + public void testMetadataBadRequestEmptyStfn() + throws NamespaceException, ResourceNotFoundException, IOException, SecurityException, + FilesystemError, FSException { Metadata servlet = getMetadataServiceSuccess(expected); try { servlet.getFileMetadata(""); diff --git a/src/test/java/it/grid/storm/rest/metadata/ResourceServiceTest.java b/src/test/java/it/grid/storm/rest/metadata/ResourceServiceTest.java index b4d36602..8842ae5e 100644 --- a/src/test/java/it/grid/storm/rest/metadata/ResourceServiceTest.java +++ b/src/test/java/it/grid/storm/rest/metadata/ResourceServiceTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata; @@ -10,23 +9,20 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.io.File; -import java.io.IOException; -import java.util.Collections; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import com.google.common.collect.Lists; - import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.StoRI; import it.grid.storm.namespace.model.MappingRule; import it.grid.storm.namespace.model.VirtualFS; import it.grid.storm.rest.metadata.service.ResourceNotFoundException; import it.grid.storm.rest.metadata.service.ResourceService; +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class ResourceServiceTest { @@ -54,14 +50,14 @@ private VirtualFS getVirtualFS(String name, String rootPath) throws NamespaceExc Mockito.when(vfs.getRootPath()).thenReturn(rootPath); StoRI fileStori = Mockito.mock(StoRI.class); Mockito.when(fileStori.getAbsolutePath()).thenReturn(FILE_PATH); - Mockito - .when(vfs.createFile(Mockito.anyString(), Mockito.eq(FILE), Mockito.any(MappingRule.class))) - .thenReturn(fileStori); + Mockito.when( + vfs.createFile(Mockito.anyString(), Mockito.eq(FILE), Mockito.any(MappingRule.class))) + .thenReturn(fileStori); StoRI dirStori = Mockito.mock(StoRI.class); Mockito.when(dirStori.getAbsolutePath()).thenReturn(DIR_PATH); - Mockito - .when(vfs.createFile(Mockito.anyString(), Mockito.eq(FOLDER), Mockito.any(MappingRule.class))) - .thenReturn(dirStori); + Mockito.when( + vfs.createFile(Mockito.anyString(), Mockito.eq(FOLDER), Mockito.any(MappingRule.class))) + .thenReturn(dirStori); return vfs; } @@ -105,8 +101,7 @@ private ResourceService getStoRIResourceServiceNoVFSs() throws NamespaceExceptio VirtualFS vfs = getVirtualFS(VFS_NAME, VFS_ROOTPATH); MappingRule rule = getMappingRule(RULE_NAME, RULE_STFNROOT, vfs); - return new ResourceService(Collections.emptyList(), - Lists.newArrayList(rule)); + return new ResourceService(Collections.emptyList(), Lists.newArrayList(rule)); } @Before @@ -197,5 +192,4 @@ public void testMappingFailInternalErrorEmptyVFSs() assertTrue(e.getMessage().indexOf("Unable to map " + FILE_STFN_PATH + " to a rule") != -1); } } - } diff --git a/src/test/java/it/grid/storm/rest/metadata/StoriMetadataServiceTest.java b/src/test/java/it/grid/storm/rest/metadata/StoriMetadataServiceTest.java 
index 8e56c908..78c3ed88 100644 --- a/src/test/java/it/grid/storm/rest/metadata/StoriMetadataServiceTest.java +++ b/src/test/java/it/grid/storm/rest/metadata/StoriMetadataServiceTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.rest.metadata; @@ -15,15 +14,7 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.IOException; - -import org.junit.Test; -import org.mockito.Mockito; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Lists; - import it.grid.storm.ea.ExtendedAttributes; import it.grid.storm.ea.StormEA; import it.grid.storm.filesystem.FSException; @@ -39,6 +30,11 @@ import it.grid.storm.rest.metadata.service.ResourceService; import it.grid.storm.rest.metadata.service.StoriMetadataService; import it.grid.storm.srm.types.TDirOption; +import java.io.IOException; +import org.junit.Test; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class StoriMetadataServiceTest { @@ -57,7 +53,8 @@ public class StoriMetadataServiceTest { private StoriMetadataService service; - private void init(boolean dirExists, boolean fileExists, boolean isMigrated, boolean isRecalled) throws IOException, FSException, ResourceNotFoundException, NamespaceException { + private void init(boolean dirExists, boolean fileExists, boolean isMigrated, boolean isRecalled) + throws IOException, FSException, ResourceNotFoundException, NamespaceException { initStormEA(isMigrated, isRecalled); @@ -99,19 +96,23 @@ private void init(boolean dirExists, boolean fileExists, boolean isMigrated, boo service = new StoriMetadataService(resourceService); } - private void initAsFileOnline() throws IOException, FSException, ResourceNotFoundException, NamespaceException { + private void initAsFileOnline() + throws IOException, FSException, ResourceNotFoundException, NamespaceException { init(true, true, false, false); } - private void initAsFileOffline() throws IOException, FSException, ResourceNotFoundException, NamespaceException { + private void initAsFileOffline() + throws IOException, FSException, ResourceNotFoundException, NamespaceException { init(true, true, true, false); } - private void initAsFileNotFound() throws IOException, FSException, ResourceNotFoundException, NamespaceException { + private void initAsFileNotFound() + throws IOException, FSException, ResourceNotFoundException, NamespaceException { init(true, false, false, false); } - private void initAsFileMigratedAndRecalled() throws IOException, FSException, ResourceNotFoundException, NamespaceException { + private void initAsFileMigratedAndRecalled() + throws IOException, FSException, ResourceNotFoundException, NamespaceException { init(true, true, true, true); } @@ -133,8 +134,9 @@ private void initStormEA(boolean hasMigrated, boolean hasTSMRecT) { } @Test - public void testSuccess() throws NamespaceException, IOException, ResourceNotFoundException, - SecurityException, FilesystemError, FSException { + public void testSuccess() + throws NamespaceException, IOException, ResourceNotFoundException, SecurityException, + FilesystemError, FSException { initAsFileOnline(); StoriMetadata metadata = service.getMetadata(FILE_STFN_PATH); @@ -153,8 +155,9 @@ public void testSuccess() throws NamespaceException, IOException, ResourceNotFou 
} @Test - public void testSuccessDirectory() throws NamespaceException, IOException, - ResourceNotFoundException, SecurityException, FilesystemError, FSException { + public void testSuccessDirectory() + throws NamespaceException, IOException, ResourceNotFoundException, SecurityException, + FilesystemError, FSException { initAsFileOnline(); StoriMetadata metadata = service.getMetadata(DIR_STFN_PATH); @@ -169,7 +172,8 @@ public void testSuccessDirectory() throws NamespaceException, IOException, @Test public void testFileNotFound() - throws NamespaceException, IOException, SecurityException, FilesystemError, FSException, ResourceNotFoundException { + throws NamespaceException, IOException, SecurityException, FilesystemError, FSException, + ResourceNotFoundException { initAsFileNotFound(); try { @@ -180,8 +184,9 @@ public void testFileNotFound() } @Test - public void testSuccessFileExistsButMigrated() throws NamespaceException, IOException, - ResourceNotFoundException, SecurityException, FilesystemError, FSException { + public void testSuccessFileExistsButMigrated() + throws NamespaceException, IOException, ResourceNotFoundException, SecurityException, + FilesystemError, FSException { initAsFileOffline(); StoriMetadata metadata = service.getMetadata(FILE_STFN_PATH); @@ -193,8 +198,9 @@ public void testSuccessFileExistsButMigrated() throws NamespaceException, IOExce } @Test - public void testSuccessFileMigratedAndRecalled() throws NamespaceException, IOException, - ResourceNotFoundException, SecurityException, FilesystemError, FSException { + public void testSuccessFileMigratedAndRecalled() + throws NamespaceException, IOException, ResourceNotFoundException, SecurityException, + FilesystemError, FSException { initAsFileMigratedAndRecalled(); StoriMetadata metadata = service.getMetadata(FILE_STFN_PATH); diff --git a/src/test/java/it/grid/storm/tape/recalltable/model/TapeRecallStatusTest.java b/src/test/java/it/grid/storm/tape/recalltable/model/TapeRecallStatusTest.java index 4ed3f3e0..82f56eed 100644 --- a/src/test/java/it/grid/storm/tape/recalltable/model/TapeRecallStatusTest.java +++ b/src/test/java/it/grid/storm/tape/recalltable/model/TapeRecallStatusTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; diff --git a/src/test/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidatorTest.java b/src/test/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidatorTest.java index 0ecf6f45..98e022af 100644 --- a/src/test/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidatorTest.java +++ b/src/test/java/it/grid/storm/tape/recalltable/model/TaskInsertRequestValidatorTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.model; @@ -8,34 +7,35 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.junit.Test; - import it.grid.storm.tape.recalltable.resources.TaskInsertRequest; +import org.junit.Test; public class TaskInsertRequestValidatorTest { @Test public void testSuccess() { - TaskInsertRequest request = TaskInsertRequest.builder() - .voName("test.vo") - .userId("user") - .retryAttempts(0) - .pinLifetime(1000) - .stfn("/test.vo") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .voName("test.vo") + .userId("user") + .retryAttempts(0) + .pinLifetime(1000) + .stfn("/test.vo") + .build(); TaskInsertRequestValidator validator = new TaskInsertRequestValidator(request); assertTrue(validator.validate()); } @Test public void testNullStfn() { - TaskInsertRequest request = TaskInsertRequest.builder() - .voName("test.vo") - .userId("user") - .retryAttempts(0) - .pinLifetime(1000) - .stfn(null) - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .voName("test.vo") + .userId("user") + .retryAttempts(0) + .pinLifetime(1000) + .stfn(null) + .build(); TaskInsertRequestValidator validator = new TaskInsertRequestValidator(request); assertFalse(validator.validate()); assertEquals(validator.getErrorMessage(), "Request must contain a STFN"); @@ -43,17 +43,17 @@ public void testNullStfn() { @Test public void testNullStfnAndUserId() { - TaskInsertRequest request = TaskInsertRequest.builder() - .voName("test.vo") - .userId(null) - .retryAttempts(0) - .pinLifetime(1000) - .stfn(null) - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .voName("test.vo") + .userId(null) + .retryAttempts(0) + .pinLifetime(1000) + .stfn(null) + .build(); TaskInsertRequestValidator validator = new TaskInsertRequestValidator(request); assertFalse(validator.validate()); assertTrue(validator.getErrorMessage().contains("Request must contain a STFN")); assertTrue(validator.getErrorMessage().contains("Request must contain a userId")); } - } diff --git a/src/test/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequestTest.java b/src/test/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequestTest.java index a578eb82..b3ad8f9b 100644 --- a/src/test/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequestTest.java +++ b/src/test/java/it/grid/storm/tape/recalltable/resources/TaskInsertRequestTest.java @@ -1,21 +1,18 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.resources; import static org.junit.Assert.assertEquals; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; - import org.json.JSONException; import org.json.JSONObject; import org.junit.Test; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; - public class TaskInsertRequestTest { private JSONObject getTaskInsertRequestAsJsonObject() throws JSONException { @@ -36,12 +33,10 @@ public void testConstructorWithMapper() JSONObject j = getTaskInsertRequestAsJsonObject(); TaskInsertRequest request = mapper.readValue(j.toString().getBytes(), TaskInsertRequest.class); assertEquals(request.getStfn(), j.getString("stfn")); - assertEquals(Integer.valueOf(request.getRetryAttempts()), - Integer.valueOf(j.getInt("retryAttempts"))); + assertEquals( + Integer.valueOf(request.getRetryAttempts()), Integer.valueOf(j.getInt("retryAttempts"))); assertEquals(request.getVoName(), j.getString("voName")); assertEquals(request.getPinLifetime(), Integer.valueOf(j.getInt("pinLifetime"))); assertEquals(request.getUserId(), j.getString("userId")); - } - } diff --git a/src/test/java/it/grid/storm/tape/recalltable/resources/TaskResourceTest.java b/src/test/java/it/grid/storm/tape/recalltable/resources/TaskResourceTest.java index 63520947..d938ff13 100644 --- a/src/test/java/it/grid/storm/tape/recalltable/resources/TaskResourceTest.java +++ b/src/test/java/it/grid/storm/tape/recalltable/resources/TaskResourceTest.java @@ -1,6 +1,5 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package it.grid.storm.tape.recalltable.resources; @@ -16,24 +15,10 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.io.IOException; -import java.net.URI; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; - -import org.junit.Test; -import org.mockito.Mockito; - import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; - import it.grid.storm.griduser.VONameMatchingRule; import it.grid.storm.namespace.NamespaceException; import it.grid.storm.namespace.StoRI; @@ -47,6 +32,16 @@ import it.grid.storm.srm.types.InvalidTRequestTokenAttributesException; import it.grid.storm.srm.types.TRequestToken; import it.grid.storm.tape.recalltable.TapeRecallCatalog; +import java.io.IOException; +import java.net.URI; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.UUID; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; +import org.junit.Test; +import org.mockito.Mockito; public class TaskResourceTest { @@ -78,7 +73,7 @@ private TapeRecallCatalog getTapeRecallCatalogInsertSuccess(UUID groupTaskId) { try { Mockito.when(catalog.insertNewTask(Mockito.any(TapeRecallTO.class))).thenReturn(groupTaskId); Mockito.when(catalog.getGroupTasks(groupTaskId)) - .thenReturn(Lists.newArrayList(TapeRecallTO.createRandom(new Date(), VFS_VONAME))); + .thenReturn(Lists.newArrayList(TapeRecallTO.createRandom(new Date(), VFS_VONAME))); } catch (DataAccessException e) { e.printStackTrace(); } @@ -103,7 +98,7 @@ private ResourceService getResourceNotFoundService() throws ResourceNotFoundException, NamespaceException { ResourceService service = Mockito.mock(ResourceService.class); Mockito.when(service.getResource(Mockito.anyString())) - .thenThrow(new ResourceNotFoundException("Unable to map " + STFN_PATH + " to a rule")); + .thenThrow(new ResourceNotFoundException("Unable to map " + STFN_PATH + " to a rule")); return service; } @@ -111,7 +106,7 @@ private ResourceService getResourceNamespaceErrorService() throws ResourceNotFoundException, NamespaceException { ResourceService service = Mockito.mock(ResourceService.class); Mockito.when(service.getResource(Mockito.anyString())) - .thenThrow(new NamespaceException("Mocked namespace exception")); + .thenThrow(new NamespaceException("Mocked namespace exception")); return service; } @@ -120,7 +115,7 @@ private TapeRecallCatalog getTapeRecallCatalogInsertError() { TapeRecallCatalog catalog = Mockito.mock(TapeRecallCatalog.class); try { Mockito.when(catalog.insertNewTask(Mockito.any(TapeRecallTO.class))) - .thenThrow(new DataAccessException("Error on db")); + .thenThrow(new DataAccessException("Error on db")); } catch (DataAccessException e) { e.printStackTrace(); } @@ -156,7 +151,7 @@ private TaskResource getTaskResource(ResourceService service, TapeRecallCatalog private void testGETTaskInfo(Response res) throws InvalidTRequestTokenAttributesException, DataAccessException, JsonParseException, - JsonMappingException, IOException, NamespaceException, ResourceNotFoundException { + JsonMappingException, IOException, NamespaceException, ResourceNotFoundException { TaskResource recallEndpoint = getTaskResource(getResourceService(STORI), RECALL_CATALOG); @@ -172,7 
+167,7 @@ private void testGETTaskInfo(Response res) Mockito.when(requestToken.getValue()).thenReturn(requestTokenValue); task.setRequestToken(new TRequestToken(requestTokenValue, new Date())); Mockito.when(RECALL_CATALOG.getGroupTasks(UUID.fromString(groupTaskId))) - .thenReturn(Lists.newArrayList(task)); + .thenReturn(Lists.newArrayList(task)); // ask for task info res = recallEndpoint.getGroupTaskInfo(groupTaskId, requestTokenValue); @@ -185,16 +180,17 @@ private void testGETTaskInfo(Response res) @Test public void testPOSTSuccess() throws DataAccessException, NamespaceException, JsonParseException, JsonMappingException, - IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { + IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { TaskResource recallEndpoint = getTaskResource(getResourceService(STORI), RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .retryAttempts(0) - .voName(VFS_VONAME) - .pinLifetime(1223123) - .userId("test") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .retryAttempts(0) + .voName(VFS_VONAME) + .pinLifetime(1223123) + .userId("test") + .build(); Response res = recallEndpoint.postNewTask(request); assertNotNull(res.getHeaderString("Location")); assertEquals(res.getStatus(), CREATED.getStatusCode()); @@ -205,16 +201,17 @@ public void testPOSTSuccess() @Test public void testPOSTSuccessWithNullVoName() throws DataAccessException, NamespaceException, JsonParseException, JsonMappingException, - IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { + IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { TaskResource recallEndpoint = getTaskResource(getResourceService(STORI), RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .retryAttempts(0) - .voName(null) - .pinLifetime(1223123) - .userId("test") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .retryAttempts(0) + .voName(null) + .pinLifetime(1223123) + .userId("test") + .build(); Response res = recallEndpoint.postNewTask(request); assertNotNull(res.getHeaderString("Location")); assertEquals(res.getStatus(), CREATED.getStatusCode()); @@ -228,13 +225,14 @@ public void testPOSTNamespaceErrorOnResolvingStfnPath() TaskResource recallEndpoint = new TaskResource(getResourceNamespaceErrorService(), RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .retryAttempts(0) - .voName(VFS_VONAME) - .pinLifetime(1223123) - .userId("test") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .retryAttempts(0) + .voName(VFS_VONAME) + .pinLifetime(1223123) + .userId("test") + .build(); try { recallEndpoint.postNewTask(request); fail(); @@ -249,13 +247,14 @@ public void testPOSTBadVoNameRequested() throws DataAccessException, NamespaceException, ResourceNotFoundException { TaskResource recallEndpoint = new TaskResource(getResourceService(STORI), RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .retryAttempts(0) - .voName(REQUEST_WRONG_VONAME) - .pinLifetime(1223123) - .userId("test") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .retryAttempts(0) + .voName(REQUEST_WRONG_VONAME) + .pinLifetime(1223123) + .userId("test") + .build(); try { recallEndpoint.postNewTask(request); 
fail(); @@ -286,13 +285,14 @@ public void testPOSTDbException() TaskResource recallEndpoint = new TaskResource(getResourceService(STORI), BROKEN_RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .retryAttempts(0) - .voName(VFS_VONAME) - .pinLifetime(1223123) - .userId("test") - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .retryAttempts(0) + .voName(VFS_VONAME) + .pinLifetime(1223123) + .userId("test") + .build(); try { recallEndpoint.postNewTask(request); fail(); @@ -346,7 +346,8 @@ public void testPOSTValidationRequestInvalidNegativeRetryAttempts() fail(); } catch (WebApplicationException e) { assertEquals(e.getResponse().getStatus(), BAD_REQUEST.getStatusCode()); - assertEquals(e.getResponse().getEntity().toString(), + assertEquals( + e.getResponse().getEntity().toString(), "Retry attempts must be more or equal than zero."); } } @@ -357,17 +358,19 @@ public void testPOSTValidationRequestInvalidTooManyRetryAttempts() TaskResource recallEndpoint = new TaskResource(getResourceService(STORI), BROKEN_RECALL_CATALOG); - TaskInsertRequest request = TaskInsertRequest.builder() - .stfn(STFN_PATH) - .userId("test") - .retryAttempts(Integer.valueOf(MAX_RETRY_ATTEMPTS) + 1) - .build(); + TaskInsertRequest request = + TaskInsertRequest.builder() + .stfn(STFN_PATH) + .userId("test") + .retryAttempts(Integer.valueOf(MAX_RETRY_ATTEMPTS) + 1) + .build(); try { recallEndpoint.postNewTask(request); fail(); } catch (WebApplicationException e) { assertEquals(e.getResponse().getStatus(), BAD_REQUEST.getStatusCode()); - assertEquals(e.getResponse().getEntity().toString(), + assertEquals( + e.getResponse().getEntity().toString(), "Retry attempts must be less or equal than " + MAX_RETRY_ATTEMPTS + "."); } } @@ -383,13 +386,11 @@ private TapeRecallCatalog getTapeRecallCatalogInProgressNotEmpty() { @Test public void testGETTasksInProgressEmpty() throws DataAccessException, NamespaceException, JsonParseException, JsonMappingException, - IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { + IOException, InvalidTRequestTokenAttributesException, ResourceNotFoundException { TaskResource recallEndpoint = getTaskResource(getResourceService(STORI), getTapeRecallCatalogInProgressNotEmpty()); Response res = recallEndpoint.getTasks(10); assertEquals(res.getStatus(), OK.getStatusCode()); } - - } diff --git a/src/test/java/org/italiangrid/storm/test/TestSURLValidator.java b/src/test/java/org/italiangrid/storm/test/TestSURLValidator.java index 38bd957d..904c4f82 100644 --- a/src/test/java/org/italiangrid/storm/test/TestSURLValidator.java +++ b/src/test/java/org/italiangrid/storm/test/TestSURLValidator.java @@ -1,19 +1,17 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package org.italiangrid.storm.test; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; -import it.grid.storm.util.SURLValidator; +import static org.junit.Assert.assertTrue; +import it.grid.storm.util.SURLValidator; import org.junit.Test; - public class TestSURLValidator { - static final String validSURLs[] = { + static final String validSURLs[] = { "srm://host.ciccio:8444/test//palla", "srm://host.ciccio/manager", "srm://host.com:8009//srm/managerv2?SFN=/test//ciccio", @@ -21,31 +19,27 @@ public class TestSURLValidator { "srm://[2001:720:1210:f023::65]:8444/srm/managerv2?SFN=/lhcp/plus+minus-ciccio,also_with_a_comma.txt", "srm://[2001:720:1210:f023::65]:8444/ciccio+caio_[{}]/horrible.txt" }; - - static final String invalidSURLs[] = { - "invalid surl", - "http://www.google.com", - "https://www.cicciopalla.com", - "srm://ciccio.srm.org/file with space", - "", - "drop database storm_BE_ISAM" - }; - - + + static final String invalidSURLs[] = { + "invalid surl", + "http://www.google.com", + "https://www.cicciopalla.com", + "srm://ciccio.srm.org/file with space", + "", + "drop database storm_BE_ISAM" + }; + @Test public void testValid() { - for (String surl: validSURLs){ - assertTrue("Valid SURL considered invalid: "+surl, - SURLValidator.valid(surl)); + for (String surl : validSURLs) { + assertTrue("Valid SURL considered invalid: " + surl, SURLValidator.valid(surl)); } } - + @Test public void testInvalid() { - for (String surl: invalidSURLs){ - assertFalse("Invalid SURL considered valid: "+surl, - SURLValidator.valid(surl)); + for (String surl : invalidSURLs) { + assertFalse("Invalid SURL considered valid: " + surl, SURLValidator.valid(surl)); } } - } diff --git a/src/test/java/org/italiangrid/storm/test/TestTokenValidator.java b/src/test/java/org/italiangrid/storm/test/TestTokenValidator.java index d44e0452..0caba0cc 100644 --- a/src/test/java/org/italiangrid/storm/test/TestTokenValidator.java +++ b/src/test/java/org/italiangrid/storm/test/TestTokenValidator.java @@ -1,42 +1,34 @@ /** - * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). - * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 
SPDX-License-Identifier: Apache-2.0 */ package org.italiangrid.storm.test; import static org.junit.Assert.*; -import it.grid.storm.util.TokenValidator; +import it.grid.storm.util.TokenValidator; import org.junit.Test; - public class TestTokenValidator { public static final String[] validTokens = { - "dc99d6d9-4ec5-4e29-aebd-51a8cc06a2f9", - "15e9515d-c0d0-412d-89b2-134fe4fe1655" + "dc99d6d9-4ec5-4e29-aebd-51a8cc06a2f9", "15e9515d-c0d0-412d-89b2-134fe4fe1655" }; public static final String[] invalidTokens = { - "dc99d6d9-4ec5-4e29-aebd-51a806a2f9", - "15e9515d", - "srm://ciccio.caio.org", - "" + "dc99d6d9-4ec5-4e29-aebd-51a806a2f9", "15e9515d", "srm://ciccio.caio.org", "" }; - + @Test public void testValid() { - for (String tok: validTokens){ - assertTrue("Valid token considered invalid: "+ - tok,TokenValidator.valid(tok)); + for (String tok : validTokens) { + assertTrue("Valid token considered invalid: " + tok, TokenValidator.valid(tok)); } } @Test public void testInvalid() { - for (String tok: invalidTokens){ - assertFalse("Invalid token considered valid: "+ - tok,TokenValidator.valid(tok)); + for (String tok : invalidTokens) { + assertFalse("Invalid token considered valid: " + tok, TokenValidator.valid(tok)); } } } From 90e5430e15a92ba319a9c7622e8bf10f93bc9c1f Mon Sep 17 00:00:00 2001 From: Enrico Vianello Date: Mon, 10 Jul 2023 16:51:56 +0200 Subject: [PATCH 2/3] Add dav|davs to supported transport protocols on namespace --- etc/namespace-1.5.1.xsd | 346 ++++++++++++++++++++++++++++++++++++++++ etc/namespace.xml | 2 +- 2 files changed, 347 insertions(+), 1 deletion(-) create mode 100644 etc/namespace-1.5.1.xsd diff --git a/etc/namespace-1.5.1.xsd b/etc/namespace-1.5.1.xsd new file mode 100644 index 00000000..214fa635 --- /dev/null +++ b/etc/namespace-1.5.1.xsd @@ -0,0 +1,346 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/etc/namespace.xml b/etc/namespace.xml index 42a2df8e..8c0ae320 100644 --- a/etc/namespace.xml +++ b/etc/namespace.xml @@ -1,4 +1,4 @@ - + TEST1 From f3833713c88b48ba065d5169bba2d35c2bfae74e Mon Sep 17 00:00:00 2001 From: Enrico Vianello Date: Mon, 10 Jul 2023 17:00:56 +0200 Subject: [PATCH 3/3] WIP --- .../storm/common/types/TransferProtocol.java | 131 ++++++++++-------- .../it/grid/storm/config/Configuration.java | 2 +- .../it/grid/storm/namespace/StoRIImpl.java | 20 +++ .../grid/storm/namespace/model/Protocol.java | 34 ++--- 4 files changed, 107 insertions(+), 80 deletions(-) diff --git a/src/main/java/it/grid/storm/common/types/TransferProtocol.java b/src/main/java/it/grid/storm/common/types/TransferProtocol.java index a1ce7cfe..8d4e83e3 100644 --- a/src/main/java/it/grid/storm/common/types/TransferProtocol.java +++ b/src/main/java/it/grid/storm/common/types/TransferProtocol.java @@ -15,84 +15,94 @@ public class TransferProtocol { private String protocol; /** Static attribute that indicates 
EMPTY TransferProtocol */ - public static TransferProtocol EMPTY = - new TransferProtocol("empty") { + public static TransferProtocol EMPTY = new TransferProtocol("empty") { - public int hashCode() { + public int hashCode() { - return 0; - } - }; + return 0; + } + };
/** Static attribute that indicates FILE TransferProtocol. */ - public static TransferProtocol FILE = - new TransferProtocol("file") { + public static TransferProtocol FILE = new TransferProtocol("file") { - public int hashCode() { + public int hashCode() { - return 1; - } - }; + return 1; + } + };
/** Static attribute that indicates GSIFTP TransferProtocol. */ - public static TransferProtocol GSIFTP = - new TransferProtocol("gsiftp") { + public static TransferProtocol GSIFTP = new TransferProtocol("gsiftp") { - public int hashCode() { + public int hashCode() { - return 2; - } - }; + return 2; + } + };
/** Static attribute that indicates RFIO TransferProtocol. */ - public static TransferProtocol RFIO = - new TransferProtocol("rfio") { + public static TransferProtocol RFIO = new TransferProtocol("rfio") { - public int hashCode() { + public int hashCode() { - return 3; - } - }; + return 3; + } + };
/** Static attribute that indicates ROOT TransferProtocol. */ - public static TransferProtocol ROOT = - new TransferProtocol("root") { + public static TransferProtocol ROOT = new TransferProtocol("root") { - public int hashCode() { + public int hashCode() { - return 4; - } - }; + return 4; + } + };
/** Static attribute that indicates XROOT TransferProtocol. */ - public static TransferProtocol XROOT = - new TransferProtocol("xroot") { + public static TransferProtocol XROOT = new TransferProtocol("xroot") { - public int hashCode() { + public int hashCode() { - return 8; - } - }; + return 8; + } + };
/** Static attribute that indicates HTTP TransferProtocol. */ - public static TransferProtocol HTTP = - new TransferProtocol("http") { + public static TransferProtocol HTTP = new TransferProtocol("http") { - public int hashCode() { + public int hashCode() { - return 5; - } - }; + return 5; + } + };
/** Static attribute that indicates HTTPS TransferProtocol. */ - public static TransferProtocol HTTPS = - new TransferProtocol("https") { + public static TransferProtocol HTTPS = new TransferProtocol("https") { - public int hashCode() { + public int hashCode() { - return 6; - } - }; + return 6; + } + };
+ + /** Static attribute that indicates DAV TransferProtocol. */ + public static TransferProtocol DAV = new TransferProtocol("dav") { + + public int hashCode() { + + return 7; + } + };
+ + /** Static attribute that indicates DAVS TransferProtocol.
*/ + public static TransferProtocol DAVS = new TransferProtocol("davs") { + + public int hashCode() { + + return 9; + } + }; private TransferProtocol(String protocol) { @@ -115,13 +125,24 @@ public String toString() { */ public static TransferProtocol getTransferProtocol(String protocol) { - if (protocol.toLowerCase().replaceAll(" ", "").equals(FILE.toString())) return FILE; - if (protocol.toLowerCase().replaceAll(" ", "").equals(GSIFTP.toString())) return GSIFTP; - if (protocol.toLowerCase().replaceAll(" ", "").equals(RFIO.toString())) return RFIO; - if (protocol.toLowerCase().replaceAll(" ", "").equals(ROOT.toString())) return ROOT; - if (protocol.toLowerCase().replaceAll(" ", "").equals(XROOT.toString())) return XROOT; - if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTP.toString())) return HTTP; - if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTPS.toString())) return HTTPS; + if (protocol.toLowerCase().replaceAll(" ", "").equals(FILE.toString())) + return FILE; + if (protocol.toLowerCase().replaceAll(" ", "").equals(GSIFTP.toString())) + return GSIFTP; + if (protocol.toLowerCase().replaceAll(" ", "").equals(RFIO.toString())) + return RFIO; + if (protocol.toLowerCase().replaceAll(" ", "").equals(ROOT.toString())) + return ROOT; + if (protocol.toLowerCase().replaceAll(" ", "").equals(XROOT.toString())) + return XROOT; + if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTP.toString())) + return HTTP; + if (protocol.toLowerCase().replaceAll(" ", "").equals(HTTPS.toString())) + return HTTPS; + if (protocol.toLowerCase().replaceAll(" ", "").equals(DAV.toString())) + return DAV; + if (protocol.toLowerCase().replaceAll(" ", "").equals(DAVS.toString())) + return DAVS; return EMPTY; } } diff --git a/src/main/java/it/grid/storm/config/Configuration.java b/src/main/java/it/grid/storm/config/Configuration.java index 38251697..55a2a8e7 100644 --- a/src/main/java/it/grid/storm/config/Configuration.java +++ b/src/main/java/it/grid/storm/config/Configuration.java @@ -1283,7 +1283,7 @@ public String toString() { } public String getHTTPTURLPrefix() { - return cr.getConfiguration().getString(HTTP_TURL_PREFIX, "/fileTransfer"); + return cr.getConfiguration().getString(HTTP_TURL_PREFIX, ""); } public long getInProgressPutRequestExpirationTime() { diff --git a/src/main/java/it/grid/storm/namespace/StoRIImpl.java b/src/main/java/it/grid/storm/namespace/StoRIImpl.java index ed3578e3..49036e9f 100644 --- a/src/main/java/it/grid/storm/namespace/StoRIImpl.java +++ b/src/main/java/it/grid/storm/namespace/StoRIImpl.java @@ -464,6 +464,8 @@ public TTURL getTURL(TURLPrefix desiredProtocols) if (choosen.equals(Protocol.HTTP) || choosen.equals(Protocol.HTTPS)) { resultTURL = buildHTTPTURL(choosen, authority); + } else if (choosen.equals(Protocol.DAV) || choosen.equals(Protocol.DAVS)) { + resultTURL = buildDAVTURL(choosen, authority); } else { resultTURL = buildTURL(choosen, authority); } @@ -564,6 +566,24 @@ private TTURL buildHTTPTURL(Protocol p, Authority authority) { return TTURL.makeFromString(sb.toString()); } + private TTURL buildDAVTURL(Protocol p, Authority authority) { + + String prefix = Configuration.getInstance().getHTTPTURLPrefix(); + StringBuilder sb = new StringBuilder(); + sb.append(p.getProtocolPrefix()); + sb.append(authority); + + if (prefix != null) { + sb.append(prefix); + } + + sb.append(getStFN().toString()); + + log.debug("built dav turl: {}", sb.toString()); + + return TTURL.makeFromString(sb.toString()); + } + private TTURL buildTURL(Protocol protocol, Authority 
authority) throws InvalidProtocolForTURLException { diff --git a/src/main/java/it/grid/storm/namespace/model/Protocol.java b/src/main/java/it/grid/storm/namespace/model/Protocol.java index 12b8a302..596f79af 100644 --- a/src/main/java/it/grid/storm/namespace/model/Protocol.java +++ b/src/main/java/it/grid/storm/namespace/model/Protocol.java @@ -3,18 +3,6 @@ */ package it.grid.storm.namespace.model; -/** - * Title: - * - *

<p>Description: - * - * <p>Copyright: Copyright (c) 2006 - * - * <p>
Company: - * - * @author not attributable - * @version 1.0 - */ public class Protocol { private int protocolIndex = -1; @@ -29,21 +17,18 @@ public class Protocol { public static final Protocol RFIO = new Protocol(3, "RFIO", "rfio", 5001); public static final Protocol SRM = new Protocol(4, "SRM", "srm", 8444); public static final Protocol ROOT = new Protocol(5, "ROOT", "root", 1094); - // TODO HTTPS TURL + public static final Protocol HTTP = new Protocol(6, "HTTP", "http", 8080); public static final Protocol HTTPS = new Protocol(7, "HTTPS", "https", 443); public static final Protocol XROOT = new Protocol(8, "XROOT", "xroot", 1094); + public static final Protocol DAV = new Protocol(9, "DAV", "dav", 8080); + public static final Protocol DAVS = new Protocol(10, "DAVS", "davs", 443); + public static final Protocol EMPTY = new Protocol(0, "EMPTY", "", -1); public static final Protocol UNKNOWN = new Protocol(-1, "UNKNOWN", "", -1); - /** - * Constructor - * - * @param protocolName String - * @param protocolSchema String - */ private Protocol(int protocolIndex, String protocolName, String protocolScheme, int defaultPort) { this.protocolIndex = protocolIndex; @@ -52,25 +37,21 @@ private Protocol(int protocolIndex, String protocolName, String protocolScheme, this.defaultPort = defaultPort; } - // Return internal index for equals method and to use in a switch statement public int getProtocolIndex() { return protocolIndex; } - // Only get method for Name public String getProtocolName() { return protocolName; } - // Only get method for Schema public String getSchema() { return schema; } - // Only get method for Schema public String getProtocolPrefix() { return this.schema + "://"; @@ -108,13 +89,18 @@ public static Protocol getProtocol(String scheme) { if (scheme.toLowerCase().replaceAll(" ", "").equals(SRM.getSchema().toLowerCase())) { return SRM; } - // TODO HTTPS TURL if (scheme.toLowerCase().replaceAll(" ", "").equals(HTTP.getSchema().toLowerCase())) { return HTTP; } if (scheme.toLowerCase().replaceAll(" ", "").equals(HTTPS.getSchema().toLowerCase())) { return HTTPS; } + if (scheme.toLowerCase().replaceAll(" ", "").equals(DAV.getSchema().toLowerCase())) { + return DAV; + } + if (scheme.toLowerCase().replaceAll(" ", "").equals(DAVS.getSchema().toLowerCase())) { + return DAVS; + } if (scheme.toLowerCase().replaceAll(" ", "").equals(EMPTY.getSchema().toLowerCase())) { return EMPTY;
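Taken together, [PATCH 3/3] makes the dav and davs schemes resolve to first-class protocols alongside http and https, both at the namespace level (Protocol) and at the transfer-protocol level (TransferProtocol). The JUnit sketch below is illustrative only and is not part of the patch series: the class name TestDavProtocolResolution is invented, and the assertions simply spell out what the new lookup branches shown above imply.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;

import org.junit.Test;

import it.grid.storm.common.types.TransferProtocol;
import it.grid.storm.namespace.model.Protocol;

public class TestDavProtocolResolution {

  @Test
  public void davSchemesResolveToNamespaceProtocols() {
    // Protocol.getProtocol() lower-cases the scheme and strips spaces, so
    // "dav" and "davs" now resolve to the new constants instead of falling through.
    assertSame(Protocol.DAV, Protocol.getProtocol("dav"));
    assertSame(Protocol.DAVS, Protocol.getProtocol("davs"));
    assertEquals("dav://", Protocol.DAV.getProtocolPrefix());
    assertEquals("davs://", Protocol.DAVS.getProtocolPrefix());
  }

  @Test
  public void davTransferProtocolsAreRecognised() {
    // TransferProtocol.getTransferProtocol() applies the same normalisation,
    // so mixed case and stray spaces are tolerated.
    assertSame(TransferProtocol.DAV, TransferProtocol.getTransferProtocol("DAV"));
    assertSame(TransferProtocol.DAVS, TransferProtocol.getTransferProtocol(" davs "));
  }
}

With getHTTPTURLPrefix() now defaulting to an empty string, buildDAVTURL would produce TURLs of the form davs://<authority><StFN>; for a hypothetical endpoint storage.example.org:8443 and StFN /atlas/disk/file1.root that would be davs://storage.example.org:8443/atlas/disk/file1.root.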