diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index f8a09b5589..a093dbbbd0 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -26,6 +26,7 @@ on: - development - master - lineageondemand + - java17 jobs: build: @@ -35,10 +36,13 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up JDK 1.8 + - name: Set up JDK 1.17 uses: actions/setup-java@v1 with: - java-version: 1.8 + java-version: 1.17 + + - name: Print JDK version + run: java -version - name: Cache Maven packages uses: actions/cache@v2 diff --git a/Dockerfile b/Dockerfile index ec34b5d22b..2d98455e86 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,37 +29,26 @@ RUN apt-get update \ && apt-get -y install \ wget \ python2 \ - openjdk-8-jdk-headless \ + openjdk-17-jdk-headless \ patch \ netcat \ curl \ && cd / \ && export MAVEN_OPTS="-Xms2g -Xmx2g" \ - && export JAVA_HOME="/usr/lib/jvm/java-8-openjdk-amd64" \ + && export JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64" \ && tar -xzvf /apache-atlas-3.0.0-SNAPSHOT-server.tar.gz -C /opt \ && mv /opt/apache-atlas-${VERSION} /opt/apache-atlas \ && apt-get clean \ && rm -rf /apache-atlas-3.0.0-SNAPSHOT-server.tar.gz -# Copy the repair index jar file -RUN cd / \ - && wget https://atlan-build-artifacts.s3.ap-south-1.amazonaws.com/atlas/atlas-index-repair-tool-${VERSION}.tar.gz \ - && tar -xzvf /atlas-index-repair-tool-${VERSION}.tar.gz \ - && mkdir /opt/apache-atlas/libext \ - && mv /atlas-index-repair-tool-${VERSION}.jar /opt/apache-atlas/libext/ \ - && rm -rf /atlas-index-repair-tool-${VERSION}.tar.gz - RUN ln -s /usr/bin/python2 /usr/bin/python -COPY atlas-hub/repair_index.py /opt/apache-atlas/bin/ - -RUN chmod +x /opt/apache-atlas/bin/repair_index.py - COPY atlas-hub/atlas_start.py.patch atlas-hub/atlas_config.py.patch /opt/apache-atlas/bin/ COPY atlas-hub/pre-conf/atlas-log4j.xml /opt/apache-atlas/conf/ COPY atlas-hub/pre-conf/atlas-log4j2.xml /opt/apache-atlas/conf/ COPY atlas-hub/pre-conf/atlas-auth/ /opt/apache-atlas/conf/ +RUN mkdir /opt/apache-atlas/libext RUN curl https://repo1.maven.org/maven2/org/jolokia/jolokia-jvm/1.6.2/jolokia-jvm-1.6.2-agent.jar -o /opt/apache-atlas/libext/jolokia-jvm-agent.jar RUN cd /opt/apache-atlas/bin \ diff --git a/addons/falcon-bridge-shim/pom.xml b/addons/falcon-bridge-shim/pom.xml deleted file mode 100755 index c554e890a7..0000000000 --- a/addons/falcon-bridge-shim/pom.xml +++ /dev/null @@ -1,77 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - falcon-bridge-shim - Apache Atlas Falcon Bridge Shim Module - Apache Atlas Falcon Bridge Shim - jar - - - - - org.apache.atlas - atlas-plugin-classloader - - - - org.apache.falcon - falcon-common - ${falcon.version} - provided - - - org.apache.spark - * - - - javax.servlet - servlet-api - - - org.mortbay.jetty - servlet-api - - - org.springframework - spring-beans - - - org.springframework - spring-jms - - - org.springframework - spring-tx - - - org.springframework - spring-context - - - - - diff --git a/addons/falcon-bridge-shim/src/main/java/org/apache/atlas/falcon/service/AtlasService.java b/addons/falcon-bridge-shim/src/main/java/org/apache/atlas/falcon/service/AtlasService.java deleted file mode 100755 index 2b756de0e2..0000000000 --- a/addons/falcon-bridge-shim/src/main/java/org/apache/atlas/falcon/service/AtlasService.java +++ /dev/null @@ -1,222 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.service; - - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.falcon.FalconException; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.Entity; -import org.apache.falcon.service.ConfigurationChangeListener; -import org.apache.falcon.service.FalconService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Falcon hook used for atlas entity registration. - */ -public class AtlasService implements FalconService, ConfigurationChangeListener { - private static final Logger LOG = LoggerFactory.getLogger(AtlasService.class); - - private static final String ATLAS_PLUGIN_TYPE = "falcon"; - private static final String ATLAS_FALCON_HOOK_IMPL_CLASSNAME = "org.apache.atlas.falcon.service.AtlasService"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private FalconService falconServiceImpl = null; - private ConfigurationChangeListener configChangeListenerImpl = null; - - public AtlasService() { - this.initialize(); - } - - @Override - public String getName() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.getName()"); - } - - String ret = null; - - try { - activatePluginClassLoader(); - ret = falconServiceImpl.getName(); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.getName()"); - } - - return ret; - } - - @Override - public void init() throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.init()"); - } - - try { - activatePluginClassLoader(); - - ConfigurationStore.get().registerListener(this); - - falconServiceImpl.init(); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.init()"); - } - } - - @Override - public void destroy() throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.destroy()"); - } - - try { - activatePluginClassLoader(); - - ConfigurationStore.get().unregisterListener(this); - - falconServiceImpl.destroy(); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.destroy()"); - } - } - - @Override - public void onAdd(Entity entity) throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.onAdd({})", entity); - } - - try { - activatePluginClassLoader(); - configChangeListenerImpl.onAdd(entity); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.onAdd({})", entity); - } - } - - @Override - public void onRemove(Entity entity) throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.onRemove({})", entity); - } - - try { - activatePluginClassLoader(); - 
configChangeListenerImpl.onRemove(entity); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.onRemove({})", entity); - } - } - - @Override - public void onChange(Entity entity, Entity entity1) throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.onChange({}, {})", entity, entity1); - } - - try { - activatePluginClassLoader(); - configChangeListenerImpl.onChange(entity, entity1); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.onChange({}, {})", entity, entity1); - } - } - - @Override - public void onReload(Entity entity) throws FalconException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.onReload({})", entity); - } - - try { - activatePluginClassLoader(); - configChangeListenerImpl.onReload(entity); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.onReload({})", entity); - } - } - - private void initialize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AtlasService.initialize()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - Class cls = Class.forName(ATLAS_FALCON_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - Object atlasService = cls.newInstance(); - - falconServiceImpl = (FalconService) atlasService; - configChangeListenerImpl = (ConfigurationChangeListener) atlasService; - } catch (Exception excp) { - LOG.error("Error instantiating Atlas hook implementation", excp); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AtlasService.initialize()"); - } - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} diff --git a/addons/falcon-bridge/pom.xml b/addons/falcon-bridge/pom.xml deleted file mode 100644 index 1e2ce7c81b..0000000000 --- a/addons/falcon-bridge/pom.xml +++ /dev/null @@ -1,431 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - falcon-bridge - Apache Atlas Falcon Bridge Module - Apache Atlas Falcon Bridge - jar - - - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-notification - - - - org.apache.falcon - falcon-common - ${falcon.version} - provided - - - org.apache.spark - * - - - javax.servlet - servlet-api - - - org.mortbay.jetty - servlet-api - - - org.springframework - spring-beans - - - org.springframework - spring-jms - - - org.springframework - spring-tx - - - - - - org.apache.atlas - hive-bridge - - - - org.testng - testng - - - - org.eclipse.jetty - jetty-server - test - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/falcon/atlas-falcon-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - hive-bridge - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - 
atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/falcon - false - false - true - - - ${project.groupId} - falcon-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - true - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/0010-base_model.json - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - org.apache.maven.plugins - maven-antrun-plugin - - - post-integration-test - - run - - - - - - - - - - - - - - diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/Util/EventUtil.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/Util/EventUtil.java deleted file mode 100644 index ef5634009d..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/Util/EventUtil.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.Util; - -import org.apache.commons.lang3.StringUtils; -import org.apache.falcon.FalconException; -import org.apache.falcon.security.CurrentUser; - -import java.util.HashMap; -import java.util.Map; - -/** - * Falcon event util - */ -public final class EventUtil { - - private EventUtil() {} - - - public static Map convertKeyValueStringToMap(final String keyValueString) { - if (StringUtils.isBlank(keyValueString)) { - return null; - } - - Map keyValueMap = new HashMap<>(); - - String[] tags = keyValueString.split(","); - for (String tag : tags) { - int index = tag.indexOf("="); - String tagKey = tag.substring(0, index).trim(); - String tagValue = tag.substring(index + 1, tag.length()).trim(); - keyValueMap.put(tagKey, tagValue); - } - return keyValueMap; - } - - public static String getUser() throws FalconException { - try { - return CurrentUser.getAuthenticatedUGI().getShortUserName(); - } catch (Exception ioe) { - //Ignore is failed to get user, uses login user - } - return null; - } -} diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java deleted file mode 100644 index cbf002f4fa..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java +++ /dev/null @@ -1,416 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.bridge; - -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasConstants; -import org.apache.atlas.falcon.Util.EventUtil; -import org.apache.atlas.falcon.model.FalconDataTypes; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.falcon.FalconException; -import org.apache.falcon.entity.CatalogStorage; -import org.apache.falcon.entity.FeedHelper; -import org.apache.falcon.entity.FileSystemStorage; -import org.apache.falcon.entity.ProcessHelper; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.feed.CatalogTable; -import org.apache.falcon.entity.v0.feed.ClusterType; -import org.apache.falcon.entity.v0.feed.Feed; -import org.apache.falcon.entity.v0.feed.Location; -import org.apache.falcon.entity.v0.feed.LocationType; -import org.apache.falcon.entity.v0.process.Cluster; -import org.apache.falcon.entity.v0.process.Input; -import org.apache.falcon.entity.v0.process.Output; -import org.apache.falcon.entity.v0.process.Workflow; -import org.apache.falcon.workflow.WorkflowExecutionArgs; -import org.apache.hadoop.fs.Path; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * A Bridge Utility to register Falcon entities metadata to Atlas. 
- */ -public class FalconBridge { - private static final Logger LOG = LoggerFactory.getLogger(FalconBridge.class); - - public static final String COLO = "colo"; - public static final String TAGS = "tags"; - public static final String GROUPS = "groups"; - public static final String PIPELINES = "pipelines"; - public static final String WFPROPERTIES = "workflow-properties"; - public static final String RUNSON = "runs-on"; - public static final String STOREDIN = "stored-in"; - public static final String FREQUENCY = "frequency"; - public static final String ATTRIBUTE_DB = "db"; - - /** - * Creates cluster entity - * - * @param cluster ClusterEntity - * @return cluster instance reference - */ - public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster) { - LOG.info("Creating cluster Entity : {}", cluster.getName()); - - Referenceable clusterRef = new Referenceable(FalconDataTypes.FALCON_CLUSTER.getName()); - - clusterRef.set(AtlasClient.NAME, cluster.getName()); - clusterRef.set(AtlasClient.DESCRIPTION, cluster.getDescription()); - clusterRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, cluster.getName()); - - clusterRef.set(FalconBridge.COLO, cluster.getColo()); - - if (cluster.getACL() != null) { - clusterRef.set(AtlasClient.OWNER, cluster.getACL().getGroup()); - } - - if (StringUtils.isNotEmpty(cluster.getTags())) { - clusterRef.set(FalconBridge.TAGS, - EventUtil.convertKeyValueStringToMap(cluster.getTags())); - } - - return clusterRef; - } - - private static Referenceable createFeedEntity(Feed feed, Referenceable clusterReferenceable) { - LOG.info("Creating feed dataset: {}", feed.getName()); - - Referenceable feedEntity = new Referenceable(FalconDataTypes.FALCON_FEED.getName()); - feedEntity.set(AtlasClient.NAME, feed.getName()); - feedEntity.set(AtlasClient.DESCRIPTION, feed.getDescription()); - String feedQualifiedName = - getFeedQualifiedName(feed.getName(), (String) clusterReferenceable.get(AtlasClient.NAME)); - feedEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, feedQualifiedName); - feedEntity.set(FalconBridge.FREQUENCY, feed.getFrequency().toString()); - feedEntity.set(FalconBridge.STOREDIN, clusterReferenceable); - if (feed.getACL() != null) { - feedEntity.set(AtlasClient.OWNER, feed.getACL().getOwner()); - } - - if (StringUtils.isNotEmpty(feed.getTags())) { - feedEntity.set(FalconBridge.TAGS, - EventUtil.convertKeyValueStringToMap(feed.getTags())); - } - - if (feed.getGroups() != null) { - feedEntity.set(FalconBridge.GROUPS, feed.getGroups()); - } - - return feedEntity; - } - - public static List createFeedCreationEntity(Feed feed, ConfigurationStore falconStore) throws FalconException, URISyntaxException { - LOG.info("Creating feed : {}", feed.getName()); - - List entities = new ArrayList<>(); - - if (feed.getClusters() != null) { - List replicationInputs = new ArrayList<>(); - List replicationOutputs = new ArrayList<>(); - - for (org.apache.falcon.entity.v0.feed.Cluster feedCluster : feed.getClusters().getClusters()) { - org.apache.falcon.entity.v0.cluster.Cluster cluster = falconStore.get(EntityType.CLUSTER, - feedCluster.getName()); - - // set cluster - Referenceable clusterReferenceable = getClusterEntityReference(cluster.getName(), cluster.getColo()); - entities.add(clusterReferenceable); - - // input as hive_table or hdfs_path, output as falcon_feed dataset - List inputs = new ArrayList<>(); - List inputReferenceables = getInputEntities(cluster, feed); - if (inputReferenceables != null) { - 
entities.addAll(inputReferenceables); - inputs.add(inputReferenceables.get(inputReferenceables.size() - 1)); - } - - List outputs = new ArrayList<>(); - Referenceable feedEntity = createFeedEntity(feed, clusterReferenceable); - if (feedEntity != null) { - entities.add(feedEntity); - outputs.add(feedEntity); - } - - if (!inputs.isEmpty() || !outputs.isEmpty()) { - Referenceable feedCreateEntity = new Referenceable(FalconDataTypes.FALCON_FEED_CREATION.getName()); - String feedQualifiedName = getFeedQualifiedName(feed.getName(), cluster.getName()); - - feedCreateEntity.set(AtlasClient.NAME, feed.getName()); - feedCreateEntity.set(AtlasClient.DESCRIPTION, "Feed creation - " + feed.getName()); - feedCreateEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, feedQualifiedName); - - if (!inputs.isEmpty()) { - feedCreateEntity.set(AtlasClient.PROCESS_ATTRIBUTE_INPUTS, inputs); - } - if (!outputs.isEmpty()) { - feedCreateEntity.set(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS, outputs); - } - - feedCreateEntity.set(FalconBridge.STOREDIN, clusterReferenceable); - entities.add(feedCreateEntity); - } - - if (ClusterType.SOURCE == feedCluster.getType()) { - replicationInputs.add(feedEntity); - } else if (ClusterType.TARGET == feedCluster.getType()) { - replicationOutputs.add(feedEntity); - } - } - - if (!replicationInputs.isEmpty() && !replicationInputs.isEmpty()) { - Referenceable feedReplicationEntity = new Referenceable(FalconDataTypes - .FALCON_FEED_REPLICATION.getName()); - - feedReplicationEntity.set(AtlasClient.NAME, feed.getName()); - feedReplicationEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, feed.getName()); - - feedReplicationEntity.set(AtlasClient.PROCESS_ATTRIBUTE_INPUTS, replicationInputs); - feedReplicationEntity.set(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS, replicationOutputs); - entities.add(feedReplicationEntity); - } - - } - return entities; - } - - /** - * Creates process entity - * - * @param process process entity - * @param falconStore config store - * @return process instance reference - * - * @throws FalconException if retrieving from the configuration store fail - */ - public static List createProcessEntity(org.apache.falcon.entity.v0.process.Process process, - ConfigurationStore falconStore) throws FalconException { - LOG.info("Creating process Entity : {}", process.getName()); - - // The requirement is for each cluster, create a process entity with name - // clustername.processname - List entities = new ArrayList<>(); - - if (process.getClusters() != null) { - - for (Cluster processCluster : process.getClusters().getClusters()) { - org.apache.falcon.entity.v0.cluster.Cluster cluster = - falconStore.get(EntityType.CLUSTER, processCluster.getName()); - Referenceable clusterReferenceable = getClusterEntityReference(cluster.getName(), cluster.getColo()); - entities.add(clusterReferenceable); - - List inputs = new ArrayList<>(); - if (process.getInputs() != null) { - for (Input input : process.getInputs().getInputs()) { - Feed feed = falconStore.get(EntityType.FEED, input.getFeed()); - Referenceable inputReferenceable = getFeedDataSetReference(feed, clusterReferenceable); - entities.add(inputReferenceable); - inputs.add(inputReferenceable); - } - } - - List outputs = new ArrayList<>(); - if (process.getOutputs() != null) { - for (Output output : process.getOutputs().getOutputs()) { - Feed feed = falconStore.get(EntityType.FEED, output.getFeed()); - Referenceable outputReferenceable = getFeedDataSetReference(feed, clusterReferenceable); - entities.add(outputReferenceable); - 
outputs.add(outputReferenceable); - } - } - - if (!inputs.isEmpty() || !outputs.isEmpty()) { - - Referenceable processEntity = new Referenceable(FalconDataTypes.FALCON_PROCESS.getName()); - processEntity.set(AtlasClient.NAME, process.getName()); - processEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - getProcessQualifiedName(process.getName(), cluster.getName())); - processEntity.set(FalconBridge.FREQUENCY, process.getFrequency().toString()); - - if (!inputs.isEmpty()) { - processEntity.set(AtlasClient.PROCESS_ATTRIBUTE_INPUTS, inputs); - } - if (!outputs.isEmpty()) { - processEntity.set(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS, outputs); - } - - // set cluster - processEntity.set(FalconBridge.RUNSON, clusterReferenceable); - - // Set user - if (process.getACL() != null) { - processEntity.set(AtlasClient.OWNER, process.getACL().getOwner()); - } - - if (StringUtils.isNotEmpty(process.getTags())) { - processEntity.set(FalconBridge.TAGS, - EventUtil.convertKeyValueStringToMap(process.getTags())); - } - - if (process.getPipelines() != null) { - processEntity.set(FalconBridge.PIPELINES, process.getPipelines()); - } - - processEntity.set(FalconBridge.WFPROPERTIES, - getProcessEntityWFProperties(process.getWorkflow(), - process.getName())); - - entities.add(processEntity); - } - - } - } - return entities; - } - - private static List getInputEntities(org.apache.falcon.entity.v0.cluster.Cluster cluster, - Feed feed) throws URISyntaxException { - org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName()); - - if(feedCluster != null) { - final CatalogTable table = getTable(feedCluster, feed); - if (table != null) { - CatalogStorage storage = new CatalogStorage(cluster, table); - return createHiveTableInstance(cluster.getName(), storage.getDatabase().toLowerCase(), - storage.getTable().toLowerCase()); - } else { - List locations = FeedHelper.getLocations(feedCluster, feed); - if (CollectionUtils.isNotEmpty(locations)) { - Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA); - if (dataLocation != null) { - final String pathUri = normalize(dataLocation.getPath()); - LOG.info("Registering DFS Path {} ", pathUri); - return fillHDFSDataSet(pathUri, cluster.getName()); - } - } - } - } - - return null; - } - - private static CatalogTable getTable(org.apache.falcon.entity.v0.feed.Cluster cluster, Feed feed) { - // check if table is overridden in cluster - if (cluster.getTable() != null) { - return cluster.getTable(); - } - - return feed.getTable(); - } - - private static List fillHDFSDataSet(final String pathUri, final String clusterName) { - List entities = new ArrayList<>(); - Referenceable ref = new Referenceable(HiveMetaStoreBridge.HDFS_PATH); - ref.set("path", pathUri); - // Path path = new Path(pathUri); - // ref.set("name", path.getName()); - //TODO - Fix after ATLAS-542 to shorter Name - Path path = new Path(pathUri); - ref.set(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(path).toString().toLowerCase()); - ref.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, pathUri); - ref.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName); - entities.add(ref); - return entities; - } - - private static Referenceable createHiveDatabaseInstance(String clusterName, String dbName) { - Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName()); - dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName); - dbRef.set(AtlasClient.NAME, dbName); - dbRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - 
HiveMetaStoreBridge.getDBQualifiedName(clusterName, dbName)); - return dbRef; - } - - private static List createHiveTableInstance(String clusterName, String dbName, - String tableName) { - List entities = new ArrayList<>(); - Referenceable dbRef = createHiveDatabaseInstance(clusterName, dbName); - entities.add(dbRef); - - Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName()); - tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName)); - tableRef.set(AtlasClient.NAME, tableName.toLowerCase()); - tableRef.set(ATTRIBUTE_DB, dbRef); - entities.add(tableRef); - - return entities; - } - - private static Referenceable getClusterEntityReference(final String clusterName, - final String colo) { - LOG.info("Getting reference for entity {}", clusterName); - Referenceable clusterRef = new Referenceable(FalconDataTypes.FALCON_CLUSTER.getName()); - clusterRef.set(AtlasClient.NAME, String.format("%s", clusterName)); - clusterRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, clusterName); - clusterRef.set(FalconBridge.COLO, colo); - return clusterRef; - } - - - private static Referenceable getFeedDataSetReference(Feed feed, Referenceable clusterReference) { - LOG.info("Getting reference for entity {}", feed.getName()); - Referenceable feedDatasetRef = new Referenceable(FalconDataTypes.FALCON_FEED.getName()); - feedDatasetRef.set(AtlasClient.NAME, feed.getName()); - feedDatasetRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getFeedQualifiedName(feed.getName(), - (String) clusterReference.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME))); - feedDatasetRef.set(FalconBridge.STOREDIN, clusterReference); - feedDatasetRef.set(FalconBridge.FREQUENCY, feed.getFrequency()); - return feedDatasetRef; - } - - private static Map getProcessEntityWFProperties(final Workflow workflow, - final String processName) { - Map wfProperties = new HashMap<>(); - wfProperties.put(WorkflowExecutionArgs.USER_WORKFLOW_NAME.getName(), - ProcessHelper.getProcessWorkflowName(workflow.getName(), processName)); - wfProperties.put(WorkflowExecutionArgs.USER_WORKFLOW_VERSION.getName(), - workflow.getVersion()); - wfProperties.put(WorkflowExecutionArgs.USER_WORKFLOW_ENGINE.getName(), - workflow.getEngine().value()); - - return wfProperties; - } - - public static String getFeedQualifiedName(final String feedName, final String clusterName) { - return String.format("%s@%s", feedName, clusterName); - } - - public static String getProcessQualifiedName(final String processName, final String clusterName) { - return String.format("%s@%s", processName, clusterName); - } - - public static String normalize(final String str) { - if (StringUtils.isBlank(str)) { - return null; - } - return str.toLowerCase().trim(); - } -} diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/event/FalconEvent.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/event/FalconEvent.java deleted file mode 100644 index 51db894ab6..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/event/FalconEvent.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.event; - -import org.apache.falcon.entity.v0.Entity; - -/** - * Falcon event to interface with Atlas Service. - */ -public class FalconEvent { - protected String user; - protected OPERATION operation; - protected Entity entity; - - public FalconEvent(String doAsUser, OPERATION falconOperation, Entity entity) { - this.user = doAsUser; - this.operation = falconOperation; - this.entity = entity; - } - - public enum OPERATION { - ADD_CLUSTER, - UPDATE_CLUSTER, - ADD_FEED, - UPDATE_FEED, - ADD_PROCESS, - UPDATE_PROCESS, - } - - public String getUser() { - return user; - } - - public OPERATION getOperation() { - return operation; - } - - public Entity getEntity() { - return entity; - } -} diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java deleted file mode 100644 index b8a73cbe63..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java +++ /dev/null @@ -1,138 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.hook; - -import org.apache.atlas.falcon.bridge.FalconBridge; -import org.apache.atlas.falcon.event.FalconEvent; -import org.apache.atlas.falcon.publisher.FalconEventPublisher; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest; -import org.apache.falcon.FalconException; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.feed.Feed; -import org.apache.falcon.entity.v0.process.Process; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import static org.apache.atlas.repository.Constants.FALCON_SOURCE; - -/** - * Falcon hook sends lineage information to the Atlas Service. - */ -public class FalconHook extends AtlasHook implements FalconEventPublisher { - private static final Logger LOG = LoggerFactory.getLogger(FalconHook.class); - - private static ConfigurationStore STORE; - - @Override - public String getMessageSource() { - return FALCON_SOURCE; - } - - private enum Operation { - ADD, - UPDATE - } - - static { - try { - STORE = ConfigurationStore.get(); - } catch (Exception e) { - LOG.error("Caught exception initializing the falcon hook.", e); - } - - LOG.info("Created Atlas Hook for Falcon"); - } - - @Override - public void publish(final Data data) { - final FalconEvent event = data.getEvent(); - try { - fireAndForget(event); - } catch (Throwable t) { - LOG.warn("Error in processing data {}", data, t); - } - } - - private void fireAndForget(FalconEvent event) throws FalconException, URISyntaxException { - LOG.info("Entered Atlas hook for Falcon hook operation {}", event.getOperation()); - List messages = new ArrayList<>(); - - Operation op = getOperation(event.getOperation()); - String user = getUser(event.getUser()); - LOG.info("fireAndForget user:{}", user); - switch (op) { - case ADD: - messages.add(new EntityCreateRequest(user, createEntities(event, user))); - break; - - } - notifyEntities(messages, null); - } - - private List createEntities(FalconEvent event, String user) throws FalconException, URISyntaxException { - List entities = new ArrayList<>(); - - switch (event.getOperation()) { - case ADD_CLUSTER: - entities.add(FalconBridge - .createClusterEntity((org.apache.falcon.entity.v0.cluster.Cluster) event.getEntity())); - break; - - case ADD_PROCESS: - entities.addAll(FalconBridge.createProcessEntity((Process) event.getEntity(), STORE)); - break; - - case ADD_FEED: - entities.addAll(FalconBridge.createFeedCreationEntity((Feed) event.getEntity(), STORE)); - break; - - case UPDATE_CLUSTER: - case UPDATE_FEED: - case UPDATE_PROCESS: - default: - LOG.info("Falcon operation {} is not valid or supported", event.getOperation()); - } - - return entities; - } - - private static Operation getOperation(final FalconEvent.OPERATION op) throws FalconException { - switch (op) { - case ADD_CLUSTER: - case ADD_FEED: - case ADD_PROCESS: - return Operation.ADD; - - case UPDATE_CLUSTER: - case UPDATE_FEED: - 
case UPDATE_PROCESS: - return Operation.UPDATE; - - default: - throw new FalconException("Falcon operation " + op + " is not valid or supported"); - } - } -} - diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/model/FalconDataTypes.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/model/FalconDataTypes.java deleted file mode 100644 index e36ff23aff..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/model/FalconDataTypes.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.model; - -/** - * Falcon Data Types for model and bridge. - */ -public enum FalconDataTypes { - // Classes - FALCON_CLUSTER, - FALCON_FEED_CREATION, - FALCON_FEED, - FALCON_FEED_REPLICATION, - FALCON_PROCESS; - - public String getName() { - return name().toLowerCase(); - } - -} diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/publisher/FalconEventPublisher.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/publisher/FalconEventPublisher.java deleted file mode 100644 index a01ec14beb..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/publisher/FalconEventPublisher.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.falcon.publisher; - - -import org.apache.atlas.falcon.event.FalconEvent; - -/** - * Falcon publisher for Atlas - */ -public interface FalconEventPublisher { - class Data { - private FalconEvent event; - - public Data(FalconEvent event) { - this.event = event; - } - - public FalconEvent getEvent() { - return event; - } - } - - void publish(final Data data); -} diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/service/AtlasService.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/service/AtlasService.java deleted file mode 100644 index 7482ba7b82..0000000000 --- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/service/AtlasService.java +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.falcon.service; - -import org.apache.atlas.falcon.Util.EventUtil; -import org.apache.atlas.falcon.event.FalconEvent; -import org.apache.atlas.falcon.hook.FalconHook; -import org.apache.atlas.falcon.publisher.FalconEventPublisher; -import org.apache.falcon.FalconException; -import org.apache.falcon.entity.v0.Entity; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.service.ConfigurationChangeListener; -import org.apache.falcon.service.FalconService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * Atlas service to publish Falcon events - */ -public class AtlasService implements FalconService, ConfigurationChangeListener { - - private static final Logger LOG = LoggerFactory.getLogger(AtlasService.class); - private FalconEventPublisher publisher; - - /** - * Constant for the service name. - */ - public static final String SERVICE_NAME = AtlasService.class.getSimpleName(); - - @Override - public String getName() { - return SERVICE_NAME; - } - - @Override - public void init() throws FalconException { - publisher = new FalconHook(); - } - - @Override - public void destroy() throws FalconException { - } - - @Override - public void onAdd(Entity entity) throws FalconException { - try { - EntityType entityType = entity.getEntityType(); - switch (entityType) { - case CLUSTER: - addEntity(entity, FalconEvent.OPERATION.ADD_CLUSTER); - break; - - case PROCESS: - addEntity(entity, FalconEvent.OPERATION.ADD_PROCESS); - break; - - case FEED: - addEntity(entity, FalconEvent.OPERATION.ADD_FEED); - break; - - default: - LOG.debug("Entity type not processed {}", entityType); - } - } catch(Throwable t) { - LOG.warn("Error handling entity {}", entity, t); - } - } - - @Override - public void onRemove(Entity entity) throws FalconException { - } - - @Override - public void onChange(Entity oldEntity, Entity newEntity) throws FalconException { - /** - * Skipping update for now - update uses full update currently and this might result in all attributes wiped for hive entities - EntityType entityType = newEntity.getEntityType(); - switch (entityType) { - case CLUSTER: - addEntity(newEntity, FalconEvent.OPERATION.UPDATE_CLUSTER); - break; - - case PROCESS: - addEntity(newEntity, FalconEvent.OPERATION.UPDATE_PROCESS); - break; - - case FEED: - FalconEvent.OPERATION operation = isReplicationFeed((Feed) newEntity) ? 
- FalconEvent.OPERATION.UPDATE_REPLICATION_FEED : - FalconEvent.OPERATION.UPDATE_FEED; - addEntity(newEntity, operation); - break; - - default: - LOG.debug("Entity type not processed {}", entityType); - } - **/ - } - - @Override - public void onReload(Entity entity) throws FalconException { - //Since there is no import script that can import existing falcon entities to atlas, adding on falcon service start - onAdd(entity); - } - - private void addEntity(Entity entity, FalconEvent.OPERATION operation) throws FalconException { - LOG.info("Adding {} entity to Atlas: {}", entity.getEntityType().name(), entity.getName()); - - try { - FalconEvent event = - new FalconEvent(EventUtil.getUser(), operation, entity); - FalconEventPublisher.Data data = new FalconEventPublisher.Data(event); - publisher.publish(data); - } catch (Exception ex) { - throw new FalconException("Unable to publish data to publisher " + ex.getMessage(), ex); - } - } -} diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java deleted file mode 100644 index 24f36168c9..0000000000 --- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java +++ /dev/null @@ -1,351 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.falcon.hook; - -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.falcon.bridge.FalconBridge; -import org.apache.atlas.falcon.model.FalconDataTypes; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.atlas.falcon.service.AtlasService; -import org.apache.falcon.entity.FeedHelper; -import org.apache.falcon.entity.FileSystemStorage; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.Entity; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.cluster.Cluster; -import org.apache.falcon.entity.v0.feed.Feed; -import org.apache.falcon.entity.v0.feed.Location; -import org.apache.falcon.entity.v0.feed.LocationType; -import org.apache.falcon.entity.v0.process.Process; -import org.apache.falcon.security.CurrentUser; -import org.slf4j.Logger; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.xml.bind.JAXBException; -import java.util.List; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -public class FalconHookIT { - public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(FalconHookIT.class); - - public static final String CLUSTER_RESOURCE = "/cluster.xml"; - public static final String FEED_RESOURCE = "/feed.xml"; - public static final String FEED_HDFS_RESOURCE = "/feed-hdfs.xml"; - public static final String FEED_REPLICATION_RESOURCE = "/feed-replication.xml"; - public static final String PROCESS_RESOURCE = "/process.xml"; - - private AtlasClient atlasClient; - - private static final ConfigurationStore STORE = ConfigurationStore.get(); - - @BeforeClass - public void setUp() throws Exception { - Configuration atlasProperties = ApplicationProperties.get(); - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClient = new AtlasClient(atlasProperties.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT), new String[]{"admin", "admin"}); - } else { - atlasClient = new AtlasClient(atlasProperties.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT)); - } - - AtlasService service = new AtlasService(); - service.init(); - STORE.registerListener(service); - CurrentUser.authenticate(System.getProperty("user.name")); - } - - private boolean isDataModelAlreadyRegistered() throws Exception { - try { - atlasClient.getType(FalconDataTypes.FALCON_PROCESS.getName()); - LOG.info("Hive data model is already registered!"); - return true; - } catch(AtlasServiceException ase) { - if (ase.getStatus() == ClientResponse.Status.NOT_FOUND) { - return false; - } - throw ase; - } - } - - private T loadEntity(EntityType type, String resource, String name) throws JAXBException { - Entity entity = (Entity) type.getUnmarshaller().unmarshal(this.getClass().getResourceAsStream(resource)); - switch (entity.getEntityType()) { - case CLUSTER: - ((Cluster) entity).setName(name); - 
break; - - case FEED: - ((Feed) entity).setName(name); - break; - - case PROCESS: - ((Process) entity).setName(name); - break; - } - return (T)entity; - } - - private String random() { - return RandomStringUtils.randomAlphanumeric(10); - } - - private String getTableUri(String dbName, String tableName) { - return String.format("catalog:%s:%s#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}", dbName, tableName); - } - - @Test - public void testCreateProcess() throws Exception { - Cluster cluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random()); - STORE.publish(EntityType.CLUSTER, cluster); - assertClusterIsRegistered(cluster); - - Feed infeed = getTableFeed(FEED_RESOURCE, cluster.getName(), null); - String infeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(infeed.getName(), cluster.getName())).getId()._getId(); - - Feed outfeed = getTableFeed(FEED_RESOURCE, cluster.getName()); - String outFeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(outfeed.getName(), cluster.getName())).getId()._getId(); - - Process process = loadEntity(EntityType.PROCESS, PROCESS_RESOURCE, "process" + random()); - process.getClusters().getClusters().get(0).setName(cluster.getName()); - process.getInputs().getInputs().get(0).setFeed(infeed.getName()); - process.getOutputs().getOutputs().get(0).setFeed(outfeed.getName()); - STORE.publish(EntityType.PROCESS, process); - - String pid = assertProcessIsRegistered(process, cluster.getName()); - Referenceable processEntity = atlasClient.getEntity(pid); - assertNotNull(processEntity); - assertEquals(processEntity.get(AtlasClient.NAME), process.getName()); - assertEquals(((List)processEntity.get("inputs")).get(0)._getId(), infeedId); - assertEquals(((List)processEntity.get("outputs")).get(0)._getId(), outFeedId); - } - - private String assertProcessIsRegistered(Process process, String clusterName) throws Exception { - return assertEntityIsRegistered(FalconDataTypes.FALCON_PROCESS.getName(), - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getProcessQualifiedName(process.getName(), clusterName)); - } - - private String assertClusterIsRegistered(Cluster cluster) throws Exception { - return assertEntityIsRegistered(FalconDataTypes.FALCON_CLUSTER.getName(), - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, cluster.getName()); - } - - private TypesUtil.Pair getHDFSFeed(String feedResource, String clusterName) throws Exception { - Feed feed = loadEntity(EntityType.FEED, feedResource, "feed" + random()); - org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0); - feedCluster.setName(clusterName); - STORE.publish(EntityType.FEED, feed); - String feedId = assertFeedIsRegistered(feed, clusterName); - assertFeedAttributes(feedId); - - String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_CREATION.getName(), - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(feed.getName(), clusterName)); - Referenceable processEntity = atlasClient.getEntity(processId); - assertEquals(((List)processEntity.get("outputs")).get(0).getId(), feedId); - - String inputId = ((List) processEntity.get("inputs")).get(0).getId(); - Referenceable pathEntity = atlasClient.getEntity(inputId); - assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH); - - List locations = FeedHelper.getLocations(feedCluster, feed); 
- Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA); - assertEquals(pathEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), - FalconBridge.normalize(dataLocation.getPath())); - - return TypesUtil.Pair.of(feedId, feed); - } - - private Feed getTableFeed(String feedResource, String clusterName) throws Exception { - return getTableFeed(feedResource, clusterName, null); - } - - private Feed getTableFeed(String feedResource, String clusterName, String secondClusterName) throws Exception { - Feed feed = loadEntity(EntityType.FEED, feedResource, "feed" + random()); - org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0); - feedCluster.setName(clusterName); - String dbName = "db" + random(); - String tableName = "table" + random(); - feedCluster.getTable().setUri(getTableUri(dbName, tableName)); - - String dbName2 = "db" + random(); - String tableName2 = "table" + random(); - - if (secondClusterName != null) { - org.apache.falcon.entity.v0.feed.Cluster feedCluster2 = feed.getClusters().getClusters().get(1); - feedCluster2.setName(secondClusterName); - feedCluster2.getTable().setUri(getTableUri(dbName2, tableName2)); - } - - STORE.publish(EntityType.FEED, feed); - String feedId = assertFeedIsRegistered(feed, clusterName); - assertFeedAttributes(feedId); - verifyFeedLineage(feed.getName(), clusterName, feedId, dbName, tableName); - - if (secondClusterName != null) { - String feedId2 = assertFeedIsRegistered(feed, secondClusterName); - assertFeedAttributes(feedId2); - verifyFeedLineage(feed.getName(), secondClusterName, feedId2, dbName2, tableName2); - } - return feed; - } - - private void assertFeedAttributes(String feedId) throws Exception { - Referenceable feedEntity = atlasClient.getEntity(feedId); - assertEquals(feedEntity.get(AtlasClient.OWNER), "testuser"); - assertEquals(feedEntity.get(FalconBridge.FREQUENCY), "hours(1)"); - assertEquals(feedEntity.get(AtlasClient.DESCRIPTION), "test input"); - } - - private void verifyFeedLineage(String feedName, String clusterName, String feedId, String dbName, String tableName) - throws Exception{ - //verify that lineage from hive table to falcon feed is created - String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_CREATION.getName(), - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(feedName, clusterName)); - Referenceable processEntity = atlasClient.getEntity(processId); - assertEquals(((List)processEntity.get("outputs")).get(0).getId(), feedId); - - String inputId = ((List) processEntity.get("inputs")).get(0).getId(); - Referenceable tableEntity = atlasClient.getEntity(inputId); - assertEquals(tableEntity.getTypeName(), HiveDataTypes.HIVE_TABLE.getName()); - assertEquals(tableEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), - HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName)); - - } - - private String assertFeedIsRegistered(Feed feed, String clusterName) throws Exception { - return assertEntityIsRegistered(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(feed.getName(), clusterName)); - } - - @Test - public void testReplicationFeed() throws Exception { - Cluster srcCluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random()); - STORE.publish(EntityType.CLUSTER, srcCluster); - assertClusterIsRegistered(srcCluster); - - Cluster targetCluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random()); - 
STORE.publish(EntityType.CLUSTER, targetCluster); - assertClusterIsRegistered(targetCluster); - - Feed feed = getTableFeed(FEED_REPLICATION_RESOURCE, srcCluster.getName(), targetCluster.getName()); - String inId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(feed.getName(), srcCluster.getName())).getId()._getId(); - String outId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(feed.getName(), targetCluster.getName())).getId()._getId(); - - - String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_REPLICATION.getName(), - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, feed.getName()); - Referenceable process = atlasClient.getEntity(processId); - assertEquals(((List)process.get("inputs")).get(0)._getId(), inId); - assertEquals(((List)process.get("outputs")).get(0)._getId(), outId); - } - - @Test - public void testCreateProcessWithHDFSFeed() throws Exception { - Cluster cluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random()); - STORE.publish(EntityType.CLUSTER, cluster); - - TypesUtil.Pair result = getHDFSFeed(FEED_HDFS_RESOURCE, cluster.getName()); - Feed infeed = result.right; - String infeedId = result.left; - - Feed outfeed = getTableFeed(FEED_RESOURCE, cluster.getName()); - String outfeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - FalconBridge.getFeedQualifiedName(outfeed.getName(), cluster.getName())).getId()._getId(); - - Process process = loadEntity(EntityType.PROCESS, PROCESS_RESOURCE, "process" + random()); - process.getClusters().getClusters().get(0).setName(cluster.getName()); - process.getInputs().getInputs().get(0).setFeed(infeed.getName()); - process.getOutputs().getOutputs().get(0).setFeed(outfeed.getName()); - STORE.publish(EntityType.PROCESS, process); - - String pid = assertProcessIsRegistered(process, cluster.getName()); - Referenceable processEntity = atlasClient.getEntity(pid); - assertEquals(processEntity.get(AtlasClient.NAME), process.getName()); - assertEquals(processEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), - FalconBridge.getProcessQualifiedName(process.getName(), cluster.getName())); - assertEquals(((List)processEntity.get("inputs")).get(0)._getId(), infeedId); - assertEquals(((List)processEntity.get("outputs")).get(0)._getId(), outfeedId); - } - - private String assertEntityIsRegistered(final String typeName, final String property, final String value) throws Exception { - waitFor(80000, new Predicate() { - @Override - public void evaluate() throws Exception { - Referenceable entity = atlasClient.getEntity(typeName, property, value); - assertNotNull(entity); - } - }); - Referenceable entity = atlasClient.getEntity(typeName, property, value); - return entity.getId()._getId(); - } - - public interface Predicate { - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. - */ - void evaluate() throws Exception; - } - - /** - * Wait for a condition, expressed via a {@link Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. 
- */ - protected void waitFor(int timeout, Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - predicate.evaluate(); - return; - } catch(Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - Thread.sleep(400); - } - } - } -} diff --git a/addons/falcon-bridge/src/test/resources/atlas-application.properties b/addons/falcon-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 3b12e5fb33..0000000000 --- a/addons/falcon-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,125 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. 
-atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase -atlas.graph.index.search.solr.wait-searcher=true - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/falcon-bridge/src/test/resources/atlas-log4j.xml 
b/addons/falcon-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 262a710f7a..0000000000 --- a/addons/falcon-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/falcon-bridge/src/test/resources/cluster.xml b/addons/falcon-bridge/src/test/resources/cluster.xml deleted file mode 100644 index b183847db3..0000000000 --- a/addons/falcon-bridge/src/test/resources/cluster.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/falcon-bridge/src/test/resources/feed-hdfs.xml b/addons/falcon-bridge/src/test/resources/feed-hdfs.xml deleted file mode 100644 index 435db07451..0000000000 --- a/addons/falcon-bridge/src/test/resources/feed-hdfs.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - online,bi - - hours(1) - UTC - - - - - - - - - - - - - - - - diff --git a/addons/falcon-bridge/src/test/resources/feed-replication.xml b/addons/falcon-bridge/src/test/resources/feed-replication.xml deleted file mode 100644 index dcd427b180..0000000000 --- a/addons/falcon-bridge/src/test/resources/feed-replication.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - online,bi - - hours(1) - UTC - - - - - - - - - - - -
- - - -
- - - - diff --git a/addons/falcon-bridge/src/test/resources/feed.xml b/addons/falcon-bridge/src/test/resources/feed.xml deleted file mode 100644 index 473c745ce8..0000000000 --- a/addons/falcon-bridge/src/test/resources/feed.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - online,bi - - hours(1) - UTC - - - - - - -
- - - -
- - - - diff --git a/addons/falcon-bridge/src/test/resources/hive-site.xml b/addons/falcon-bridge/src/test/resources/hive-site.xml deleted file mode 100644 index f058c2edc2..0000000000 --- a/addons/falcon-bridge/src/test/resources/hive-site.xml +++ /dev/null @@ -1,48 +0,0 @@ - - - - - - - hive.exec.post.hooks - org.apache.atlas.hive.hook.HiveHook - - - - hive.support.concurrency - false - - - - hive.metastore.warehouse.dir - ${user.dir}/target/metastore - - - - javax.jdo.option.ConnectionURL - jdbc:derby:${user.dir}/target/metastore_db;create=true - - - - atlas.hook.hive.synchronous - true - - - - fs.pfile.impl - org.apache.hadoop.fs.ProxyLocalFileSystem - - \ No newline at end of file diff --git a/addons/falcon-bridge/src/test/resources/process.xml b/addons/falcon-bridge/src/test/resources/process.xml deleted file mode 100644 index b94d0a8470..0000000000 --- a/addons/falcon-bridge/src/test/resources/process.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - consumer=consumer@xyz.com, owner=producer@xyz.com, department=forecasting - - - - - - - - 1 - FIFO - days(1) - UTC - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/falcon-bridge/src/test/resources/startup.properties b/addons/falcon-bridge/src/test/resources/startup.properties deleted file mode 100644 index 9623470396..0000000000 --- a/addons/falcon-bridge/src/test/resources/startup.properties +++ /dev/null @@ -1,21 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -*.domain=debug -*.config.store.persist=false -*.config.store.uri=target/config_store \ No newline at end of file diff --git a/addons/falcon-bridge/src/test/resources/users-credentials.properties b/addons/falcon-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index da69923502..0000000000 --- a/addons/falcon-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256+salt-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/hbase-bridge-shim/pom.xml b/addons/hbase-bridge-shim/pom.xml deleted file mode 100644 index eb1b2e9493..0000000000 --- a/addons/hbase-bridge-shim/pom.xml +++ /dev/null @@ -1,60 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hbase-bridge-shim - Apache Atlas Hbase Bridge Shim Module - Apache Atlas Hbase Bridge Shim - jar - - - - - org.apache.atlas - atlas-plugin-classloader - - - org.apache.hbase - hbase-server - ${hbase.version} - provided - - - javax.servlet - servlet-api - - - com.github.stephenc.findbugs - findbugs-annotations - - - javax.ws.rs - * - - - - - diff --git a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java b/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java deleted file mode 100755 index 0b69104b17..0000000000 --- a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java +++ /dev/null @@ -1,277 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.hbase.hook; - - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.RegionInfo; -import org.apache.hadoop.hbase.CoprocessorEnvironment; -import org.apache.hadoop.hbase.client.SnapshotDescription; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; -import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; -import org.apache.hadoop.hbase.coprocessor.MasterObserver; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.coprocessor.RegionObserver; -import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; - -import java.io.IOException; -import java.util.Optional; - - -public class HBaseAtlasCoprocessor implements MasterCoprocessor, MasterObserver, RegionObserver, RegionServerObserver { - public static final Log LOG = LogFactory.getLog(HBaseAtlasCoprocessor.class); - - private static final String ATLAS_PLUGIN_TYPE = "hbase"; - private static final String ATLAS_HBASE_HOOK_IMPL_CLASSNAME = "org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private Object impl = null; - private MasterObserver implMasterObserver = null; - private RegionObserver implRegionObserver = null; - private RegionServerObserver implRegionServerObserver = null; - private MasterCoprocessor implMasterCoprocessor = null; - - public HBaseAtlasCoprocessor() { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()"); - } - - this.init(); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()"); - } - } - - private void init(){ - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.init()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = Class.forName(ATLAS_HBASE_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - impl = cls.newInstance(); - implMasterObserver = (MasterObserver)impl; - implRegionObserver = (RegionObserver)impl; - implRegionServerObserver = (RegionServerObserver)impl; - implMasterCoprocessor = (MasterCoprocessor)impl; - - } catch (Exception e) { - // check what need to be done - LOG.error("Error Enabling RangerHbasePlugin", e); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.init()"); - } - } - - @Override - public Optional getMasterObserver() { - return Optional.of(this); - } - - @Override - public void start(CoprocessorEnvironment env) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.start()"); - } - - try { - activatePluginClassLoader(); - if (env instanceof MasterCoprocessorEnvironment) { - implMasterCoprocessor.start(env); - } - } finally { - deactivatePluginClassLoader(); - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.start()"); - } - } - - @Override - public void postCreateTable(ObserverContext ctx, TableDescriptor desc, RegionInfo[] regions) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()"); - } - - try { - 
activatePluginClassLoader(); - implMasterObserver.postCreateTable(ctx, desc, regions); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()"); - } - } - - @Override - public void postModifyTable(ObserverContext ctx, TableName tableName, TableDescriptor htd) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postModifyTable(ctx, tableName, htd); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()"); - } - } - - @Override - public void postDeleteTable(ObserverContext ctx, TableName tableName) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postDeleteTable(ctx, tableName); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()"); - } - } - - @Override - public void postCreateNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.preCreateNamespace()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postCreateNamespace(ctx, ns); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.preCreateNamespace()"); - } - } - - @Override - public void postDeleteNamespace(ObserverContext ctx, String ns) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.preDeleteNamespace()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postDeleteNamespace(ctx, ns); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.preDeleteNamespace()"); - } - } - @Override - public void postModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.preModifyNamespace()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.preModifyNamespace(ctx, ns); - } finally { - deactivatePluginClassLoader(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.preModifyNamespace()"); - } - } - - @Override - public void postCloneSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postCloneSnapshot(observerContext,snapshot,tableDescriptor); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()"); - } - } - - @Override - public void postRestoreSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()"); - } - - try { - activatePluginClassLoader(); - implMasterObserver.postRestoreSnapshot(observerContext,snapshot,tableDescriptor); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== 
HBaseAtlasCoprocessor.postRestoreSnapshot()"); - } - } - - private void activatePluginClassLoader() { - if(atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if(atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } - -} diff --git a/addons/hbase-bridge/pom.xml b/addons/hbase-bridge/pom.xml deleted file mode 100644 index fe5a711816..0000000000 --- a/addons/hbase-bridge/pom.xml +++ /dev/null @@ -1,562 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hbase-bridge - Apache Atlas Hbase Bridge Module - Apache Atlas Hbase Bridge - jar - - - 3.0.3 - 9.3.14.v20161028 - - - - - - org.apache.hbase - hbase-server - ${hbase.version} - provided - - - javax.servlet - servlet-api - - - org.mortbay.jetty - servlet-api-2.5 - - - - - - org.apache.atlas - atlas-notification - - - - - com.sun.jersey - jersey-bundle - 1.19 - test - - - - org.apache.atlas - atlas-webapp - war - test - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - - - javax.servlet - servlet-api - - - - - org.apache.hadoop - hadoop-hdfs-client - ${hadoop.version} - - - - org.apache.hadoop - hadoop-annotations - - - - org.apache.hadoop - hadoop-minicluster - ${hadoop.version} - - - javax.servlet - servlet-api - - - - - - org.testng - testng - - - - org.mockito - mockito-all - - - - org.apache.httpcomponents - httpcore - ${httpcomponents-httpcore.version} - - - - org.eclipse.jetty - jetty-webapp - ${jetty.version} - compile - - - - org.eclipse.jetty - jetty-server - test - - - - org.apache.hbase - hbase-server - ${hbase.version} - test-jar - test - - - org.mortbay.jetty - servlet-api-2.5 - - - org.eclipse.jetty - jetty-server - - - - - - junit - junit - test - 4.12 - - - - org.apache.hbase - hbase-client - ${hbase.version} - - - org.apache.hbase - hbase-common - ${hbase.version} - - - com.github.stephenc.findbugs - findbugs-annotations - - - - - org.apache.hbase - hbase-hadoop2-compat - ${hbase.version} - test-jar - test - - - org.apache.hbase - hbase-hadoop-compat - ${hbase.version} - test-jar - test - - - com.google.guava - guava - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - compile - - - javax.servlet - servlet-api - - - - - org.apache.atlas - atlas-client-v2 - ${project.version} - - - org.apache.hbase - hbase-zookeeper - test-jar - test - ${hbase.version} - - - org.apache.hbase - hbase-common - test-jar - ${hbase.version} - test - - - - - org.apache.hbase - hbase-testing-util - ${hbase.version} - - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/hbase/atlas-hbase-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey.contribs - jersey-multipart - ${jersey.version} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - com.fasterxml.jackson.core - 
jackson-core - ${jackson.version} - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - commons-configuration - commons-configuration - ${commons-conf.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/hbase - false - false - true - - - ${project.groupId} - hbase-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - true - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/** - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - - diff --git a/addons/hbase-bridge/src/bin/import-hbase.sh b/addons/hbase-bridge/src/bin/import-hbase.sh deleted file mode 100644 index a343036faf..0000000000 --- a/addons/hbase-bridge/src/bin/import-hbase.sh +++ /dev/null @@ -1,162 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. See accompanying LICENSE file. 
-# - -# resolve links - $0 may be a softlink -PRG="${0}" - -[[ `uname -s` == *"CYGWIN"* ]] && CYGWIN=true - -while [ -h "${PRG}" ]; do - ls=`ls -ld "${PRG}"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "${PRG}"`/"$link" - fi -done - -echo ">>>>> $PRG" - -BASEDIR=`dirname ${PRG}` -BASEDIR=`cd ${BASEDIR}/..;pwd` - -echo ">>>>> $BASEDIR" - -if test -z "${JAVA_HOME}" -then - JAVA_BIN=`which java` - JAR_BIN=`which jar` -else - JAVA_BIN="${JAVA_HOME}/bin/java" - JAR_BIN="${JAVA_HOME}/bin/jar" -fi -export JAVA_BIN - -if [ ! -e "${JAVA_BIN}" ] || [ ! -e "${JAR_BIN}" ]; then - echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available." - exit 1 -fi - -# Construct Atlas classpath using jars from hook/hbase/atlas-hbase-plugin-impl/ directory. -for i in "${BASEDIR}/hook/hbase/atlas-hbase-plugin-impl/"*.jar; do - ATLASCPPATH="${ATLASCPPATH}:$i" -done - -if [ -z "${ATLAS_CONF_DIR}" ] && [ -e /etc/atlas/conf ];then - ATLAS_CONF_DIR=/etc/atlas/conf -fi -ATLASCPPATH=${ATLASCPPATH}:${ATLAS_CONF_DIR} - -# log dir for applications -ATLAS_LOG_DIR="${ATLAS_LOG_DIR:-/var/log/atlas}" -export ATLAS_LOG_DIR -LOGFILE="$ATLAS_LOG_DIR/import-hbase.log" - -TIME=`date +%Y%m%d%H%M%s` - -#Add HBase conf in classpath -if [ ! -z "$HBASE_CONF_DIR" ]; then - HBASE_CONF=$HBASE_CONF_DIR -elif [ ! -z "$HBASE_HOME" ]; then - HBASE_CONF="$HBASE_HOME/conf" -elif [ -e /etc/hbase/conf ]; then - HBASE_CONF="/etc/hbase/conf" -else - echo "Could not find a valid HBASE configuration" - exit 1 -fi - -echo Using HBase configuration directory "[$HBASE_CONF]" - - -if [ -f "${HBASE_CONF}/hbase-env.sh" ]; then - . "${HBASE_CONF}/hbase-env.sh" -fi - -if [ -z "$HBASE_HOME" ]; then - if [ -d "${BASEDIR}/../hbase" ]; then - HBASE_HOME=${BASEDIR}/../hbase - else - echo "Please set HBASE_HOME to the root of HBase installation" - exit 1 - fi -fi - -HBASE_CP="${HBASE_CONF}" - -for i in "${HBASE_HOME}/lib/"*.jar; do - HBASE_CP="${HBASE_CP}:$i" -done - -#Add hadoop conf in classpath -if [ ! -z "$HADOOP_CLASSPATH" ]; then - HADOOP_CP=$HADOOP_CLASSPATH -elif [ ! -z "$HADOOP_HOME" ]; then - HADOOP_CP=`$HADOOP_HOME/bin/hadoop classpath` -elif [ $(command -v hadoop) ]; then - HADOOP_CP=`hadoop classpath` - echo $HADOOP_CP -else - echo "Environment variable HADOOP_CLASSPATH or HADOOP_HOME need to be set" - exit 1 -fi - -CP="${HBASE_CP}:${HADOOP_CP}:${ATLASCPPATH}" - -# If running in cygwin, convert pathnames and classpath to Windows format. 
-if [ "${CYGWIN}" == "true" ] -then - ATLAS_LOG_DIR=`cygpath -w ${ATLAS_LOG_DIR}` - LOGFILE=`cygpath -w ${LOGFILE}` - HBASE_CP=`cygpath -w ${HBASE_CP}` - HADOOP_CP=`cygpath -w ${HADOOP_CP}` - CP=`cygpath -w -p ${CP}` -fi - -JAVA_PROPERTIES="$ATLAS_OPTS -Datlas.log.dir=$ATLAS_LOG_DIR -Datlas.log.file=import-hbase.log --Dlog4j.configuration=atlas-hbase-import-log4j.xml" - -IMPORT_ARGS= -JVM_ARGS= - -while true -do - option=$1 - shift - - case "$option" in - -n) IMPORT_ARGS="$IMPORT_ARGS -n $1"; shift;; - -t) IMPORT_ARGS="$IMPORT_ARGS -t $1"; shift;; - -f) IMPORT_ARGS="$IMPORT_ARGS -f $1"; shift;; - --namespace) IMPORT_ARGS="$IMPORT_ARGS --namespace $1"; shift;; - --table) IMPORT_ARGS="$IMPORT_ARGS --table $1"; shift;; - --filename) IMPORT_ARGS="$IMPORT_ARGS --filename $1"; shift;; - "") break;; - *) JVM_ARGS="$JVM_ARGS $option" - esac -done - -JAVA_PROPERTIES="${JAVA_PROPERTIES} ${JVM_ARGS}" - -echo "Log file for import is $LOGFILE" - -"${JAVA_BIN}" ${JAVA_PROPERTIES} -cp "${CP}" org.apache.atlas.hbase.bridge.HBaseBridge $IMPORT_ARGS - -RETVAL=$? -[ $RETVAL -eq 0 ] && echo HBase Data Model imported successfully!!! -[ $RETVAL -ne 0 ] && echo Failed to import HBase Data Model!!! - -exit $RETVAL diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java deleted file mode 100644 index 8e6c57dba3..0000000000 --- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java +++ /dev/null @@ -1,678 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase.bridge; - -import org.apache.atlas.AtlasConstants; -import org.apache.atlas.hbase.model.HBaseOperationContext; -import org.apache.atlas.hbase.model.HBaseDataTypes; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV2; -import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.repository.Constants.HBASE_SOURCE; - -// This will register Hbase entities into Atlas -public class HBaseAtlasHook extends AtlasHook { - private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasHook.class); - - - public static final String ATTR_DESCRIPTION = "description"; - public static final String ATTR_ATLAS_ENDPOINT = "atlas.rest.address"; - public static final String ATTR_PARAMETERS = "parameters"; - public static final String ATTR_URI = "uri"; - public static final String ATTR_NAMESPACE = "namespace"; - public static final String ATTR_TABLE = "table"; - public static final String ATTR_COLUMNFAMILIES = "column_families"; - public static final String ATTR_CREATE_TIME = "createTime"; - public static final String ATTR_MODIFIED_TIME = "modifiedTime"; - public static final String ATTR_OWNER = "owner"; - public static final String ATTR_NAME = "name"; - - // column addition metadata - public static final String ATTR_TABLE_MAX_FILESIZE = "maxFileSize"; - public static final String ATTR_TABLE_ISREADONLY = "isReadOnly"; - public static final String ATTR_TABLE_ISCOMPACTION_ENABLED = "isCompactionEnabled"; - public static final String ATTR_TABLE_ISNORMALIZATION_ENABLED = "isNormalizationEnabled"; - public static final String ATTR_TABLE_REPLICATION_PER_REGION = "replicasPerRegion"; - public static final String ATTR_TABLE_DURABLILITY = "durability"; - public static final String ATTR_TABLE_NORMALIZATION_ENABLED = "isNormalizationEnabled"; - - // column family additional metadata - public static final String ATTR_CF_BLOOMFILTER_TYPE = "bloomFilterType"; - public static final String ATTR_CF_COMPRESSION_TYPE = "compressionType"; - public static final String 
ATTR_CF_COMPACTION_COMPRESSION_TYPE = "compactionCompressionType"; - public static final String ATTR_CF_ENCRYPTION_TYPE = "encryptionType"; - public static final String ATTR_CF_INMEMORY_COMPACTION_POLICY = "inMemoryCompactionPolicy"; - public static final String ATTR_CF_KEEP_DELETE_CELLS = "keepDeletedCells"; - public static final String ATTR_CF_MAX_VERSIONS = "maxVersions"; - public static final String ATTR_CF_MIN_VERSIONS = "minVersions"; - public static final String ATTR_CF_DATA_BLOCK_ENCODING = "dataBlockEncoding"; - public static final String ATTR_CF_STORAGE_POLICY = "StoragePolicy"; - public static final String ATTR_CF_TTL = "ttl"; - public static final String ATTR_CF_BLOCK_CACHE_ENABLED = "blockCacheEnabled"; - public static final String ATTR_CF_CACHED_BLOOM_ON_WRITE = "cacheBloomsOnWrite"; - public static final String ATTR_CF_CACHED_DATA_ON_WRITE = "cacheDataOnWrite"; - public static final String ATTR_CF_CACHED_INDEXES_ON_WRITE = "cacheIndexesOnWrite"; - public static final String ATTR_CF_EVICT_BLOCK_ONCLOSE = "evictBlocksOnClose"; - public static final String ATTR_CF_PREFETCH_BLOCK_ONOPEN = "prefetchBlocksOnOpen"; - public static final String ATTR_CF_NEW_VERSION_BEHAVIOR = "newVersionBehavior"; - public static final String ATTR_CF_MOB_ENABLED = "isMobEnabled"; - public static final String ATTR_CF_MOB_COMPATCTPARTITION_POLICY = "mobCompactPartitionPolicy"; - - public static final String HBASE_NAMESPACE_QUALIFIED_NAME = "%s@%s"; - public static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT = "%s:%s@%s"; - public static final String HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT = "%s:%s.%s@%s"; - - private static final String REFERENCEABLE_ATTRIBUTE_NAME = "qualifiedName"; - - public static final String RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES = "hbase_table_column_families"; - public static final String RELATIONSHIP_HBASE_TABLE_NAMESPACE = "hbase_table_namespace"; - - private static volatile HBaseAtlasHook me; - - public enum OPERATION { - CREATE_NAMESPACE("create_namespace"), - ALTER_NAMESPACE("alter_namespace"), - DELETE_NAMESPACE("delete_namespace"), - CREATE_TABLE("create_table"), - ALTER_TABLE("alter_table"), - DELETE_TABLE("delete_table"), - CREATE_COLUMN_FAMILY("create_column_Family"), - ALTER_COLUMN_FAMILY("alter_column_Family"), - DELETE_COLUMN_FAMILY("delete_column_Family"); - - private final String name; - - OPERATION(String s) { - name = s; - } - - public String getName() { - return name; - } - } - - public static HBaseAtlasHook getInstance() { - HBaseAtlasHook ret = me; - - if (ret == null) { - try { - synchronized (HBaseAtlasHook.class) { - ret = me; - - if (ret == null) { - me = ret = new HBaseAtlasHook(); - } - } - } catch (Exception e) { - LOG.error("Caught exception instantiating the Atlas HBase hook.", e); - } - } - - return ret; - } - - public HBaseAtlasHook() { - } - - public void createAtlasInstances(HBaseOperationContext hbaseOperationContext) { - OPERATION operation = hbaseOperationContext.getOperation(); - - LOG.info("HBaseAtlasHook(operation={})", operation); - - switch (operation) { - case CREATE_NAMESPACE: - case ALTER_NAMESPACE: - createOrUpdateNamespaceInstance(hbaseOperationContext); - break; - case DELETE_NAMESPACE: - deleteNameSpaceInstance(hbaseOperationContext); - break; - case CREATE_TABLE: - case ALTER_TABLE: - createOrUpdateTableInstance(hbaseOperationContext); - break; - case DELETE_TABLE: - deleteTableInstance(hbaseOperationContext); - break; - case CREATE_COLUMN_FAMILY: - case ALTER_COLUMN_FAMILY: - 
createOrUpdateColumnFamilyInstance(hbaseOperationContext); - break; - case DELETE_COLUMN_FAMILY: - deleteColumnFamilyInstance(hbaseOperationContext); - break; - } - } - - private void createOrUpdateNamespaceInstance(HBaseOperationContext hbaseOperationContext) { - AtlasEntity nameSpace = buildNameSpace(hbaseOperationContext); - - switch (hbaseOperationContext.getOperation()) { - case CREATE_NAMESPACE: - LOG.info("Create NameSpace {}", nameSpace.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityCreateRequestV2(hbaseOperationContext.getUser(), new AtlasEntitiesWithExtInfo(nameSpace))); - break; - - case ALTER_NAMESPACE: - LOG.info("Modify NameSpace {}", nameSpace.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityUpdateRequestV2(hbaseOperationContext.getUser(), new AtlasEntitiesWithExtInfo(nameSpace))); - break; - } - } - - private void deleteNameSpaceInstance(HBaseOperationContext hbaseOperationContext) { - String nameSpaceQName = getNameSpaceQualifiedName(getMetadataNamespace(), hbaseOperationContext.getNameSpace()); - AtlasObjectId nameSpaceId = new AtlasObjectId(HBaseDataTypes.HBASE_NAMESPACE.getName(), REFERENCEABLE_ATTRIBUTE_NAME, nameSpaceQName); - - LOG.info("Delete NameSpace {}", nameSpaceQName); - - hbaseOperationContext.addMessage(new EntityDeleteRequestV2(hbaseOperationContext.getUser(), Collections.singletonList(nameSpaceId))); - } - - private void createOrUpdateTableInstance(HBaseOperationContext hbaseOperationContext) { - AtlasEntity nameSpace = buildNameSpace(hbaseOperationContext); - AtlasEntity table = buildTable(hbaseOperationContext, nameSpace); - List columnFamilies = buildColumnFamilies(hbaseOperationContext, nameSpace, table); - - table.setRelationshipAttribute(ATTR_COLUMNFAMILIES, AtlasTypeUtil.getAtlasRelatedObjectIds(columnFamilies, RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES)); - - AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo(table); - - entities.addReferredEntity(nameSpace); - - if (CollectionUtils.isNotEmpty(columnFamilies)) { - for (AtlasEntity columnFamily : columnFamilies) { - entities.addReferredEntity(columnFamily); - } - } - - switch (hbaseOperationContext.getOperation()) { - case CREATE_TABLE: - LOG.info("Create Table {}", table.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityCreateRequestV2(hbaseOperationContext.getUser(), entities)); - break; - - case ALTER_TABLE: - LOG.info("Modify Table {}", table.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityUpdateRequestV2(hbaseOperationContext.getUser(), entities)); - break; - } - } - - private void deleteTableInstance(HBaseOperationContext hbaseOperationContext) { - TableName tableName = hbaseOperationContext.getTableName(); - String nameSpaceName = tableName.getNamespaceAsString(); - - if (nameSpaceName == null) { - nameSpaceName = tableName.getNameWithNamespaceInclAsString(); - } - - String tableNameStr = tableName.getNameAsString(); - String tableQName = getTableQualifiedName(getMetadataNamespace(), nameSpaceName, tableNameStr); - AtlasObjectId tableId = new AtlasObjectId(HBaseDataTypes.HBASE_TABLE.getName(), REFERENCEABLE_ATTRIBUTE_NAME, tableQName); - - LOG.info("Delete Table {}", tableQName); - - hbaseOperationContext.addMessage(new EntityDeleteRequestV2(hbaseOperationContext.getUser(), Collections.singletonList(tableId))); - } - - private void createOrUpdateColumnFamilyInstance(HBaseOperationContext hbaseOperationContext) 
{ - AtlasEntity nameSpace = buildNameSpace(hbaseOperationContext); - AtlasEntity table = buildTable(hbaseOperationContext, nameSpace); - AtlasEntity columnFamily = buildColumnFamily(hbaseOperationContext, hbaseOperationContext.gethColumnDescriptor(), nameSpace, table); - - AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo(columnFamily); - - entities.addReferredEntity(nameSpace); - entities.addReferredEntity(table); - - switch (hbaseOperationContext.getOperation()) { - case CREATE_COLUMN_FAMILY: - LOG.info("Create ColumnFamily {}", columnFamily.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityCreateRequestV2(hbaseOperationContext.getUser(), entities)); - break; - - case ALTER_COLUMN_FAMILY: - LOG.info("Alter ColumnFamily {}", columnFamily.getAttribute(REFERENCEABLE_ATTRIBUTE_NAME)); - - hbaseOperationContext.addMessage(new EntityUpdateRequestV2(hbaseOperationContext.getUser(), entities)); - break; - } - } - - private void deleteColumnFamilyInstance(HBaseOperationContext hbaseOperationContext) { - TableName tableName = hbaseOperationContext.getTableName(); - String nameSpaceName = tableName.getNamespaceAsString(); - - if (nameSpaceName == null) { - nameSpaceName = tableName.getNameWithNamespaceInclAsString(); - } - - String tableNameStr = tableName.getNameAsString(); - String columnFamilyName = hbaseOperationContext.getColummFamily(); - String columnFamilyQName = getColumnFamilyQualifiedName(getMetadataNamespace(), nameSpaceName, tableNameStr, columnFamilyName); - AtlasObjectId columnFamilyId = new AtlasObjectId(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(), REFERENCEABLE_ATTRIBUTE_NAME, columnFamilyQName); - - LOG.info("Delete ColumnFamily {}", columnFamilyQName); - - hbaseOperationContext.addMessage(new EntityDeleteRequestV2(hbaseOperationContext.getUser(), Collections.singletonList(columnFamilyId))); - } - - - /** - * Construct the qualified name used to uniquely identify a ColumnFamily instance in Atlas. - * - * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs - * @param nameSpace Name of the HBase database to which the Table belongs - * @param tableName Name of the HBase table - * @param columnFamily Name of the ColumnFamily - * @return Unique qualified name to identify the Table instance in Atlas. - */ - public static String getColumnFamilyQualifiedName(String metadataNamespace, String nameSpace, String tableName, String columnFamily) { - if (metadataNamespace == null || nameSpace == null || tableName == null || columnFamily == null) { - return null; - } else { - return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace, stripNameSpace(tableName), columnFamily, metadataNamespace); - } - } - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * - * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs - * @param nameSpace Name of the HBase database to which the Table belongs - * @param tableName Name of the HBase table - * @return Unique qualified name to identify the Table instance in Atlas. 
- */ - public static String getTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) { - if (metadataNamespace == null || nameSpace == null || tableName == null) { - return null; - } else { - return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace, stripNameSpace(tableName), metadataNamespace); - } - } - - /** - * Construct the qualified name used to uniquely identify a HBase NameSpace instance in Atlas. - * - * @param metadataNamespace Metadata namespace of the cluster to which the HBase component belongs - * @param nameSpace - * @return Unique qualified name to identify the HBase NameSpace instance in Atlas. - */ - public static String getNameSpaceQualifiedName(String metadataNamespace, String nameSpace) { - if (metadataNamespace == null || nameSpace == null) { - return null; - } else { - return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace, metadataNamespace); - } - } - - private static String stripNameSpace(String tableName) { - return tableName.substring(tableName.indexOf(":") + 1); - } - - private AtlasEntity buildNameSpace(HBaseOperationContext hbaseOperationContext) { - AtlasEntity nameSpace = new AtlasEntity(HBaseDataTypes.HBASE_NAMESPACE.getName()); - NamespaceDescriptor nameSpaceDesc = hbaseOperationContext.getNamespaceDescriptor(); - String nameSpaceName = nameSpaceDesc == null ? null : hbaseOperationContext.getNamespaceDescriptor().getName(); - - if (nameSpaceName == null) { - nameSpaceName = hbaseOperationContext.getNameSpace(); - } - - Date now = new Date(System.currentTimeMillis()); - - nameSpace.setAttribute(ATTR_NAME, nameSpaceName); - nameSpace.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, getNameSpaceQualifiedName(getMetadataNamespace(), nameSpaceName)); - nameSpace.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getMetadataNamespace()); - nameSpace.setAttribute(ATTR_DESCRIPTION, nameSpaceName); - nameSpace.setAttribute(ATTR_PARAMETERS, hbaseOperationContext.getHbaseConf()); - nameSpace.setAttribute(ATTR_OWNER, hbaseOperationContext.getOwner()); - nameSpace.setAttribute(ATTR_MODIFIED_TIME, now); - - if (OPERATION.CREATE_NAMESPACE.equals(hbaseOperationContext.getOperation())) { - nameSpace.setAttribute(ATTR_CREATE_TIME, now); - } - - return nameSpace; - } - - private AtlasEntity buildTable(HBaseOperationContext hbaseOperationContext, AtlasEntity nameSpace) { - AtlasEntity table = new AtlasEntity(HBaseDataTypes.HBASE_TABLE.getName()); - String tableName = getTableName(hbaseOperationContext); - String nameSpaceName = (String) nameSpace.getAttribute(ATTR_NAME); - String tableQName = getTableQualifiedName(getMetadataNamespace(), nameSpaceName, tableName); - OPERATION operation = hbaseOperationContext.getOperation(); - Date now = new Date(System.currentTimeMillis()); - - table.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, tableQName); - table.setAttribute(ATTR_NAME, tableName); - table.setAttribute(ATTR_URI, tableName); - table.setAttribute(ATTR_OWNER, hbaseOperationContext.getOwner()); - table.setAttribute(ATTR_DESCRIPTION, tableName); - table.setAttribute(ATTR_PARAMETERS, hbaseOperationContext.getHbaseConf()); - table.setRelationshipAttribute(ATTR_NAMESPACE, AtlasTypeUtil.getAtlasRelatedObjectId(nameSpace, RELATIONSHIP_HBASE_TABLE_NAMESPACE)); - - TableDescriptor tableDescriptor = hbaseOperationContext.gethTableDescriptor(); - if (tableDescriptor != null) { - table.setAttribute(ATTR_TABLE_MAX_FILESIZE, tableDescriptor.getMaxFileSize()); - table.setAttribute(ATTR_TABLE_REPLICATION_PER_REGION, 
tableDescriptor.getRegionReplication()); - table.setAttribute(ATTR_TABLE_ISREADONLY, tableDescriptor.isReadOnly()); - table.setAttribute(ATTR_TABLE_ISNORMALIZATION_ENABLED, tableDescriptor.isNormalizationEnabled()); - table.setAttribute(ATTR_TABLE_ISCOMPACTION_ENABLED, tableDescriptor.isCompactionEnabled()); - table.setAttribute(ATTR_TABLE_DURABLILITY, (tableDescriptor.getDurability() != null ? tableDescriptor.getDurability().name() : null)); - table.setAttribute(ATTR_TABLE_NORMALIZATION_ENABLED, tableDescriptor.isNormalizationEnabled()); - } - - switch (operation) { - case CREATE_TABLE: - table.setAttribute(ATTR_CREATE_TIME, now); - table.setAttribute(ATTR_MODIFIED_TIME, now); - break; - case CREATE_COLUMN_FAMILY: - table.setAttribute(ATTR_MODIFIED_TIME, now); - break; - case ALTER_TABLE: - case ALTER_COLUMN_FAMILY: - table.setAttribute(ATTR_MODIFIED_TIME, now); - break; - default: - break; - } - - return table; - } - - private List buildColumnFamilies(HBaseOperationContext hbaseOperationContext, AtlasEntity nameSpace, AtlasEntity table) { - List columnFamilies = new ArrayList<>(); - ColumnFamilyDescriptor[] columnFamilyDescriptors = hbaseOperationContext.gethColumnDescriptors(); - - if (columnFamilyDescriptors != null) { - for (ColumnFamilyDescriptor columnFamilyDescriptor : columnFamilyDescriptors) { - AtlasEntity columnFamily = buildColumnFamily(hbaseOperationContext, columnFamilyDescriptor, nameSpace, table); - - columnFamilies.add(columnFamily); - } - } - - return columnFamilies; - } - - private AtlasEntity buildColumnFamily(HBaseOperationContext hbaseOperationContext, ColumnFamilyDescriptor columnFamilyDescriptor, AtlasEntity nameSpace, AtlasEntity table) { - AtlasEntity columnFamily = new AtlasEntity(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName()); - String columnFamilyName = columnFamilyDescriptor.getNameAsString(); - String tableName = (String) table.getAttribute(ATTR_NAME); - String nameSpaceName = (String) nameSpace.getAttribute(ATTR_NAME); - String columnFamilyQName = getColumnFamilyQualifiedName(getMetadataNamespace(), nameSpaceName, tableName, columnFamilyName); - Date now = new Date(System.currentTimeMillis()); - - columnFamily.setAttribute(ATTR_NAME, columnFamilyName); - columnFamily.setAttribute(ATTR_DESCRIPTION, columnFamilyName); - columnFamily.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, columnFamilyQName); - columnFamily.setAttribute(ATTR_OWNER, hbaseOperationContext.getOwner()); - columnFamily.setRelationshipAttribute(ATTR_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(table, RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES)); - - if (columnFamilyDescriptor!= null) { - columnFamily.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, columnFamilyDescriptor.isBlockCacheEnabled()); - columnFamily.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (columnFamilyDescriptor.getBloomFilterType() != null ? columnFamilyDescriptor.getBloomFilterType().name():null)); - columnFamily.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, columnFamilyDescriptor.isCacheBloomsOnWrite()); - columnFamily.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, columnFamilyDescriptor.isCacheDataOnWrite()); - columnFamily.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, columnFamilyDescriptor.isCacheIndexesOnWrite()); - columnFamily.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, (columnFamilyDescriptor.getCompactionCompressionType() != null ? columnFamilyDescriptor.getCompactionCompressionType().name():null)); - columnFamily.setAttribute(ATTR_CF_COMPRESSION_TYPE, (columnFamilyDescriptor.getCompressionType() != null ? 
columnFamilyDescriptor.getCompressionType().name():null)); - columnFamily.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, (columnFamilyDescriptor.getDataBlockEncoding() != null ? columnFamilyDescriptor.getDataBlockEncoding().name():null)); - columnFamily.setAttribute(ATTR_CF_ENCRYPTION_TYPE, columnFamilyDescriptor.getEncryptionType()); - columnFamily.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, columnFamilyDescriptor.isEvictBlocksOnClose()); - columnFamily.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, (columnFamilyDescriptor.getInMemoryCompaction() != null ? columnFamilyDescriptor.getInMemoryCompaction().name():null)); - columnFamily.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( columnFamilyDescriptor.getKeepDeletedCells() != null ? columnFamilyDescriptor.getKeepDeletedCells().name():null)); - columnFamily.setAttribute(ATTR_CF_MAX_VERSIONS, columnFamilyDescriptor.getMaxVersions()); - columnFamily.setAttribute(ATTR_CF_MIN_VERSIONS, columnFamilyDescriptor.getMinVersions()); - columnFamily.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR, columnFamilyDescriptor.isNewVersionBehavior()); - columnFamily.setAttribute(ATTR_CF_MOB_ENABLED, columnFamilyDescriptor.isMobEnabled()); - columnFamily.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, ( columnFamilyDescriptor.getMobCompactPartitionPolicy() != null ? columnFamilyDescriptor.getMobCompactPartitionPolicy().name():null)); - columnFamily.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, columnFamilyDescriptor.isPrefetchBlocksOnOpen()); - columnFamily.setAttribute(ATTR_CF_STORAGE_POLICY, columnFamilyDescriptor.getStoragePolicy()); - columnFamily.setAttribute(ATTR_CF_TTL, columnFamilyDescriptor.getTimeToLive()); - } - - switch (hbaseOperationContext.getOperation()) { - case CREATE_COLUMN_FAMILY: - case CREATE_TABLE: - columnFamily.setAttribute(ATTR_CREATE_TIME, now); - columnFamily.setAttribute(ATTR_MODIFIED_TIME, now); - break; - - case ALTER_COLUMN_FAMILY: - columnFamily.setAttribute(ATTR_MODIFIED_TIME, now); - break; - - default: - break; - } - - return columnFamily; - } - - public String getMessageSource() { - return HBASE_SOURCE; - } - - private String getTableName(HBaseOperationContext hbaseOperationContext) { - final String ret; - - TableName tableName = hbaseOperationContext.getTableName(); - - if (tableName != null) { - ret = tableName.getNameAsString(); - } else { - TableDescriptor tableDescriptor = hbaseOperationContext.gethTableDescriptor(); - - ret = (tableDescriptor != null) ? tableDescriptor.getTableName().getNameAsString() : null; - } - - return ret; - } - - public void sendHBaseNameSpaceOperation(final NamespaceDescriptor namespaceDescriptor, final String nameSpace, final OPERATION operation, ObserverContext ctx) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasHook.sendHBaseNameSpaceOperation()"); - } - - try { - final UserGroupInformation ugi = getUGI(ctx); - final User user = getActiveUser(ctx); - final String userName = (user != null) ? 
user.getShortName() : null; - HBaseOperationContext hbaseOperationContext = handleHBaseNameSpaceOperation(namespaceDescriptor, nameSpace, operation, ugi, userName); - - sendNotification(hbaseOperationContext); - } catch (Throwable t) { - LOG.error("HBaseAtlasHook.sendHBaseNameSpaceOperation(): failed to send notification", t); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasHook.sendHBaseNameSpaceOperation()"); - } - } - - public void sendHBaseTableOperation(TableDescriptor tableDescriptor, final TableName tableName, final OPERATION operation, ObserverContext ctx) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasHook.sendHBaseTableOperation()"); - } - - try { - final UserGroupInformation ugi = getUGI(ctx); - final User user = getActiveUser(ctx); - final String userName = (user != null) ? user.getShortName() : null; - HBaseOperationContext hbaseOperationContext = handleHBaseTableOperation(tableDescriptor, tableName, operation, ugi, userName); - - sendNotification(hbaseOperationContext); - } catch (Throwable t) { - LOG.error("<== HBaseAtlasHook.sendHBaseTableOperation(): failed to send notification", t); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasHook.sendHBaseTableOperation()"); - } - } - - private void sendNotification(HBaseOperationContext hbaseOperationContext) { - UserGroupInformation ugi = hbaseOperationContext.getUgi(); - - if (ugi != null && ugi.getRealUser() != null) { - ugi = ugi.getRealUser(); - } - - notifyEntities(hbaseOperationContext.getMessages(), ugi); - } - - private HBaseOperationContext handleHBaseNameSpaceOperation(NamespaceDescriptor namespaceDescriptor, String nameSpace, OPERATION operation, UserGroupInformation ugi, String userName) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasHook.handleHBaseNameSpaceOperation()"); - } - - HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(namespaceDescriptor, nameSpace, operation, ugi, userName, userName); - createAtlasInstances(hbaseOperationContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasHook.handleHBaseNameSpaceOperation(): {}", hbaseOperationContext); - } - - return hbaseOperationContext; - } - - private HBaseOperationContext handleHBaseTableOperation(TableDescriptor tableDescriptor, TableName tableName, OPERATION operation, UserGroupInformation ugi, String userName) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasHook.handleHBaseTableOperation()"); - } - - Map hbaseConf = null; - String owner = null; - String tableNameSpace = null; - TableName hbaseTableName = null; - ColumnFamilyDescriptor[] columnFamilyDescriptors = null; - - if (tableDescriptor != null) { - owner = tableDescriptor.getOwnerString(); - hbaseConf = null; - hbaseTableName = tableDescriptor.getTableName(); - if (hbaseTableName != null) { - tableNameSpace = hbaseTableName.getNamespaceAsString(); - if (tableNameSpace == null) { - tableNameSpace = hbaseTableName.getNameWithNamespaceInclAsString(); - } - } - } - - if (owner == null) { - owner = userName; - } - - if (tableDescriptor != null) { - columnFamilyDescriptors = tableDescriptor.getColumnFamilies(); - } - - HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, tableDescriptor, tableName, columnFamilyDescriptors, operation, ugi, userName, owner, hbaseConf); - createAtlasInstances(hbaseOperationContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasHook.handleHBaseTableOperation(): {}", hbaseOperationContext); - } - return hbaseOperationContext; - } - - 
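For context: the hook methods above are invoked from the HBase master coprocessor that is also removed later in this diff. Below is a minimal sketch of that wiring, using only the APIs shown in the deleted sources (HBaseAtlasHook.getInstance(), sendHBaseTableOperation, OPERATION.CREATE_TABLE); the class name ExampleObserver and the generic type parameters on ObserverContext are illustrative assumptions, not code from this repository.

import java.io.IOException;
import java.util.Optional;

import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Hypothetical observer sketch; mirrors the deleted HBaseAtlasCoprocessor further below.
public class ExampleObserver implements MasterCoprocessor, MasterObserver {
    private final HBaseAtlasHook hook = HBaseAtlasHook.getInstance();

    @Override
    public Optional<MasterObserver> getMasterObserver() {
        return Optional.of(this); // expose this instance for master DDL callbacks
    }

    @Override
    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
                                TableDescriptor desc, RegionInfo[] regions) throws IOException {
        // The hook resolves the caller's UGI, builds an HBaseOperationContext,
        // creates hbase_table / hbase_column_family entities, and queues them as
        // hook notifications (delivered via sendNotification/notifyEntities above).
        hook.sendHBaseTableOperation(desc, null, HBaseAtlasHook.OPERATION.CREATE_TABLE, ctx);
    }
}

In the removed integration test this coprocessor is registered through the hbase.coprocessor.master.classes property.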
private HBaseOperationContext handleHBaseColumnFamilyOperation(ColumnFamilyDescriptor columnFamilyDescriptor, TableName tableName, String columnFamily, OPERATION operation, UserGroupInformation ugi, String userName) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasHook.handleHBaseColumnFamilyOperation()"); - } - - String owner = userName; - Map hbaseConf = new HashMap<>(); - - String tableNameSpace = tableName.getNamespaceAsString(); - if (tableNameSpace == null) { - tableNameSpace = tableName.getNameWithNamespaceInclAsString(); - } - - if (columnFamilyDescriptor != null) { - hbaseConf = columnFamilyDescriptor.getConfiguration(); - } - - HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, tableName, columnFamilyDescriptor, columnFamily, operation, ugi, userName, owner, hbaseConf); - createAtlasInstances(hbaseOperationContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasHook.handleHBaseColumnFamilyOperation(): {}", hbaseOperationContext); - } - return hbaseOperationContext; - } - - private UserGroupInformation getUGI(ObserverContext ctx) { - UserGroupInformation ugi = null; - User user = null; - try { - user = getActiveUser(ctx); - ugi = UserGroupInformation.getLoginUser(); - } catch (Exception e) { - // not setting the UGI here - } - - if (ugi == null) { - if (user != null) { - ugi = user.getUGI(); - } - } - - LOG.info("HBaseAtlasHook: UGI: {}", ugi); - return ugi; - } - - private User getActiveUser(ObserverContext ctx) throws IOException { - return (User)ctx.getCaller().orElse(User.getCurrent()); - } -} diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java deleted file mode 100644 index f5a1d2ad51..0000000000 --- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java +++ /dev/null @@ -1,720 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase.bridge; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.hbase.model.HBaseDataTypes; -import org.apache.atlas.hook.AtlasHookException; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AtlasConfigurationUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.cli.BasicParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.HBaseAdmin; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class HBaseBridge { - private static final Logger LOG = LoggerFactory.getLogger(HBaseBridge.class); - - private static final int EXIT_CODE_SUCCESS = 0; - private static final int EXIT_CODE_FAILED = 1; - private static final String ATLAS_ENDPOINT = "atlas.rest.address"; - private static final String DEFAULT_ATLAS_URL = "http://localhost:21000/"; - private static final String CLUSTER_NAME_KEY = "atlas.cluster.name"; - private static final String DEFAULT_CLUSTER_NAME = "primary"; - private static final String HBASE_METADATA_NAMESPACE = "atlas.metadata.namespace"; - private static final String QUALIFIED_NAME = "qualifiedName"; - private static final String NAME = "name"; - private static final String URI = "uri"; - private static final String OWNER = "owner"; - private static final String DESCRIPTION_ATTR = "description"; - private static final String CLUSTERNAME = "clusterName"; - private static final String NAMESPACE = "namespace"; - private static final String TABLE = "table"; - private static final String COLUMN_FAMILIES = "column_families"; - - // table metadata - private static final String ATTR_TABLE_MAX_FILESIZE = "maxFileSize"; - private static final String ATTR_TABLE_ISREADONLY = "isReadOnly"; - private static 
final String ATTR_TABLE_ISCOMPACTION_ENABLED = "isCompactionEnabled"; - private static final String ATTR_TABLE_REPLICATION_PER_REGION = "replicasPerRegion"; - private static final String ATTR_TABLE_DURABLILITY = "durability"; - private static final String ATTR_TABLE_NORMALIZATION_ENABLED = "isNormalizationEnabled"; - - // column family metadata - private static final String ATTR_CF_BLOOMFILTER_TYPE = "bloomFilterType"; - private static final String ATTR_CF_COMPRESSION_TYPE = "compressionType"; - private static final String ATTR_CF_COMPACTION_COMPRESSION_TYPE = "compactionCompressionType"; - private static final String ATTR_CF_ENCRYPTION_TYPE = "encryptionType"; - private static final String ATTR_CF_KEEP_DELETE_CELLS = "keepDeletedCells"; - private static final String ATTR_CF_MAX_VERSIONS = "maxVersions"; - private static final String ATTR_CF_MIN_VERSIONS = "minVersions"; - private static final String ATTR_CF_DATA_BLOCK_ENCODING = "dataBlockEncoding"; - private static final String ATTR_CF_TTL = "ttl"; - private static final String ATTR_CF_BLOCK_CACHE_ENABLED = "blockCacheEnabled"; - private static final String ATTR_CF_CACHED_BLOOM_ON_WRITE = "cacheBloomsOnWrite"; - private static final String ATTR_CF_CACHED_DATA_ON_WRITE = "cacheDataOnWrite"; - private static final String ATTR_CF_CACHED_INDEXES_ON_WRITE = "cacheIndexesOnWrite"; - private static final String ATTR_CF_EVICT_BLOCK_ONCLOSE = "evictBlocksOnClose"; - private static final String ATTR_CF_PREFETCH_BLOCK_ONOPEN = "prefetchBlocksOnOpen"; - private static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName"; - private static final String ATTR_CF_INMEMORY_COMPACTION_POLICY = "inMemoryCompactionPolicy"; - private static final String ATTR_CF_MOB_COMPATCTPARTITION_POLICY = "mobCompactPartitionPolicy"; - private static final String ATTR_CF_MOB_ENABLED = "isMobEnabled"; - private static final String ATTR_CF_NEW_VERSION_BEHAVIOR = "newVersionBehavior"; - - private static final String HBASE_NAMESPACE_QUALIFIED_NAME = "%s@%s"; - private static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT = "%s:%s@%s"; - private static final String HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT = "%s:%s.%s@%s"; - - private final String metadataNamespace; - private final AtlasClientV2 atlasClientV2; - private final Admin hbaseAdmin; - - - public static void main(String[] args) { - int exitCode = EXIT_CODE_FAILED; - AtlasClientV2 atlasClientV2 =null; - - try { - Options options = new Options(); - options.addOption("n","namespace", true, "namespace"); - options.addOption("t", "table", true, "tablename"); - options.addOption("f", "filename", true, "filename"); - - CommandLineParser parser = new BasicParser(); - CommandLine cmd = parser.parse(options, args); - String namespaceToImport = cmd.getOptionValue("n"); - String tableToImport = cmd.getOptionValue("t"); - String fileToImport = cmd.getOptionValue("f"); - Configuration atlasConf = ApplicationProperties.get(); - String[] urls = atlasConf.getStringArray(ATLAS_ENDPOINT); - - if (urls == null || urls.length == 0) { - urls = new String[] { DEFAULT_ATLAS_URL }; - } - - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput(); - - atlasClientV2 = new AtlasClientV2(urls, basicAuthUsernamePassword); - } else { - UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - - atlasClientV2 = new AtlasClientV2(ugi, ugi.getShortUserName(), urls); - } - - HBaseBridge importer = new HBaseBridge(atlasConf, atlasClientV2); - - if 
(StringUtils.isNotEmpty(fileToImport)) { - File f = new File(fileToImport); - - if (f.exists() && f.canRead()) { - BufferedReader br = new BufferedReader(new FileReader(f)); - String line = null; - - while((line = br.readLine()) != null) { - String val[] = line.split(":"); - - if (ArrayUtils.isNotEmpty(val)) { - namespaceToImport = val[0]; - - if (val.length > 1) { - tableToImport = val[1]; - } else { - tableToImport = ""; - } - - importer.importHBaseEntities(namespaceToImport, tableToImport); - } - } - - exitCode = EXIT_CODE_SUCCESS; - } else { - LOG.error("Failed to read the file"); - } - } else { - importer.importHBaseEntities(namespaceToImport, tableToImport); - - exitCode = EXIT_CODE_SUCCESS; - } - } catch(ParseException e) { - LOG.error("Failed to parse arguments. Error: ", e.getMessage()); - printUsage(); - } catch(Exception e) { - System.out.println("ImportHBaseEntities failed. Please check the log file for the detailed error message"); - - LOG.error("ImportHBaseEntities failed", e); - }finally { - if(atlasClientV2!=null) { - atlasClientV2.close(); - } - } - - System.exit(exitCode); - } - - public HBaseBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2) throws Exception { - this.atlasClientV2 = atlasClientV2; - this.metadataNamespace = getMetadataNamespace(atlasConf); - - org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create(); - - LOG.info("checking HBase availability.."); - - HBaseAdmin.available(conf); - - LOG.info("HBase is available"); - - Connection conn = ConnectionFactory.createConnection(conf); - - hbaseAdmin = conn.getAdmin(); - } - - private String getMetadataNamespace(Configuration config) { - return AtlasConfigurationUtil.getRecentString(config, HBASE_METADATA_NAMESPACE, getClusterName(config)); - } - - private String getClusterName(Configuration config) { - return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME); - } - - private boolean importHBaseEntities(String namespaceToImport, String tableToImport) throws Exception { - boolean ret = false; - - if (StringUtils.isEmpty(namespaceToImport) && StringUtils.isEmpty(tableToImport)) { - // when both NameSpace and Table options are not present - importNameSpaceAndTable(); - ret = true; - } else if (StringUtils.isNotEmpty(namespaceToImport)) { - // When Namespace option is present or both namespace and table options are present - importNameSpaceWithTable(namespaceToImport, tableToImport); - ret = true; - } else if (StringUtils.isNotEmpty(tableToImport)) { - importTable(tableToImport); - ret = true; - } - - return ret; - } - - public void importNameSpace(final String nameSpace) throws Exception { - List matchingNameSpaceDescriptors = getMatchingNameSpaces(nameSpace); - - if (CollectionUtils.isNotEmpty(matchingNameSpaceDescriptors)) { - for (NamespaceDescriptor namespaceDescriptor : matchingNameSpaceDescriptors) { - createOrUpdateNameSpace(namespaceDescriptor); - } - } else { - throw new AtlasHookException("No NameSpace found for the given criteria. 
NameSpace = " + nameSpace); - } - } - - public void importTable(final String tableName) throws Exception { - String tableNameStr = null; - TableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableName)); - - if (ArrayUtils.isNotEmpty(htds)) { - for (TableDescriptor htd : htds) { - String tblNameWithNameSpace = htd.getTableName().getNameWithNamespaceInclAsString(); - String tblNameWithOutNameSpace = htd.getTableName().getNameAsString(); - - if (tableName.equals(tblNameWithNameSpace)) { - tableNameStr = tblNameWithNameSpace; - } else if (tableName.equals(tblNameWithOutNameSpace)) { - tableNameStr = tblNameWithOutNameSpace; - } else { - // when wild cards are used in table name - if (tblNameWithNameSpace != null) { - tableNameStr = tblNameWithNameSpace; - } else if (tblNameWithOutNameSpace != null) { - tableNameStr = tblNameWithOutNameSpace; - } - } - - byte[] nsByte = htd.getTableName().getNamespace(); - String nsName = new String(nsByte); - NamespaceDescriptor nsDescriptor = hbaseAdmin.getNamespaceDescriptor(nsName); - AtlasEntityWithExtInfo entity = createOrUpdateNameSpace(nsDescriptor); - ColumnFamilyDescriptor[] hcdts = htd.getColumnFamilies(); - - createOrUpdateTable(nsName, tableNameStr, entity.getEntity(), htd, hcdts); - } - } else { - throw new AtlasHookException("No Table found for the given criteria. Table = " + tableName); - } - } - - private void importNameSpaceAndTable() throws Exception { - NamespaceDescriptor[] namespaceDescriptors = hbaseAdmin.listNamespaceDescriptors(); - - if (ArrayUtils.isNotEmpty(namespaceDescriptors)) { - for (NamespaceDescriptor namespaceDescriptor : namespaceDescriptors) { - String namespace = namespaceDescriptor.getName(); - - importNameSpace(namespace); - } - } - - TableDescriptor[] htds = hbaseAdmin.listTables(); - - if (ArrayUtils.isNotEmpty(htds)) { - for (TableDescriptor htd : htds) { - String tableName = htd.getTableName().getNameAsString(); - - importTable(tableName); - } - } - } - - private void importNameSpaceWithTable(String namespaceToImport, String tableToImport) throws Exception { - importNameSpace(namespaceToImport); - - List hTableDescriptors = new ArrayList<>(); - - if (StringUtils.isEmpty(tableToImport)) { - List matchingNameSpaceDescriptors = getMatchingNameSpaces(namespaceToImport); - - if (CollectionUtils.isNotEmpty(matchingNameSpaceDescriptors)) { - hTableDescriptors = getTableDescriptors(matchingNameSpaceDescriptors); - } - } else { - tableToImport = namespaceToImport +":" + tableToImport; - - TableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableToImport)); - - hTableDescriptors.addAll(Arrays.asList(htds)); - } - - if (CollectionUtils.isNotEmpty(hTableDescriptors)) { - for (TableDescriptor htd : hTableDescriptors) { - String tblName = htd.getTableName().getNameAsString(); - - importTable(tblName); - } - } - } - - private List getMatchingNameSpaces(String nameSpace) throws Exception { - List ret = new ArrayList<>(); - NamespaceDescriptor[] namespaceDescriptors = hbaseAdmin.listNamespaceDescriptors(); - Pattern pattern = Pattern.compile(nameSpace); - - for (NamespaceDescriptor namespaceDescriptor:namespaceDescriptors){ - String nmSpace = namespaceDescriptor.getName(); - Matcher matcher = pattern.matcher(nmSpace); - - if (matcher.find()){ - ret.add(namespaceDescriptor); - } - } - - return ret; - } - - private List getTableDescriptors(List namespaceDescriptors) throws Exception { - List ret = new ArrayList<>(); - - for(NamespaceDescriptor namespaceDescriptor:namespaceDescriptors) { - TableDescriptor[] 
tableDescriptors = hbaseAdmin.listTableDescriptorsByNamespace(namespaceDescriptor.getName()); - - ret.addAll(Arrays.asList(tableDescriptors)); - } - - return ret; - } - - protected AtlasEntityWithExtInfo createOrUpdateNameSpace(NamespaceDescriptor namespaceDescriptor) throws Exception { - String nsName = namespaceDescriptor.getName(); - String nsQualifiedName = getNameSpaceQualifiedName(metadataNamespace, nsName); - AtlasEntityWithExtInfo nsEntity = findNameSpaceEntityInAtlas(nsQualifiedName); - - if (nsEntity == null) { - LOG.info("Importing NameSpace: " + nsQualifiedName); - - AtlasEntity entity = getNameSpaceEntity(nsName, null); - - nsEntity = createEntityInAtlas(new AtlasEntityWithExtInfo(entity)); - } else { - LOG.info("NameSpace already present in Atlas. Updating it..: " + nsQualifiedName); - - AtlasEntity entity = getNameSpaceEntity(nsName, nsEntity.getEntity()); - - nsEntity.setEntity(entity); - - nsEntity = updateEntityInAtlas(nsEntity); - } - return nsEntity; - } - - protected AtlasEntityWithExtInfo createOrUpdateTable(String nameSpace, String tableName, AtlasEntity nameSapceEntity, TableDescriptor htd, ColumnFamilyDescriptor[] hcdts) throws Exception { - String owner = htd.getOwnerString(); - String tblQualifiedName = getTableQualifiedName(metadataNamespace, nameSpace, tableName); - AtlasEntityWithExtInfo ret = findTableEntityInAtlas(tblQualifiedName); - - if (ret == null) { - LOG.info("Importing Table: " + tblQualifiedName); - - AtlasEntity entity = getTableEntity(nameSpace, tableName, owner, nameSapceEntity, htd, null); - - ret = createEntityInAtlas(new AtlasEntityWithExtInfo(entity)); - } else { - LOG.info("Table already present in Atlas. Updating it..: " + tblQualifiedName); - - AtlasEntity entity = getTableEntity(nameSpace, tableName, owner, nameSapceEntity, htd, ret.getEntity()); - - ret.setEntity(entity); - - ret = updateEntityInAtlas(ret); - } - - AtlasEntity tableEntity = ret.getEntity(); - - if (tableEntity != null) { - List cfEntities = createOrUpdateColumnFamilies(nameSpace, tableName, owner, hcdts, tableEntity); - - List cfIDs = new ArrayList<>(); - - if (CollectionUtils.isNotEmpty(cfEntities)) { - for (AtlasEntityWithExtInfo cfEntity : cfEntities) { - cfIDs.add(AtlasTypeUtil.getAtlasObjectId(cfEntity.getEntity())); - } - } - tableEntity.setRelationshipAttribute(COLUMN_FAMILIES, AtlasTypeUtil.getAtlasRelatedObjectIdList(cfIDs, HBaseAtlasHook.RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES)); - } - - return ret; - } - - protected List createOrUpdateColumnFamilies(String nameSpace, String tableName, String owner, ColumnFamilyDescriptor[] hcdts , AtlasEntity tableEntity) throws Exception { - List ret = new ArrayList<>(); - - if (hcdts != null) { - AtlasObjectId tableId = AtlasTypeUtil.getAtlasObjectId(tableEntity); - - for (ColumnFamilyDescriptor columnFamilyDescriptor : hcdts) { - String cfName = columnFamilyDescriptor.getNameAsString(); - String cfQualifiedName = getColumnFamilyQualifiedName(metadataNamespace, nameSpace, tableName, cfName); - AtlasEntityWithExtInfo cfEntity = findColumnFamiltyEntityInAtlas(cfQualifiedName); - - if (cfEntity == null) { - LOG.info("Importing Column-family: " + cfQualifiedName); - - AtlasEntity entity = getColumnFamilyEntity(nameSpace, tableName, owner, columnFamilyDescriptor, tableId, null); - - cfEntity = createEntityInAtlas(new AtlasEntityWithExtInfo(entity)); - } else { - LOG.info("ColumnFamily already present in Atlas. 
Updating it..: " + cfQualifiedName); - - AtlasEntity entity = getColumnFamilyEntity(nameSpace, tableName, owner, columnFamilyDescriptor, tableId, cfEntity.getEntity()); - - cfEntity.setEntity(entity); - - cfEntity = updateEntityInAtlas(cfEntity); - } - - ret.add(cfEntity); - } - } - - return ret; - } - - private AtlasEntityWithExtInfo findNameSpaceEntityInAtlas(String nsQualifiedName) { - AtlasEntityWithExtInfo ret = null; - - try { - ret = findEntityInAtlas(HBaseDataTypes.HBASE_NAMESPACE.getName(), nsQualifiedName); - clearRelationshipAttributes(ret); - } catch (Exception e) { - ret = null; // entity doesn't exist in Atlas - } - - return ret; - } - - private AtlasEntityWithExtInfo findTableEntityInAtlas(String tableQualifiedName) { - AtlasEntityWithExtInfo ret = null; - - try { - ret = findEntityInAtlas(HBaseDataTypes.HBASE_TABLE.getName(), tableQualifiedName); - clearRelationshipAttributes(ret); - } catch (Exception e) { - ret = null; // entity doesn't exist in Atlas - } - - return ret; - } - - private AtlasEntityWithExtInfo findColumnFamiltyEntityInAtlas(String columnFamilyQualifiedName) { - AtlasEntityWithExtInfo ret = null; - - try { - ret = findEntityInAtlas(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(), columnFamilyQualifiedName); - clearRelationshipAttributes(ret); - } catch (Exception e) { - ret = null; // entity doesn't exist in Atlas - } - - return ret; - } - - private AtlasEntityWithExtInfo findEntityInAtlas(String typeName, String qualifiedName) throws Exception { - Map attributes = Collections.singletonMap(QUALIFIED_NAME, qualifiedName); - - return atlasClientV2.getEntityByAttribute(typeName, attributes); - } - - private AtlasEntity getNameSpaceEntity(String nameSpace, AtlasEntity nsEtity) { - AtlasEntity ret = null ; - - if (nsEtity == null) { - ret = new AtlasEntity(HBaseDataTypes.HBASE_NAMESPACE.getName()); - } else { - ret = nsEtity; - } - - String qualifiedName = getNameSpaceQualifiedName(metadataNamespace, nameSpace); - - ret.setAttribute(QUALIFIED_NAME, qualifiedName); - ret.setAttribute(CLUSTERNAME, metadataNamespace); - ret.setAttribute(NAME, nameSpace); - ret.setAttribute(DESCRIPTION_ATTR, nameSpace); - - return ret; - } - - private AtlasEntity getTableEntity(String nameSpace, String tableName, String owner, AtlasEntity nameSpaceEntity, TableDescriptor htd, AtlasEntity atlasEntity) { - AtlasEntity ret = null; - - if (atlasEntity == null) { - ret = new AtlasEntity(HBaseDataTypes.HBASE_TABLE.getName()); - } else { - ret = atlasEntity; - } - - String tableQualifiedName = getTableQualifiedName(metadataNamespace, nameSpace, tableName); - - ret.setAttribute(QUALIFIED_NAME, tableQualifiedName); - ret.setAttribute(CLUSTERNAME, metadataNamespace); - ret.setRelationshipAttribute(NAMESPACE, AtlasTypeUtil.getAtlasRelatedObjectId(nameSpaceEntity, HBaseAtlasHook.RELATIONSHIP_HBASE_TABLE_NAMESPACE)); - ret.setAttribute(NAME, tableName); - ret.setAttribute(DESCRIPTION_ATTR, tableName); - ret.setAttribute(OWNER, owner); - ret.setAttribute(URI, tableName); - ret.setAttribute(ATTR_TABLE_MAX_FILESIZE, htd.getMaxFileSize()); - ret.setAttribute(ATTR_TABLE_REPLICATION_PER_REGION, htd.getRegionReplication()); - ret.setAttribute(ATTR_TABLE_ISREADONLY, htd.isReadOnly()); - ret.setAttribute(ATTR_TABLE_ISCOMPACTION_ENABLED, htd.isCompactionEnabled()); - ret.setAttribute(ATTR_TABLE_DURABLILITY, (htd.getDurability() != null ? 
htd.getDurability().name() : null)); - ret.setAttribute(ATTR_TABLE_NORMALIZATION_ENABLED, htd.isNormalizationEnabled()); - - return ret; - } - - private AtlasEntity getColumnFamilyEntity(String nameSpace, String tableName, String owner, ColumnFamilyDescriptor hcdt, AtlasObjectId tableId, AtlasEntity atlasEntity){ - AtlasEntity ret = null; - - if (atlasEntity == null) { - ret = new AtlasEntity(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName()); - } else { - ret = atlasEntity; - } - - String cfName = hcdt.getNameAsString(); - String cfQualifiedName = getColumnFamilyQualifiedName(metadataNamespace, nameSpace, tableName, cfName); - - ret.setAttribute(QUALIFIED_NAME, cfQualifiedName); - ret.setAttribute(CLUSTERNAME, metadataNamespace); - ret.setRelationshipAttribute(TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(tableId, HBaseAtlasHook.RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES)); - ret.setAttribute(NAME, cfName); - ret.setAttribute(DESCRIPTION_ATTR, cfName); - ret.setAttribute(OWNER, owner); - ret.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, hcdt.isBlockCacheEnabled()); - ret.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (hcdt.getBloomFilterType() != null ? hcdt.getBloomFilterType().name():null)); - ret.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, hcdt.isCacheBloomsOnWrite()); - ret.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, hcdt.isCacheDataOnWrite()); - ret.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, hcdt.isCacheIndexesOnWrite()); - ret.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, (hcdt.getCompactionCompressionType() != null ? hcdt.getCompactionCompressionType().name():null)); - ret.setAttribute(ATTR_CF_COMPRESSION_TYPE, (hcdt.getCompressionType() != null ? hcdt.getCompressionType().name():null)); - ret.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, (hcdt.getDataBlockEncoding() != null ? hcdt.getDataBlockEncoding().name():null)); - ret.setAttribute(ATTR_CF_ENCRYPTION_TYPE, hcdt.getEncryptionType()); - ret.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, hcdt.isEvictBlocksOnClose()); - ret.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( hcdt.getKeepDeletedCells() != null ? hcdt.getKeepDeletedCells().name():null)); - ret.setAttribute(ATTR_CF_MAX_VERSIONS, hcdt.getMaxVersions()); - ret.setAttribute(ATTR_CF_MIN_VERSIONS, hcdt.getMinVersions()); - ret.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, hcdt.isPrefetchBlocksOnOpen()); - ret.setAttribute(ATTR_CF_TTL, hcdt.getTimeToLive()); - ret.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, (hcdt.getInMemoryCompaction() != null ? hcdt.getInMemoryCompaction().name():null)); - ret.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, ( hcdt.getMobCompactPartitionPolicy() != null ? 
hcdt.getMobCompactPartitionPolicy().name():null)); - ret.setAttribute(ATTR_CF_MOB_ENABLED,hcdt.isMobEnabled()); - ret.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR,hcdt.isNewVersionBehavior()); - - return ret; - } - - private AtlasEntityWithExtInfo createEntityInAtlas(AtlasEntityWithExtInfo entity) throws Exception { - AtlasEntityWithExtInfo ret = null; - EntityMutationResponse response = atlasClientV2.createEntity(entity); - List entities = response.getCreatedEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - AtlasEntityWithExtInfo getByGuidResponse = atlasClientV2.getEntityByGuid(entities.get(0).getGuid()); - - ret = getByGuidResponse; - - LOG.info("Created {} entity: name={}, guid={}", ret.getEntity().getTypeName(), ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), ret.getEntity().getGuid()); - } - return ret; - } - - private AtlasEntityWithExtInfo updateEntityInAtlas(AtlasEntityWithExtInfo entity) throws Exception { - AtlasEntityWithExtInfo ret = null; - EntityMutationResponse response = atlasClientV2.updateEntity(entity); - - if (response != null) { - List entities = response.getUpdatedEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - AtlasEntityWithExtInfo getByGuidResponse = atlasClientV2.getEntityByGuid(entities.get(0).getGuid()); - - ret = getByGuidResponse; - - LOG.info("Updated {} entity: name={}, guid={} ", ret.getEntity().getTypeName(), ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), ret.getEntity().getGuid()); - } else { - LOG.info("Entity: name={} ", entity.toString() + " not updated as it is unchanged from what is in Atlas" ); - ret = entity; - } - } else { - LOG.info("Entity: name={} ", entity.toString() + " not updated as it is unchanged from what is in Atlas" ); - ret = entity; - } - - return ret; - } - - /** - * Construct the qualified name used to uniquely identify a ColumnFamily instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs - * @param nameSpace Name of the Hbase database to which the Table belongs - * @param tableName Name of the Hbase table - * @param columnFamily Name of the ColumnFamily - * @return Unique qualified name to identify the Table instance in Atlas. - */ - private static String getColumnFamilyQualifiedName(String metadataNamespace, String nameSpace, String tableName, String columnFamily) { - tableName = stripNameSpace(tableName); - return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, nameSpace, tableName, columnFamily, metadataNamespace); - } - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs - * @param nameSpace Name of the Hbase database to which the Table belongs - * @param tableName Name of the Hbase table - * @return Unique qualified name to identify the Table instance in Atlas. - */ - private static String getTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) { - tableName = stripNameSpace(tableName); - return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace, tableName, metadataNamespace); - } - - /** - * Construct the qualified name used to uniquely identify a Hbase NameSpace instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hbase component belongs - * @param nameSpace Name of the NameSpace - * @return Unique qualified name to identify the HBase NameSpace instance in Atlas. 
- */ - private static String getNameSpaceQualifiedName(String metadataNamespace, String nameSpace) { - return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace, metadataNamespace); - } - - private static String stripNameSpace(String tableName){ - tableName = tableName.substring(tableName.indexOf(":")+1); - - return tableName; - } - - private static void printUsage() { - System.out.println("Usage 1: import-hbase.sh [-n OR --namespace ] [-t
OR --table
]"); - System.out.println("Usage 2: import-hbase.sh [-f ]" ); - System.out.println(" Format:"); - System.out.println(" namespace1:tbl1"); - System.out.println(" namespace1:tbl2"); - System.out.println(" namespace2:tbl1"); - } - - private void clearRelationshipAttributes(AtlasEntityWithExtInfo entity) { - if (entity != null) { - clearRelationshipAttributes(entity.getEntity()); - - if (entity.getReferredEntities() != null) { - clearRelationshipAttributes(entity.getReferredEntities().values()); - } - } - } - - private void clearRelationshipAttributes(Collection entities) { - if (entities != null) { - for (AtlasEntity entity : entities) { - clearRelationshipAttributes(entity); - } - } - } - - private void clearRelationshipAttributes(AtlasEntity entity) { - if (entity != null && entity.getRelationshipAttributes() != null) { - entity.getRelationshipAttributes().clear(); - } - } -} diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java deleted file mode 100644 index 313132de6c..0000000000 --- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase.hook; - - -import org.apache.atlas.hbase.bridge.HBaseAtlasHook; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.client.RegionInfo; -import org.apache.hadoop.hbase.client.SnapshotDescription; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; -import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver; -import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; -import org.apache.hadoop.hbase.coprocessor.MasterObserver; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.coprocessor.RegionObserver; -import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; - -public class HBaseAtlasCoprocessor implements MasterCoprocessor, MasterObserver, RegionObserver, RegionServerObserver { - private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasCoprocessor.class); - - final HBaseAtlasHook hbaseAtlasHook; - - public HBaseAtlasCoprocessor() { - hbaseAtlasHook = HBaseAtlasHook.getInstance(); - } - - @Override - public void postCreateTable(ObserverContext observerContext, TableDescriptor tableDescriptor, RegionInfo[] hRegionInfos) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postCreateTable()"); - - hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext); - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()"); - } - } - - @Override - public void postDeleteTable(ObserverContext observerContext, TableName tableName) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postDeleteTable()"); - hbaseAtlasHook.sendHBaseTableOperation(null, tableName, HBaseAtlasHook.OPERATION.DELETE_TABLE, observerContext); - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()"); - } - } - - @Override - public void postModifyTable(ObserverContext observerContext, TableName tableName, TableDescriptor tableDescriptor) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postModifyTable()"); - hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, tableName, HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext); - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()"); - } - } - - @Override - public void postCreateNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postCreateNamespace()"); - - hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_NAMESPACE, observerContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()"); - } - } - - @Override - public void postDeleteNamespace(ObserverContext observerContext, String s) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postDeleteNamespace()"); - - hbaseAtlasHook.sendHBaseNameSpaceOperation(null, s, HBaseAtlasHook.OPERATION.DELETE_NAMESPACE, 
observerContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()"); - } - } - - @Override - public void postModifyNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postModifyNamespace()"); - - hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.ALTER_NAMESPACE, observerContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()"); - } - } - - @Override - public void postCloneSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postCloneSnapshot()"); - - hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()"); - } - } - - @Override - public void postRestoreSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException { - LOG.info("==> HBaseAtlasCoprocessor.postRestoreSnapshot()"); - - hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, snapshot.getTableName(), HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()"); - } - } - -} - - diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseDataTypes.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseDataTypes.java deleted file mode 100644 index b83e1b54ba..0000000000 --- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseDataTypes.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase.model; - -/** - * HBASE Data Types for model and bridge. - */ -public enum HBaseDataTypes { - // Classes - HBASE_NAMESPACE, - HBASE_TABLE, - HBASE_COLUMN_FAMILY, - HBASE_COLUMN; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java deleted file mode 100644 index 1ef7c07dec..0000000000 --- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java +++ /dev/null @@ -1,174 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase.model; - -import org.apache.atlas.hbase.bridge.HBaseAtlasHook; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.security.UserGroupInformation; - -import java.util.ArrayList; -import java.util.List; - -import java.util.Map; - -public class HBaseOperationContext { - private final UserGroupInformation ugi; - private final Map hbaseConf; - private final HBaseAtlasHook.OPERATION operation; - private final String user; - private final NamespaceDescriptor namespaceDescriptor; - private final TableDescriptor tableDescriptor; - private final ColumnFamilyDescriptor[] columnFamilyDescriptors; - private final TableName tableName; - private final String nameSpace; - private final String columnFamily; - private final String owner; - private final ColumnFamilyDescriptor columnFamilyDescriptor; - - public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, TableDescriptor tableDescriptor, TableName tableName, ColumnFamilyDescriptor[] columnFamilyDescriptors, - ColumnFamilyDescriptor columnFamilyDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner, - Map hbaseConf) { - this.namespaceDescriptor = namespaceDescriptor; - this.nameSpace = nameSpace; - this.tableDescriptor = tableDescriptor; - this.tableName = tableName; - this.columnFamilyDescriptors = columnFamilyDescriptors; - this.columnFamilyDescriptor = columnFamilyDescriptor; - this.columnFamily = columnFamily; - this.operation = operation; - this.ugi = ugi; - this.user = user; - this.owner = owner; - this.hbaseConf = hbaseConf; - } - - public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner) { - this(namespaceDescriptor, nameSpace, null, null, null, null, null, operation, ugi, user, owner, null); - } - - public HBaseOperationContext(String nameSpace, TableDescriptor tableDescriptor, TableName tableName, ColumnFamilyDescriptor[] columnFamilyDescriptors, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, String owner, Map hbaseConf) { - this(null, nameSpace, tableDescriptor, tableName, columnFamilyDescriptors, null, null, operation, ugi, user, owner, hbaseConf); - } - - public HBaseOperationContext(String nameSpace, TableName tableName, ColumnFamilyDescriptor columnFamilyDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, String owner, Map hbaseConf) { - this(null, nameSpace, null, tableName, null, columnFamilyDescriptor, columnFamily, operation, ugi, user, owner, hbaseConf); - } - - private List messages = new ArrayList<>(); - - public UserGroupInformation getUgi() { - return ugi; - } - - public Map getHbaseConf() { - return hbaseConf; - } - - public 
String getUser() { - return user; - } - - public HBaseAtlasHook.OPERATION getOperation() { - return operation; - } - - public NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor; - } - - public TableDescriptor gethTableDescriptor() { - return tableDescriptor; - } - - public ColumnFamilyDescriptor[] gethColumnDescriptors() { - return columnFamilyDescriptors; - } - - public TableName getTableName() { - return tableName; - } - - public String getNameSpace() { - return nameSpace; - } - - public ColumnFamilyDescriptor gethColumnDescriptor() { - return columnFamilyDescriptor; - } - - public String getColummFamily() { - return columnFamily; - } - - public void addMessage(HookNotification message) { - messages.add(message); - } - - public String getOwner() { - return owner; - } - - public List getMessages() { - return messages; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - toString(sb); - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("HBaseOperationContext={"); - sb.append("Operation={").append(operation).append("} "); - sb.append("User ={").append(user).append("} "); - if (nameSpace != null ) { - sb.append("NameSpace={").append(nameSpace).append("}"); - } else { - if (namespaceDescriptor != null) { - sb.append("NameSpace={").append(namespaceDescriptor.toString()).append("}"); - } - } - if (tableName != null ) { - sb.append("Table={").append(tableName).append("}"); - } else { - if ( columnFamilyDescriptor != null) { - sb.append("Table={").append(tableDescriptor.toString()).append("}"); - } - } - if (columnFamily != null ) { - sb.append("Columm Family={").append(columnFamily).append("}"); - } else { - if ( columnFamilyDescriptor != null) { - sb.append("Columm Family={").append(columnFamilyDescriptor.toString()).append("}"); - } - } - sb.append("Message ={").append(getMessages()).append("} "); - sb.append(" }"); - return sb; - } - -} diff --git a/addons/hbase-bridge/src/main/resources/atlas-hbase-import-log4j.xml b/addons/hbase-bridge/src/main/resources/atlas-hbase-import-log4j.xml deleted file mode 100644 index 3fc2dcf9c3..0000000000 --- a/addons/hbase-bridge/src/main/resources/atlas-hbase-import-log4j.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java b/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java deleted file mode 100644 index e346788713..0000000000 --- a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java +++ /dev/null @@ -1,307 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hbase; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.hbase.bridge.HBaseAtlasHook; -import org.apache.atlas.hbase.model.HBaseDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.IOException; -import java.net.ServerSocket; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; -import static org.testng.AssertJUnit.assertFalse; - - -public class HBaseAtlasHookIT { - private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasHookIT.class); - protected static final String ATLAS_URL = "http://localhost:31000/"; - protected static final String CLUSTER_NAME = "primary"; - - private HBaseTestingUtility utility; - private int port; - private AtlasClientV2 atlasClient; - - - @BeforeClass - public void setUp() { - try { - createHBaseCluster(); - createAtlasClient(); - } catch (Exception e) { - LOG.error("Unable to create Hbase Admin for Testing ", e); - } - } - - @AfterClass - public void cleanup() throws Exception { - LOG.info("Stopping mini cluster.. 
"); - utility.shutdownMiniCluster(); - } - - @Test - public void testGetMetaTableRows() throws Exception { - List results = utility.getMetaTableRows(); - assertFalse("results should have some entries and is empty.", results.isEmpty()); - } - - @Test (enabled = false) - public void testCreateNamesapce() throws Exception { - final Configuration conf = HBaseConfiguration.create(); - - conf.set("hbase.zookeeper.quorum", "localhost"); - conf.set("hbase.zookeeper.property.clientPort", String.valueOf(port)); - conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); - NamespaceDescriptor ns = NamespaceDescriptor.create("test_namespace").build(); - - admin.createNamespace(ns); - - //assert on qualified name - String nameSpace = assertNameSpaceIsRegistered(ns.getName()); - AtlasClientV2 atlasClient = getAtlasClient(); - - if (atlasClient != null) { - AtlasEntityWithExtInfo nameSpaceRef = atlasClient.getEntityByGuid(nameSpace); - String nameSpaceQualifiedName = HBaseAtlasHook.getNameSpaceQualifiedName(CLUSTER_NAME, ns.getName()); - - Assert.assertEquals(nameSpaceRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), nameSpaceQualifiedName); - } else { - Assert.fail("Unable to create AtlasClient for Testing"); - } - } - - @Test (enabled = false) - public void testCreateTable() throws Exception { - final Configuration conf = HBaseConfiguration.create(); - - conf.set("hbase.zookeeper.quorum", "localhost"); - conf.set("hbase.zookeeper.property.clientPort", String.valueOf(port)); - conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); - String namespace = "test_namespace1"; - String tablename = "test_table"; - - // Create a table - if (!admin.tableExists(TableName.valueOf(namespace, tablename))) { - NamespaceDescriptor ns = NamespaceDescriptor.create(namespace).build(); - - admin.createNamespace(ns); - - HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(namespace, tablename)); - - tableDescriptor.addFamily(new HColumnDescriptor("colfam1")); - - admin.createTable(tableDescriptor); - } - - //assert on qualified name - String table = assertTableIsRegistered(namespace, tablename); - AtlasClientV2 atlasClient = getAtlasClient(); - - if (atlasClient != null) { - AtlasEntityWithExtInfo tableRef = atlasClient.getEntityByGuid(table); - String entityName = HBaseAtlasHook.getTableQualifiedName(CLUSTER_NAME, namespace, tablename); - - Assert.assertEquals(tableRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), entityName); - } else { - Assert.fail("Unable to create AtlasClient for Testing"); - } - } - - // Methods for creating HBase - - private void createAtlasClient() { - try { - org.apache.commons.configuration.Configuration configuration = ApplicationProperties.get(); - String[] atlasEndPoint = configuration.getStringArray(HBaseAtlasHook.ATTR_ATLAS_ENDPOINT); - - configuration.setProperty("atlas.cluster.name", CLUSTER_NAME); - - if (atlasEndPoint == null || atlasEndPoint.length == 0) { - atlasEndPoint = new String[]{ATLAS_URL}; - } - - Iterator keys = configuration.getKeys(); - while (keys.hasNext()) { - String key = keys.next(); - LOG.info("{} = {} ", key, configuration.getString(key)); - } - - if (AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClient = new AtlasClientV2(atlasEndPoint); - } else { - atlasClient = new AtlasClientV2(configuration, 
atlasEndPoint, new String[]{"admin", "admin"}); - } - } catch (Exception e) { - LOG.error("Unable to create AtlasClient for Testing ", e); - } - } - - private static int getFreePort() throws IOException { - ServerSocket serverSocket = new ServerSocket(0); - int port = serverSocket.getLocalPort(); - - serverSocket.close(); - - return port; - } - - private void createHBaseCluster() throws Exception { - LOG.info("Creating Hbase Admin..."); - - port = getFreePort(); - utility = new HBaseTestingUtility(); - - utility.getConfiguration().set("test.hbase.zookeeper.property.clientPort", String.valueOf(port)); - utility.getConfiguration().set("hbase.master.port", String.valueOf(getFreePort())); - utility.getConfiguration().set("hbase.master.info.port", String.valueOf(getFreePort())); - utility.getConfiguration().set("hbase.regionserver.port", String.valueOf(getFreePort())); - utility.getConfiguration().set("hbase.regionserver.info.port", String.valueOf(getFreePort())); - utility.getConfiguration().set("zookeeper.znode.parent", "/hbase-unsecure"); - utility.getConfiguration().set("hbase.table.sanity.checks", "false"); - utility.getConfiguration().set("hbase.coprocessor.master.classes", "org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor"); - - utility.startMiniCluster(); - } - - - public AtlasClientV2 getAtlasClient() { - AtlasClientV2 ret = null; - if (atlasClient != null) { - ret = atlasClient; - } - return ret; - } - - protected String assertNameSpaceIsRegistered(String nameSpace) throws Exception { - return assertNameSpaceIsRegistered(nameSpace, null); - } - - protected String assertNameSpaceIsRegistered(String nameSpace, HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("Searching for nameSpace {}", nameSpace); - } - String nameSpaceQualifiedName = HBaseAtlasHook.getNameSpaceQualifiedName(CLUSTER_NAME, nameSpace); - return assertEntityIsRegistered(HBaseDataTypes.HBASE_NAMESPACE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - nameSpaceQualifiedName, assertPredicate); - } - - protected String assertTableIsRegistered(String nameSpace, String tableName) throws Exception { - return assertTableIsRegistered(nameSpace, tableName, null); - } - - protected String assertTableIsRegistered(String nameSpace, String tableName, HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("Searching for nameSpace:Table {} {}", nameSpace, tableName); - } - String tableQualifiedName = HBaseAtlasHook.getTableQualifiedName(CLUSTER_NAME, nameSpace, tableName); - return assertEntityIsRegistered(HBaseDataTypes.HBASE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName, - assertPredicate); - } - - public interface AssertPredicate { - void assertOnEntity(AtlasEntity entity) throws Exception; - } - - public interface Predicate { - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. 
- */ - void evaluate() throws Exception; - } - - - protected String assertEntityIsRegistered(final String typeName, final String property, final String value, - final HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception { - waitFor(30000, new HBaseAtlasHookIT.Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntityWithExtInfo entity = atlasClient.getEntityByAttribute(typeName, Collections.singletonMap(property, value)); - - assertNotNull(entity); - - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity.getEntity()); - } - } - }); - - AtlasEntityWithExtInfo entity = atlasClient.getEntityByAttribute(typeName, Collections.singletonMap(property, value)); - - return entity.getEntity().getGuid(); - } - - /** - * Wait for a condition, expressed via a {@link HBaseAtlasHookIT.Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. - */ - protected void waitFor(int timeout, HBaseAtlasHookIT.Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - predicate.evaluate(); - return; - } catch (Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - if (LOG.isDebugEnabled()) { - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - } - Thread.sleep(5000); - } - } - } - - -} diff --git a/addons/hbase-bridge/src/test/resources/atlas-application.properties b/addons/hbase-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 3b12e5fb33..0000000000 --- a/addons/hbase-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,125 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. 
-atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase -atlas.graph.index.search.solr.wait-searcher=true - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/hbase-bridge/src/test/resources/atlas-log4j.xml 
b/addons/hbase-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 2c9815ff54..0000000000 --- a/addons/hbase-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/hbase-bridge/src/test/resources/users-credentials.properties b/addons/hbase-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/hbase-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/hbase-testing-util/pom.xml b/addons/hbase-testing-util/pom.xml deleted file mode 100644 index 982e9c85ea..0000000000 --- a/addons/hbase-testing-util/pom.xml +++ /dev/null @@ -1,228 +0,0 @@ - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hbase-testing-util - Apache HBase - Testing Util - HBase Testing Utilities. - jar - - - 3.0.3 - 9.3.14.v20161028 - - - - - org.testng - testng - - - - org.apache.hbase - hbase-server - ${hbase.version} - compile - - - - org.apache.hbase - hbase-server - ${hbase.version} - test-jar - compile - - - - org.apache.hbase - hbase-zookeeper - ${hbase.version} - jar - compile - - - - org.apache.hbase - hbase-zookeeper - ${hbase.version} - test-jar - compile - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - compile - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test-jar - compile - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - junit - junit - - - - - - junit - junit - ${junit.version} - - - - org.apache.hadoop - hadoop-minicluster - ${hadoop.version} - compile - - - org.apache.htrace - htrace-core - - - - - org.apache.hadoop - hadoop-minikdc - ${hadoop.version} - - - - org.apache.hbase - hbase-hadoop-compat - ${hbase.version} - jar - compile - - - - org.apache.hbase - hbase-hadoop-compat - ${hbase.version} - test-jar - compile - - - - org.apache.hbase - hbase-hadoop2-compat - ${hbase.version} - jar - compile - - - com.github.stephenc.findbugs - findbugs-annotations - - - - - - org.apache.hbase - hbase-hadoop2-compat - ${hbase.version} - test-jar - compile - - - - - org.slf4j - slf4j-log4j12 - - - - org.apache.hbase - hbase-common - ${hbase.version} - jar - compile - - - com.github.stephenc.findbugs - findbugs-annotations - - - - - - org.apache.hbase - hbase-common - ${hbase.version} - test-jar - compile - - - - org.apache.hbase - hbase-annotations - ${hbase.version} - test-jar - compile - - - jdk.tools - jdk.tools - - - - - - org.apache.hbase - hbase-protocol - ${hbase.version} - jar - compile - - - - org.apache.hbase - hbase-client - ${hbase.version} - jar - compile - - - - - - - - diff --git a/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java b/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java deleted file mode 100644 index 0beb035521..0000000000 --- a/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or 
more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hbase; - -import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -import static org.testng.AssertJUnit.assertFalse; - - -/** - * Make sure we can spin up a HBTU without a hbase-site.xml - */ -public class TestHBaseTestingUtilSpinup { - private static final Logger LOG = LoggerFactory.getLogger(TestHBaseTestingUtilSpinup.class); - private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); - - @BeforeClass - public static void beforeClass() throws Exception { - UTIL.startMiniCluster(); - if (!UTIL.getHBaseCluster().waitForActiveAndReadyMaster(30000)) { - throw new RuntimeException("Active master not ready"); - } - } - - @AfterClass - public static void afterClass() throws Exception { - UTIL.shutdownMiniCluster(); - } - - @Test - public void testGetMetaTableRows() throws Exception { - List results = UTIL.getMetaTableRows(); - assertFalse("results should have some entries and is empty.", results.isEmpty()); - } - -} diff --git a/addons/hbase-testing-util/src/test/resources/atlas-log4j.xml b/addons/hbase-testing-util/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 47d4d5c7ce..0000000000 --- a/addons/hbase-testing-util/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/hdfs-model/pom.xml b/addons/hdfs-model/pom.xml deleted file mode 100644 index 05ba173684..0000000000 --- a/addons/hdfs-model/pom.xml +++ /dev/null @@ -1,127 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hdfs-model - Apache Atlas FileSystem Model - Apache Atlas FileSystem Model - jar - - - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-notification - - - - org.mockito - mockito-all - - - - - - - org.testng - testng - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - org.apache.atlas - atlas-repository - test - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - - - diff --git a/addons/hive-bridge-shim/pom.xml b/addons/hive-bridge-shim/pom.xml deleted file mode 100755 index 
849ca2a8f6..0000000000 --- a/addons/hive-bridge-shim/pom.xml +++ /dev/null @@ -1,47 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hive-bridge-shim - Apache Atlas Hive Bridge Shim Module - Apache Atlas Hive Bridge Shim - jar - - - - - org.apache.atlas - atlas-plugin-classloader - - - - org.apache.hive - hive-exec - ${hive.version} - provided - - - diff --git a/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveHook.java deleted file mode 100755 index 2a4d067e59..0000000000 --- a/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveHook.java +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook; - - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; -import org.apache.hadoop.hive.ql.hooks.HookContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Hive hook used for atlas entity registration. 
- */ -public class HiveHook implements ExecuteWithHookContext { - private static final Logger LOG = LoggerFactory.getLogger(HiveHook.class); - - private static final String ATLAS_PLUGIN_TYPE = "hive"; - private static final String ATLAS_HIVE_HOOK_IMPL_CLASSNAME = "org.apache.atlas.hive.hook.HiveHook"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private ExecuteWithHookContext hiveHookImpl = null; - - public HiveHook() { - this.initialize(); - } - - @Override - public void run(final HookContext hookContext) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveHook.run({})", hookContext); - } - - try { - activatePluginClassLoader(); - hiveHookImpl.run(hookContext); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveHook.run({})", hookContext); - } - } - - private void initialize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveHook.initialize()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = (Class) Class - .forName(ATLAS_HIVE_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - hiveHookImpl = cls.newInstance(); - } catch (Exception excp) { - LOG.error("Error instantiating Atlas hook implementation", excp); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveHook.initialize()"); - } - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} diff --git a/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHook.java b/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHook.java deleted file mode 100644 index 2894e99bdd..0000000000 --- a/addons/hive-bridge-shim/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHook.java +++ /dev/null @@ -1,199 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook; - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.MetaStoreEventListener; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.events.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Hive Metastore hook to capture DDL operations for atlas entity registration. 
- */ -public class HiveMetastoreHook extends MetaStoreEventListener { - private static final String ATLAS_PLUGIN_TYPE = "hive"; - private static final String ATLAS_HIVE_METASTORE_HOOK_IMPL_CLASSNAME = "org.apache.atlas.hive.hook.HiveMetastoreHookImpl"; - public static final Logger LOG = LoggerFactory.getLogger(HiveMetastoreHook.class); - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private MetaStoreEventListener atlasMetastoreHookImpl = null; - private Configuration config; - - public HiveMetastoreHook(Configuration config) { - super(config); - - this.config = config; - - this.initialize(); - } - - private void initialize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.initialize()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = (Class) - Class.forName(ATLAS_HIVE_METASTORE_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - atlasMetastoreHookImpl = cls.getDeclaredConstructor(Configuration.class).newInstance(config); - } catch (Exception ex) { - LOG.error("Error instantiating Atlas hook implementation", ex); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.initialize()"); - } - } - - @Override - public void onCreateTable(CreateTableEvent tableEvent) throws MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onCreateTable()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onCreateTable(tableEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onCreateTable()"); - } - } - - @Override - public void onDropTable(DropTableEvent tableEvent) throws MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onDropTable()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onDropTable(tableEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onDropTable()"); - } - } - - @Override - public void onAlterTable(AlterTableEvent tableEvent) throws MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onAlterTable()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onAlterTable(tableEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onAlterTable()"); - } - } - - @Override - public void onCreateDatabase(CreateDatabaseEvent dbEvent) throws MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onCreateDatabase()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onCreateDatabase(dbEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onCreateDatabase()"); - } - } - - @Override - public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onDropDatabase()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onDropDatabase(dbEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onDropDatabase()"); - } - } - - @Override - public void onAlterDatabase(AlterDatabaseEvent dbEvent) throws 
MetaException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetastoreHook.onAlterDatabase()"); - } - - try { - activatePluginClassLoader(); - - atlasMetastoreHookImpl.onAlterDatabase(dbEvent); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetastoreHook.onAlterDatabase()"); - } - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} \ No newline at end of file diff --git a/addons/hive-bridge/pom.xml b/addons/hive-bridge/pom.xml deleted file mode 100755 index 8c3636e5d6..0000000000 --- a/addons/hive-bridge/pom.xml +++ /dev/null @@ -1,535 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - hive-bridge - Apache Atlas Hive Bridge Module - Apache Atlas Hive Bridge - jar - - - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - - - org.apache.hive - hive-metastore - ${hive.version} - provided - - - org.mortbay.jetty - * - - - org.eclipse.jetty - * - - - com.github.stephenc.findbugs - findbugs-annotations - - - - - - - org.apache.hive - hive-exec - ${hive.version} - provided - - - javax.servlet - * - - - - - - org.apache.hive - hive-jdbc - ${hive.version} - test - - - javax.servlet - * - - - javax.ws.rs - * - - - org.eclipse.jetty - * - - - - - - org.apache.hive - hive-cli - ${hive.version} - test - - - javax.servlet - * - - - org.eclipse.jetty.aggregate - * - - - - - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-client-v2 - ${project.version} - - - - org.apache.atlas - atlas-notification - - - - - org.apache.atlas - atlas-webapp - war - test - - - - org.apache.hadoop - hadoop-client - - - javax.servlet - servlet-api - - - org.eclipse.jetty - * - - - - - - org.apache.hadoop - hadoop-annotations - - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - - org.testng - testng - - - - org.mockito - mockito-all - - - - org.eclipse.jetty - jetty-server - test - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - org.apache.atlas - atlas-intg - tests - test - - - - org.apache.atlas - atlas-repository - tests - test - - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - test - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/hive/atlas-hive-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey.contribs - jersey-multipart - ${jersey.version} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - commons-configuration - 
commons-configuration - ${commons-conf.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/hive - false - false - true - - - ${project.groupId} - hive-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - ${debug.jetty.daemon} - ${project.build.testOutputDirectory} - true - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/0010-base_model.json - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - - diff --git a/addons/hive-bridge/src/bin/import-hive.sh b/addons/hive-bridge/src/bin/import-hive.sh deleted file mode 100755 index ebe6976f0e..0000000000 --- a/addons/hive-bridge/src/bin/import-hive.sh +++ /dev/null @@ -1,173 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. See accompanying LICENSE file. 
-# -# resolve links - $0 may be a softlink -PRG="${0}" - -[[ `uname -s` == *"CYGWIN"* ]] && CYGWIN=true - -while [ -h "${PRG}" ]; do - ls=`ls -ld "${PRG}"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "${PRG}"`/"$link" - fi -done - -BASEDIR=`dirname ${PRG}` -BASEDIR=`cd ${BASEDIR}/..;pwd` - -if test -z "${JAVA_HOME}" -then - JAVA_BIN=`which java` - JAR_BIN=`which jar` -else - JAVA_BIN="${JAVA_HOME}/bin/java" - JAR_BIN="${JAVA_HOME}/bin/jar" -fi -export JAVA_BIN - -if [ ! -e "${JAVA_BIN}" ] || [ ! -e "${JAR_BIN}" ]; then - echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available." - exit 1 -fi - -# Construct Atlas classpath using jars from hook/hive/atlas-hive-plugin-impl/ directory. -for i in "${BASEDIR}/hook/hive/atlas-hive-plugin-impl/"*.jar; do - ATLASCPPATH="${ATLASCPPATH}:$i" -done - -if [ -z "${ATLAS_CONF_DIR}" ] && [ -e /etc/atlas/conf ];then - ATLAS_CONF_DIR=/etc/atlas/conf -fi -ATLASCPPATH=${ATLASCPPATH}:${ATLAS_CONF_DIR} - -# log dir for applications -ATLAS_LOG_DIR="${ATLAS_LOG_DIR:-/var/log/atlas}" -export ATLAS_LOG_DIR -LOGFILE="$ATLAS_LOG_DIR/import-hive.log" - -TIME=`date +%Y%m%d%H%M%s` - -#Add hive conf in classpath -if [ ! -z "$HIVE_CONF_DIR" ]; then - HIVE_CONF=$HIVE_CONF_DIR -elif [ ! -z "$HIVE_HOME" ]; then - HIVE_CONF="$HIVE_HOME/conf" -elif [ -e /etc/hive/conf ]; then - HIVE_CONF="/etc/hive/conf" -else - echo "Could not find a valid HIVE configuration" - exit 1 -fi - -echo Using Hive configuration directory ["$HIVE_CONF"] - - -if [ -f "${HIVE_CONF}/hive-env.sh" ]; then - . "${HIVE_CONF}/hive-env.sh" -fi - -if [ -z "$HIVE_HOME" ]; then - if [ -d "${BASEDIR}/../hive" ]; then - HIVE_HOME=${BASEDIR}/../hive - else - echo "Please set HIVE_HOME to the root of Hive installation" - exit 1 - fi -fi - -HIVE_CP="${HIVE_CONF}" -# Multiple jars in HIVE_CP_EXCLUDE_LIST can be added using "\|" separator -# Ex: HIVE_CP_EXCLUDE_LIST="javax.ws.rs-api\|jersey-multipart" -# exclude log4j libs from hive classpath to avoid conflict -HIVE_CP_EXCLUDE_LIST="javax.ws.rs-api\|log4j-slf4j-impl\|log4j-1.2-api\|log4j-api\|log4j-core\|log4j-web" - -for i in $(find "${HIVE_HOME}/lib/" -name "*.jar" | grep -v "$HIVE_CP_EXCLUDE_LIST"); do - HIVE_CP="${HIVE_CP}:$i" -done - -#Add hadoop conf in classpath -if [ ! -z "$HADOOP_CLASSPATH" ]; then - HADOOP_CP=$HADOOP_CLASSPATH -elif [ ! -z "$HADOOP_HOME" ]; then - HADOOP_CP=`$HADOOP_HOME/bin/hadoop classpath` -elif [ $(command -v hadoop) ]; then - HADOOP_CP=`hadoop classpath` - echo $HADOOP_CP -else - echo "Environment variable HADOOP_CLASSPATH or HADOOP_HOME need to be set" - exit 1 -fi - -CP="${HIVE_CP}:${HADOOP_CP}:${ATLASCPPATH}" - -# If running in cygwin, convert pathnames and classpath to Windows format. 
-if [ "${CYGWIN}" == "true" ] -then - ATLAS_LOG_DIR=`cygpath -w ${ATLAS_LOG_DIR}` - LOGFILE=`cygpath -w ${LOGFILE}` - HIVE_CP=`cygpath -w ${HIVE_CP}` - HADOOP_CP=`cygpath -w ${HADOOP_CP}` - CP=`cygpath -w -p ${CP}` -fi - -JAVA_PROPERTIES="$ATLAS_OPTS -Datlas.log.dir=$ATLAS_LOG_DIR -Datlas.log.file=import-hive.log --Dlog4j.configuration=atlas-hive-import-log4j.xml" - -IMPORT_ARGS= -JVM_ARGS= - -while true -do - option=$1 - shift - - case "$option" in - -d) IMPORT_ARGS="$IMPORT_ARGS -d $1"; shift;; - -t) IMPORT_ARGS="$IMPORT_ARGS -t $1"; shift;; - -f) IMPORT_ARGS="$IMPORT_ARGS -f $1"; shift;; - -o) IMPORT_ARGS="$IMPORT_ARGS -o $1"; shift;; - -i) IMPORT_ARGS="$IMPORT_ARGS -i";; - -h) export HELP_OPTION="true"; IMPORT_ARGS="$IMPORT_ARGS -h";; - --database) IMPORT_ARGS="$IMPORT_ARGS --database $1"; shift;; - --table) IMPORT_ARGS="$IMPORT_ARGS --table $1"; shift;; - --filename) IMPORT_ARGS="$IMPORT_ARGS --filename $1"; shift;; - --output) IMPORT_ARGS="$IMPORT_ARGS --output $1"; shift;; - --ignoreBulkImport) IMPORT_ARGS="$IMPORT_ARGS --ignoreBulkImport";; - --help) export HELP_OPTION="true"; IMPORT_ARGS="$IMPORT_ARGS --help";; - -deleteNonExisting) IMPORT_ARGS="$IMPORT_ARGS -deleteNonExisting";; - "") break;; - *) IMPORT_ARGS="$IMPORT_ARGS $option" - esac -done - -JAVA_PROPERTIES="${JAVA_PROPERTIES} ${JVM_ARGS}" - -if [ -z ${HELP_OPTION} ]; then - echo "Log file for import is $LOGFILE" -fi - -"${JAVA_BIN}" ${JAVA_PROPERTIES} -cp "${CP}" org.apache.atlas.hive.bridge.HiveMetaStoreBridge $IMPORT_ARGS - -RETVAL=$? -if [ -z ${HELP_OPTION} ]; then - [ $RETVAL -eq 0 ] && echo Hive Meta Data imported successfully! - [ $RETVAL -eq 1 ] && echo Failed to import Hive Meta Data! Check logs at: $LOGFILE for details. -fi - -exit $RETVAL - diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java deleted file mode 100755 index 28365bc5c3..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java +++ /dev/null @@ -1,1264 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.hive.bridge; - -import com.google.common.annotations.VisibleForTesting; -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.hive.hook.events.BaseHiveEvent; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.hook.AtlasHookException; -import org.apache.atlas.model.discovery.AtlasSearchResult; -import org.apache.atlas.model.discovery.SearchParameters; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.instance.EntityMutations; -import org.apache.atlas.utils.AtlasPathExtractorUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.HdfsNameServiceResolver; -import org.apache.atlas.utils.AtlasConfigurationUtil; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.utils.PathExtractorContext; - -import org.apache.commons.cli.BasicParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.MissingArgumentException; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.InvalidTableException; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.*; - -/** - * A Bridge Utility that imports metadata from the Hive Meta Store - * and registers them in Atlas. 
- */ - -public class HiveMetaStoreBridge { - private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridge.class); - - public static final String CONF_PREFIX = "atlas.hook.hive."; - public static final String CLUSTER_NAME_KEY = "atlas.cluster.name"; - public static final String HIVE_USERNAME = "atlas.hook.hive.default.username"; - public static final String HIVE_METADATA_NAMESPACE = "atlas.metadata.namespace"; - public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase"; - public static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION = CONF_PREFIX + "aws_s3.atlas.model.version"; - public static final String DEFAULT_CLUSTER_NAME = "primary"; - public static final String TEMP_TABLE_PREFIX = "_temp-"; - public static final String ATLAS_ENDPOINT = "atlas.rest.address"; - public static final String SEP = ":".intern(); - public static final String HDFS_PATH = "hdfs_path"; - public static final String DEFAULT_METASTORE_CATALOG = "hive"; - public static final String HIVE_TABLE_DB_EDGE_LABEL = "__hive_table.db"; - public static final String HOOK_HIVE_PAGE_LIMIT = CONF_PREFIX + "page.limit"; - - static final String OPTION_OUTPUT_FILEPATH_SHORT = "o"; - static final String OPTION_OUTPUT_FILEPATH_LONG = "output"; - static final String OPTION_IGNORE_BULK_IMPORT_SHORT = "i"; - static final String OPTION_IGNORE_BULK_IMPORT_LONG = "ignoreBulkImport"; - static final String OPTION_DATABASE_SHORT = "d"; - static final String OPTION_DATABASE_LONG = "database"; - static final String OPTION_TABLE_SHORT = "t"; - static final String OPTION_TABLE_LONG = "table"; - static final String OPTION_IMPORT_DATA_FILE_SHORT = "f"; - static final String OPTION_IMPORT_DATA_FILE_LONG = "filename"; - static final String OPTION_FAIL_ON_ERROR = "failOnError"; - static final String OPTION_DELETE_NON_EXISTING = "deleteNonExisting"; - static final String OPTION_HELP_SHORT = "h"; - static final String OPTION_HELP_LONG = "help"; - - public static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2 = "v2"; - - private static final int EXIT_CODE_SUCCESS = 0; - private static final int EXIT_CODE_FAILED = 1; - private static final int EXIT_CODE_INVALID_ARG = 2; - - private static final String DEFAULT_ATLAS_URL = "http://localhost:21000/"; - private static int pageLimit = 10000; - - private final String metadataNamespace; - private final Hive hiveClient; - private final AtlasClientV2 atlasClientV2; - private final boolean convertHdfsPathToLowerCase; - - private String awsS3AtlasModelVersion = null; - - public static void main(String[] args) { - int exitCode = EXIT_CODE_FAILED; - AtlasClientV2 atlasClientV2 = null; - Options acceptedCliOptions = prepareCommandLineOptions(); - - try { - CommandLine cmd = new BasicParser().parse(acceptedCliOptions, args); - List argsNotProcessed = cmd.getArgList(); - - if (argsNotProcessed != null && argsNotProcessed.size() > 0) { - throw new ParseException("Unrecognized arguments."); - } - - if (cmd.hasOption(OPTION_HELP_SHORT)) { - printUsage(acceptedCliOptions); - exitCode = EXIT_CODE_SUCCESS; - } else { - Configuration atlasConf = ApplicationProperties.get(); - String[] atlasEndpoint = atlasConf.getStringArray(ATLAS_ENDPOINT); - - if (atlasEndpoint == null || atlasEndpoint.length == 0) { - atlasEndpoint = new String[] { DEFAULT_ATLAS_URL }; - } - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput(); - - atlasClientV2 = new AtlasClientV2(atlasEndpoint, 
basicAuthUsernamePassword); - } else { - UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - - atlasClientV2 = new AtlasClientV2(ugi, ugi.getShortUserName(), atlasEndpoint); - } - - boolean createZip = cmd.hasOption(OPTION_OUTPUT_FILEPATH_LONG); - - if (createZip) { - HiveMetaStoreBridgeV2 hiveMetaStoreBridgeV2 = new HiveMetaStoreBridgeV2(atlasConf, new HiveConf(), atlasClientV2); - - if (hiveMetaStoreBridgeV2.exportDataToZipAndRunAtlasImport(cmd)) { - exitCode = EXIT_CODE_SUCCESS; - } - } else { - HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClientV2); - - if (hiveMetaStoreBridge.importDataDirectlyToAtlas(cmd)) { - exitCode = EXIT_CODE_SUCCESS; - } - } - } - } catch(ParseException e) { - LOG.error("Invalid argument. Error: {}", e.getMessage()); - System.out.println("Invalid argument. Error: " + e.getMessage()); - exitCode = EXIT_CODE_INVALID_ARG; - - if (!(e instanceof MissingArgumentException)) { - printUsage(acceptedCliOptions); - } - } catch(Exception e) { - LOG.error("Import Failed", e); - } finally { - if( atlasClientV2 !=null) { - atlasClientV2.close(); - } - } - - System.exit(exitCode); - } - - private static Options prepareCommandLineOptions() { - Options acceptedCliOptions = new Options(); - - return acceptedCliOptions.addOption(OPTION_OUTPUT_FILEPATH_SHORT, OPTION_OUTPUT_FILEPATH_LONG, true, "Output path or file for Zip import") - .addOption(OPTION_IGNORE_BULK_IMPORT_SHORT, OPTION_IGNORE_BULK_IMPORT_LONG, false, "Ignore bulk Import for Zip import") - .addOption(OPTION_DATABASE_SHORT, OPTION_DATABASE_LONG, true, "Database name") - .addOption(OPTION_TABLE_SHORT, OPTION_TABLE_LONG, true, "Table name") - .addOption(OPTION_IMPORT_DATA_FILE_SHORT, OPTION_IMPORT_DATA_FILE_LONG, true, "Filename") - .addOption(OPTION_FAIL_ON_ERROR, false, "failOnError") - .addOption(OPTION_DELETE_NON_EXISTING, false, "Delete database and table entities in Atlas if not present in Hive") - .addOption(OPTION_HELP_SHORT, OPTION_HELP_LONG, false, "Print this help message"); - } - - private static void printUsage(Options options) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("import-hive.sh", options); - System.out.println(); - System.out.println("Usage options:"); - System.out.println(" Usage 1: import-hive.sh [-d OR --database ] " ); - System.out.println(" Imports specified database and its tables ..."); - System.out.println(); - System.out.println(" Usage 2: import-hive.sh [-d OR --database ] [-t

<table> OR --table <table>]"); - System.out.println(" Imports specified table within that database ..."); - System.out.println(); - System.out.println(" Usage 3: import-hive.sh"); - System.out.println(" Imports all databases and tables..."); - System.out.println(); - System.out.println(" Usage 4: import-hive.sh -f <filename>"); - System.out.println(" Imports all databases and tables in the file..."); - System.out.println(" Format:"); - System.out.println(" database1:tbl1"); - System.out.println(" database1:tbl2"); - System.out.println(" database2:tbl2"); - System.out.println(); - System.out.println(" Usage 5: import-hive.sh [-deleteNonExisting] " ); - System.out.println(" Deletes databases and tables which are not in Hive ..."); - System.out.println(); - System.out.println(" Usage 6: import-hive.sh -o <output path or file> [-f <filename>] [-d <database> OR --database <database>] [-t
OR --table
]"); - System.out.println(" To create zip file with exported data and import the zip file at Atlas ..."); - System.out.println(); - System.out.println(" Usage 7: import-hive.sh -i -o [-f ] [-d OR --database ] [-t
OR --table
]"); - System.out.println(" To create zip file with exported data without importing to Atlas which can be imported later ..."); - System.out.println(); - } - - /** - * Construct a HiveMetaStoreBridge. - * @param hiveConf {@link HiveConf} for Hive component in the cluster - */ - public HiveMetaStoreBridge(Configuration atlasProperties, HiveConf hiveConf, AtlasClientV2 atlasClientV2) throws Exception { - this.metadataNamespace = getMetadataNamespace(atlasProperties); - this.hiveClient = Hive.get(hiveConf); - this.atlasClientV2 = atlasClientV2; - this.convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false); - this.awsS3AtlasModelVersion = atlasProperties.getString(HOOK_AWS_S3_ATLAS_MODEL_VERSION, HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2); - if (atlasProperties != null) { - pageLimit = atlasProperties.getInteger(HOOK_HIVE_PAGE_LIMIT, 10000); - } - } - - /** - * Construct a HiveMetaStoreBridge. - * @param hiveConf {@link HiveConf} for Hive component in the cluster - */ - public HiveMetaStoreBridge(Configuration atlasProperties, HiveConf hiveConf) throws Exception { - this(atlasProperties, hiveConf, null); - } - - HiveMetaStoreBridge(String metadataNamespace, Hive hiveClient, AtlasClientV2 atlasClientV2) { - this(metadataNamespace, hiveClient, atlasClientV2, true); - } - - HiveMetaStoreBridge(String metadataNamespace, Hive hiveClient, AtlasClientV2 atlasClientV2, boolean convertHdfsPathToLowerCase) { - this.metadataNamespace = metadataNamespace; - this.hiveClient = hiveClient; - this.atlasClientV2 = atlasClientV2; - this.convertHdfsPathToLowerCase = convertHdfsPathToLowerCase; - } - - public String getMetadataNamespace(Configuration config) { - return AtlasConfigurationUtil.getRecentString(config, HIVE_METADATA_NAMESPACE, getClusterName(config)); - } - - private String getClusterName(Configuration config) { - return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME); - } - - public String getMetadataNamespace() { - return metadataNamespace; - } - - public Hive getHiveClient() { - return hiveClient; - } - - public boolean isConvertHdfsPathToLowerCase() { - return convertHdfsPathToLowerCase; - } - - public boolean importDataDirectlyToAtlas(CommandLine cmd) throws Exception { - LOG.info("Importing Hive metadata"); - boolean ret = false; - - String databaseToImport = cmd.getOptionValue(OPTION_DATABASE_SHORT); - String tableToImport = cmd.getOptionValue(OPTION_TABLE_SHORT); - String fileToImport = cmd.getOptionValue(OPTION_IMPORT_DATA_FILE_SHORT); - - boolean failOnError = cmd.hasOption(OPTION_FAIL_ON_ERROR); - boolean deleteNonExisting = cmd.hasOption(OPTION_DELETE_NON_EXISTING); - - LOG.info("delete non existing flag : {} ", deleteNonExisting); - - if (deleteNonExisting) { - deleteEntitiesForNonExistingHiveMetadata(failOnError); - ret = true; - } else if (StringUtils.isNotEmpty(fileToImport)) { - File f = new File(fileToImport); - - if (f.exists() && f.canRead()) { - BufferedReader br = new BufferedReader(new FileReader(f)); - String line = null; - - while((line = br.readLine()) != null) { - String val[] = line.split(":"); - - if (ArrayUtils.isNotEmpty(val)) { - databaseToImport = val[0]; - - if (val.length > 1) { - tableToImport = val[1]; - } else { - tableToImport = ""; - } - - importDatabases(failOnError, databaseToImport, tableToImport); - } - } - ret = true; - } else { - LOG.error("Failed to read the input file: " + fileToImport); - } - } else { - importDatabases(failOnError, databaseToImport, tableToImport); - ret = true; - } - return ret; - } - - 
@VisibleForTesting - public void importHiveMetadata(String databaseToImport, String tableToImport, boolean failOnError) throws Exception { - LOG.info("Importing Hive metadata"); - - importDatabases(failOnError, databaseToImport, tableToImport); - } - - private void importDatabases(boolean failOnError, String databaseToImport, String tableToImport) throws Exception { - List databaseNames = null; - - if (StringUtils.isEmpty(databaseToImport) && StringUtils.isEmpty(tableToImport)) { - //when both database and table to import are empty, import all - databaseNames = hiveClient.getAllDatabases(); - } else if (StringUtils.isEmpty(databaseToImport) && StringUtils.isNotEmpty(tableToImport)) { - //when database is empty and table is not, then check table has database name in it and import that db and table - if (isTableWithDatabaseName(tableToImport)) { - String val[] = tableToImport.split("\\."); - if (val.length > 1) { - databaseToImport = val[0]; - tableToImport = val[1]; - } - databaseNames = hiveClient.getDatabasesByPattern(databaseToImport); - } else { - databaseNames = hiveClient.getAllDatabases(); - } - } else { - //when database to import has some value then, import that db and all table under it. - databaseNames = hiveClient.getDatabasesByPattern(databaseToImport); - } - - if(!CollectionUtils.isEmpty(databaseNames)) { - LOG.info("Found {} databases", databaseNames.size()); - - for (String databaseName : databaseNames) { - AtlasEntityWithExtInfo dbEntity = registerDatabase(databaseName); - - if (dbEntity != null) { - importTables(dbEntity.getEntity(), databaseName, tableToImport, failOnError); - } - } - } else { - LOG.error("No database found"); - System.exit(EXIT_CODE_FAILED); - } - } - - /** - * Imports all tables for the given db - * @param dbEntity - * @param databaseName - * @param failOnError - * @throws Exception - */ - private int importTables(AtlasEntity dbEntity, String databaseName, String tblName, final boolean failOnError) throws Exception { - int tablesImported = 0; - - final List tableNames; - - if (StringUtils.isEmpty(tblName)) { - tableNames = hiveClient.getAllTables(databaseName); - } else { - tableNames = hiveClient.getTablesByPattern(databaseName, tblName); - } - - if(!CollectionUtils.isEmpty(tableNames)) { - LOG.info("Found {} tables to import in database {}", tableNames.size(), databaseName); - - try { - for (String tableName : tableNames) { - int imported = importTable(dbEntity, databaseName, tableName, failOnError); - - tablesImported += imported; - } - } finally { - if (tablesImported == tableNames.size()) { - LOG.info("Successfully imported {} tables from database {}", tablesImported, databaseName); - } else { - LOG.error("Imported {} of {} tables from database {}. 
Please check logs for errors during import", tablesImported, tableNames.size(), databaseName); - } - } - } else { - LOG.error("No tables to import in database {}", databaseName); - } - - return tablesImported; - } - - @VisibleForTesting - public int importTable(AtlasEntity dbEntity, String databaseName, String tableName, final boolean failOnError) throws Exception { - try { - Table table = hiveClient.getTable(databaseName, tableName); - AtlasEntityWithExtInfo tableEntity = registerTable(dbEntity, table); - - if (table.getTableType() == TableType.EXTERNAL_TABLE) { - String processQualifiedName = getTableProcessQualifiedName(metadataNamespace, table); - AtlasEntityWithExtInfo processEntity = findProcessEntity(processQualifiedName); - - if (processEntity == null) { - String tableLocationString = isConvertHdfsPathToLowerCase() ? lower(table.getDataLocation().toString()) : table.getDataLocation().toString(); - Path location = table.getDataLocation(); - String query = getCreateTableString(table, tableLocationString); - - PathExtractorContext pathExtractorCtx = new PathExtractorContext(getMetadataNamespace(), isConvertHdfsPathToLowerCase(), awsS3AtlasModelVersion); - AtlasEntityWithExtInfo entityWithExtInfo = AtlasPathExtractorUtil.getPathEntity(location, pathExtractorCtx); - AtlasEntity pathInst = entityWithExtInfo.getEntity(); - AtlasEntity tableInst = tableEntity.getEntity(); - AtlasEntity processInst = new AtlasEntity(HiveDataTypes.HIVE_PROCESS.getName()); - - long now = System.currentTimeMillis(); - - processInst.setAttribute(ATTRIBUTE_QUALIFIED_NAME, processQualifiedName); - processInst.setAttribute(ATTRIBUTE_NAME, query); - processInst.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace); - processInst.setRelationshipAttribute(ATTRIBUTE_INPUTS, Collections.singletonList(AtlasTypeUtil.getAtlasRelatedObjectId(pathInst, RELATIONSHIP_DATASET_PROCESS_INPUTS))); - processInst.setRelationshipAttribute(ATTRIBUTE_OUTPUTS, Collections.singletonList(AtlasTypeUtil.getAtlasRelatedObjectId(tableInst, RELATIONSHIP_PROCESS_DATASET_OUTPUTS))); - String userName = table.getOwner(); - if (StringUtils.isEmpty(userName)) { - userName = ApplicationProperties.get().getString(HIVE_USERNAME, "hive"); - } - processInst.setAttribute(ATTRIBUTE_USER_NAME, userName); - processInst.setAttribute(ATTRIBUTE_START_TIME, now); - processInst.setAttribute(ATTRIBUTE_END_TIME, now); - processInst.setAttribute(ATTRIBUTE_OPERATION_TYPE, "CREATETABLE"); - processInst.setAttribute(ATTRIBUTE_QUERY_TEXT, query); - processInst.setAttribute(ATTRIBUTE_QUERY_ID, query); - processInst.setAttribute(ATTRIBUTE_QUERY_PLAN, "{}"); - processInst.setAttribute(ATTRIBUTE_RECENT_QUERIES, Collections.singletonList(query)); - - AtlasEntitiesWithExtInfo createTableProcess = new AtlasEntitiesWithExtInfo(); - - createTableProcess.addEntity(processInst); - - if (pathExtractorCtx.getKnownEntities() != null) { - pathExtractorCtx.getKnownEntities().values().forEach(entity -> createTableProcess.addEntity(entity)); - } else { - createTableProcess.addEntity(pathInst); - } - - registerInstances(createTableProcess); - } else { - LOG.info("Process {} is already registered", processQualifiedName); - } - } - - return 1; - } catch (Exception e) { - LOG.error("Import failed for hive_table {}", tableName, e); - - if (failOnError) { - throw e; - } - - return 0; - } - } - - /** - * Checks if db is already registered, else creates and registers db entity - * @param databaseName - * @return - * @throws Exception - */ - private AtlasEntityWithExtInfo 
registerDatabase(String databaseName) throws Exception { - AtlasEntityWithExtInfo ret = null; - Database db = hiveClient.getDatabase(databaseName); - - if (db != null) { - ret = findDatabase(metadataNamespace, databaseName); - - if (ret == null) { - ret = registerInstance(new AtlasEntityWithExtInfo(toDbEntity(db))); - } else { - LOG.info("Database {} is already registered - id={}. Updating it.", databaseName, ret.getEntity().getGuid()); - - ret.setEntity(toDbEntity(db, ret.getEntity())); - - updateInstance(ret); - } - } - - return ret; - } - - private AtlasEntityWithExtInfo registerTable(AtlasEntity dbEntity, Table table) throws AtlasHookException { - try { - AtlasEntityWithExtInfo ret; - AtlasEntityWithExtInfo tableEntity = findTableEntity(table); - - if (tableEntity == null) { - tableEntity = toTableEntity(dbEntity, table); - - ret = registerInstance(tableEntity); - } else { - LOG.info("Table {}.{} is already registered with id {}. Updating entity.", table.getDbName(), table.getTableName(), tableEntity.getEntity().getGuid()); - - ret = toTableEntity(dbEntity, table, tableEntity); - - updateInstance(ret); - } - - return ret; - } catch (Exception e) { - throw new AtlasHookException("HiveMetaStoreBridge.registerTable() failed.", e); - } - } - - /** - * Registers an entity in atlas - * @param entity - * @return - * @throws Exception - */ - private AtlasEntityWithExtInfo registerInstance(AtlasEntityWithExtInfo entity) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("creating {} entity: {}", entity.getEntity().getTypeName(), entity); - } - - AtlasEntityWithExtInfo ret = null; - EntityMutationResponse response = atlasClientV2.createEntity(entity); - List createdEntities = response.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE); - - if (CollectionUtils.isNotEmpty(createdEntities)) { - for (AtlasEntityHeader createdEntity : createdEntities) { - if (ret == null) { - ret = atlasClientV2.getEntityByGuid(createdEntity.getGuid()); - - LOG.info("Created {} entity: name={}, guid={}", ret.getEntity().getTypeName(), ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), ret.getEntity().getGuid()); - } else if (ret.getEntity(createdEntity.getGuid()) == null) { - AtlasEntityWithExtInfo newEntity = atlasClientV2.getEntityByGuid(createdEntity.getGuid()); - - ret.addReferredEntity(newEntity.getEntity()); - - if (MapUtils.isNotEmpty(newEntity.getReferredEntities())) { - for (Map.Entry entry : newEntity.getReferredEntities().entrySet()) { - ret.addReferredEntity(entry.getKey(), entry.getValue()); - } - } - - LOG.info("Created {} entity: name={}, guid={}", newEntity.getEntity().getTypeName(), newEntity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), newEntity.getEntity().getGuid()); - } - } - } - - clearRelationshipAttributes(ret); - - return ret; - } - - /** - * Registers an entity in atlas - * @param entities - * @return - * @throws Exception - */ - private AtlasEntitiesWithExtInfo registerInstances(AtlasEntitiesWithExtInfo entities) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("creating {} entities: {}", entities.getEntities().size(), entities); - } - - AtlasEntitiesWithExtInfo ret = null; - EntityMutationResponse response = atlasClientV2.createEntities(entities); - List createdEntities = response.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE); - - if (CollectionUtils.isNotEmpty(createdEntities)) { - ret = new AtlasEntitiesWithExtInfo(); - - for (AtlasEntityHeader createdEntity : createdEntities) { - AtlasEntityWithExtInfo entity = 
atlasClientV2.getEntityByGuid(createdEntity.getGuid()); - - ret.addEntity(entity.getEntity()); - - if (MapUtils.isNotEmpty(entity.getReferredEntities())) { - for (Map.Entry entry : entity.getReferredEntities().entrySet()) { - ret.addReferredEntity(entry.getKey(), entry.getValue()); - } - } - - LOG.info("Created {} entity: name={}, guid={}", entity.getEntity().getTypeName(), entity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), entity.getEntity().getGuid()); - } - } - - clearRelationshipAttributes(ret); - - return ret; - } - - private void updateInstance(AtlasEntityWithExtInfo entity) throws AtlasServiceException { - if (LOG.isDebugEnabled()) { - LOG.debug("updating {} entity: {}", entity.getEntity().getTypeName(), entity); - } - - atlasClientV2.updateEntity(entity); - - LOG.info("Updated {} entity: name={}, guid={}", entity.getEntity().getTypeName(), entity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), entity.getEntity().getGuid()); - } - - /** - * Create a Hive Database entity - * @param hiveDB The Hive {@link Database} object from which to map properties - * @return new Hive Database AtlasEntity - * @throws HiveException - */ - private AtlasEntity toDbEntity(Database hiveDB) throws HiveException { - return toDbEntity(hiveDB, null); - } - - private AtlasEntity toDbEntity(Database hiveDB, AtlasEntity dbEntity) { - if (dbEntity == null) { - dbEntity = new AtlasEntity(HiveDataTypes.HIVE_DB.getName()); - } - - String dbName = getDatabaseName(hiveDB); - - dbEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getDBQualifiedName(metadataNamespace, dbName)); - dbEntity.setAttribute(ATTRIBUTE_NAME, dbName); - dbEntity.setAttribute(ATTRIBUTE_DESCRIPTION, hiveDB.getDescription()); - dbEntity.setAttribute(ATTRIBUTE_OWNER, hiveDB.getOwnerName()); - - dbEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace); - dbEntity.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(hiveDB.getLocationUri())); - dbEntity.setAttribute(ATTRIBUTE_PARAMETERS, hiveDB.getParameters()); - - if (hiveDB.getOwnerType() != null) { - dbEntity.setAttribute(ATTRIBUTE_OWNER_TYPE, OWNER_TYPE_TO_ENUM_VALUE.get(hiveDB.getOwnerType().getValue())); - } - - return dbEntity; - } - - public static String getDatabaseName(Database hiveDB) { - String dbName = hiveDB.getName().toLowerCase(); - String catalogName = hiveDB.getCatalogName() != null ? 
hiveDB.getCatalogName().toLowerCase() : null; - - if (StringUtils.isNotEmpty(catalogName) && !StringUtils.equals(catalogName, DEFAULT_METASTORE_CATALOG)) { - dbName = catalogName + SEP + dbName; - } - - return dbName; - } - - /** - * Create a new table instance in Atlas - * @param database AtlasEntity for Hive {@link AtlasEntity} to which this table belongs - * @param hiveTable reference to the Hive {@link Table} from which to map properties - * @return Newly created Hive AtlasEntity - * @throws Exception - */ - private AtlasEntityWithExtInfo toTableEntity(AtlasEntity database, Table hiveTable) throws AtlasHookException { - return toTableEntity(database, hiveTable, null); - } - - private AtlasEntityWithExtInfo toTableEntity(AtlasEntity database, final Table hiveTable, AtlasEntityWithExtInfo table) throws AtlasHookException { - if (table == null) { - table = new AtlasEntityWithExtInfo(new AtlasEntity(HiveDataTypes.HIVE_TABLE.getName())); - } - - AtlasEntity tableEntity = table.getEntity(); - String tableQualifiedName = getTableQualifiedName(metadataNamespace, hiveTable); - long createTime = BaseHiveEvent.getTableCreateTime(hiveTable); - long lastAccessTime = hiveTable.getLastAccessTime() > 0 ? hiveTable.getLastAccessTime() : createTime; - - tableEntity.setRelationshipAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasRelatedObjectId(database, RELATIONSHIP_HIVE_TABLE_DB)); - tableEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, tableQualifiedName); - tableEntity.setAttribute(ATTRIBUTE_NAME, hiveTable.getTableName().toLowerCase()); - tableEntity.setAttribute(ATTRIBUTE_OWNER, hiveTable.getOwner()); - - tableEntity.setAttribute(ATTRIBUTE_CREATE_TIME, createTime); - tableEntity.setAttribute(ATTRIBUTE_LAST_ACCESS_TIME, lastAccessTime); - tableEntity.setAttribute(ATTRIBUTE_RETENTION, hiveTable.getRetention()); - tableEntity.setAttribute(ATTRIBUTE_PARAMETERS, hiveTable.getParameters()); - tableEntity.setAttribute(ATTRIBUTE_COMMENT, hiveTable.getParameters().get(ATTRIBUTE_COMMENT)); - tableEntity.setAttribute(ATTRIBUTE_TABLE_TYPE, hiveTable.getTableType().name()); - tableEntity.setAttribute(ATTRIBUTE_TEMPORARY, hiveTable.isTemporary()); - - if (hiveTable.getViewOriginalText() != null) { - tableEntity.setAttribute(ATTRIBUTE_VIEW_ORIGINAL_TEXT, hiveTable.getViewOriginalText()); - } - - if (hiveTable.getViewExpandedText() != null) { - tableEntity.setAttribute(ATTRIBUTE_VIEW_EXPANDED_TEXT, hiveTable.getViewExpandedText()); - } - - AtlasEntity sdEntity = toStorageDescEntity(hiveTable.getSd(), tableQualifiedName, getStorageDescQFName(tableQualifiedName), AtlasTypeUtil.getObjectId(tableEntity)); - List partKeys = toColumns(hiveTable.getPartitionKeys(), tableEntity, RELATIONSHIP_HIVE_TABLE_PART_KEYS); - List columns = toColumns(hiveTable.getCols(), tableEntity, RELATIONSHIP_HIVE_TABLE_COLUMNS); - - tableEntity.setRelationshipAttribute(ATTRIBUTE_STORAGEDESC, AtlasTypeUtil.getAtlasRelatedObjectId(sdEntity, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC)); - tableEntity.setRelationshipAttribute(ATTRIBUTE_PARTITION_KEYS, AtlasTypeUtil.getAtlasRelatedObjectIds(partKeys, RELATIONSHIP_HIVE_TABLE_PART_KEYS)); - tableEntity.setRelationshipAttribute(ATTRIBUTE_COLUMNS, AtlasTypeUtil.getAtlasRelatedObjectIds(columns, RELATIONSHIP_HIVE_TABLE_COLUMNS)); - - table.addReferredEntity(database); - table.addReferredEntity(sdEntity); - - if (partKeys != null) { - for (AtlasEntity partKey : partKeys) { - table.addReferredEntity(partKey); - } - } - - if (columns != null) { - for (AtlasEntity column : columns) { - table.addReferredEntity(column); 
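As an aside on the table-registration path above: the bridge always ships a table together with its storage descriptor, partition keys and columns inside a single AtlasEntityWithExtInfo, so the server can resolve the relationship attributes without extra round trips. A minimal, illustrative sketch of that assembly pattern using the same client model classes (the type names, attribute names and literal values below are examples, not the bridge's actual helper code):

    import org.apache.atlas.model.instance.AtlasEntity;
    import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;

    public class TableEntitySketch {
        static AtlasEntityWithExtInfo buildTableWithOneColumn() {
            AtlasEntity table = new AtlasEntity("hive_table");               // type names as registered by the Hive model
            table.setAttribute("qualifiedName", "sales.orders@primary");     // illustrative values only
            table.setAttribute("name", "orders");

            AtlasEntity column = new AtlasEntity("hive_column");
            column.setAttribute("qualifiedName", "sales.orders.id@primary");
            column.setAttribute("name", "id");
            // the real bridge also wires relationship attributes (table<->columns, table<->storage descriptor)

            AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(table);  // primary entity ...
            ret.addReferredEntity(column);                                   // ... plus referred entities in one payload
            return ret;
        }
    }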
- } - } - - table.setEntity(tableEntity); - - return table; - } - - private AtlasEntity toStorageDescEntity(StorageDescriptor storageDesc, String tableQualifiedName, String sdQualifiedName, AtlasObjectId tableId ) throws AtlasHookException { - AtlasEntity ret = new AtlasEntity(HiveDataTypes.HIVE_STORAGEDESC.getName()); - - ret.setRelationshipAttribute(ATTRIBUTE_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(tableId, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC)); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, sdQualifiedName); - ret.setAttribute(ATTRIBUTE_PARAMETERS, storageDesc.getParameters()); - ret.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(storageDesc.getLocation())); - ret.setAttribute(ATTRIBUTE_INPUT_FORMAT, storageDesc.getInputFormat()); - ret.setAttribute(ATTRIBUTE_OUTPUT_FORMAT, storageDesc.getOutputFormat()); - ret.setAttribute(ATTRIBUTE_COMPRESSED, storageDesc.isCompressed()); - ret.setAttribute(ATTRIBUTE_NUM_BUCKETS, storageDesc.getNumBuckets()); - ret.setAttribute(ATTRIBUTE_STORED_AS_SUB_DIRECTORIES, storageDesc.isStoredAsSubDirectories()); - - if (storageDesc.getBucketCols().size() > 0) { - ret.setAttribute(ATTRIBUTE_BUCKET_COLS, storageDesc.getBucketCols()); - } - - if (storageDesc.getSerdeInfo() != null) { - SerDeInfo serdeInfo = storageDesc.getSerdeInfo(); - - LOG.debug("serdeInfo = {}", serdeInfo); - // SkewedInfo skewedInfo = storageDesc.getSkewedInfo(); - - AtlasStruct serdeInfoStruct = new AtlasStruct(HiveDataTypes.HIVE_SERDE.getName()); - - serdeInfoStruct.setAttribute(ATTRIBUTE_NAME, serdeInfo.getName()); - serdeInfoStruct.setAttribute(ATTRIBUTE_SERIALIZATION_LIB, serdeInfo.getSerializationLib()); - serdeInfoStruct.setAttribute(ATTRIBUTE_PARAMETERS, serdeInfo.getParameters()); - - ret.setAttribute(ATTRIBUTE_SERDE_INFO, serdeInfoStruct); - } - - if (CollectionUtils.isNotEmpty(storageDesc.getSortCols())) { - List sortColsStruct = new ArrayList<>(); - - for (Order sortcol : storageDesc.getSortCols()) { - String hiveOrderName = HiveDataTypes.HIVE_ORDER.getName(); - AtlasStruct colStruct = new AtlasStruct(hiveOrderName); - colStruct.setAttribute("col", sortcol.getCol()); - colStruct.setAttribute("order", sortcol.getOrder()); - - sortColsStruct.add(colStruct); - } - - ret.setAttribute(ATTRIBUTE_SORT_COLS, sortColsStruct); - } - - return ret; - } - - private List toColumns(List schemaList, AtlasEntity table, String relationshipType) throws AtlasHookException { - List ret = new ArrayList<>(); - - int columnPosition = 0; - for (FieldSchema fs : schemaList) { - LOG.debug("Processing field {}", fs); - - AtlasEntity column = new AtlasEntity(HiveDataTypes.HIVE_COLUMN.getName()); - - column.setRelationshipAttribute(ATTRIBUTE_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(table, relationshipType)); - column.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getColumnQualifiedName((String) table.getAttribute(ATTRIBUTE_QUALIFIED_NAME), fs.getName())); - column.setAttribute(ATTRIBUTE_NAME, fs.getName()); - column.setAttribute(ATTRIBUTE_OWNER, table.getAttribute(ATTRIBUTE_OWNER)); - column.setAttribute(ATTRIBUTE_COL_TYPE, fs.getType()); - column.setAttribute(ATTRIBUTE_COL_POSITION, columnPosition++); - column.setAttribute(ATTRIBUTE_COMMENT, fs.getComment()); - - ret.add(column); - } - return ret; - } - - /** - * Gets the atlas entity for the database - * @param databaseName database Name - * @param metadataNamespace cluster name - * @return AtlasEntity for database if exists, else null - * @throws Exception - */ - private AtlasEntityWithExtInfo findDatabase(String 
metadataNamespace, String databaseName) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("Searching Atlas for database {}", databaseName); - } - - String typeName = HiveDataTypes.HIVE_DB.getName(); - - return findEntity(typeName, getDBQualifiedName(metadataNamespace, databaseName), true, true); - } - - /** - * Gets Atlas Entity for the table - * - * @param hiveTable - * @return table entity from Atlas if exists, else null - * @throws Exception - */ - private AtlasEntityWithExtInfo findTableEntity(Table hiveTable) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("Searching Atlas for table {}.{}", hiveTable.getDbName(), hiveTable.getTableName()); - } - - String typeName = HiveDataTypes.HIVE_TABLE.getName(); - String tblQualifiedName = getTableQualifiedName(getMetadataNamespace(), hiveTable.getDbName(), hiveTable.getTableName()); - - return findEntity(typeName, tblQualifiedName, true, true); - } - - private AtlasEntityWithExtInfo findProcessEntity(String qualifiedName) throws Exception{ - if (LOG.isDebugEnabled()) { - LOG.debug("Searching Atlas for process {}", qualifiedName); - } - - String typeName = HiveDataTypes.HIVE_PROCESS.getName(); - - return findEntity(typeName, qualifiedName , true , true); - } - - private AtlasEntityWithExtInfo findEntity(final String typeName, final String qualifiedName , boolean minExtInfo, boolean ignoreRelationship) throws AtlasServiceException { - AtlasEntityWithExtInfo ret = null; - - try { - ret = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, qualifiedName), minExtInfo, ignoreRelationship); - } catch (AtlasServiceException e) { - if(e.getStatus() == ClientResponse.Status.NOT_FOUND) { - return null; - } - - throw e; - } - - return ret; - } - - private String getCreateTableString(Table table, String location){ - String colString = ""; - List colList = table.getAllCols(); - - if (colList != null) { - for (FieldSchema col : colList) { - colString += col.getName() + " " + col.getType() + ","; - } - - if (colList.size() > 0) { - colString = colString.substring(0, colString.length() - 1); - colString = "(" + colString + ")"; - } - } - - String query = "create external table " + table.getTableName() + colString + " location '" + location + "'"; - - return query; - } - - private String lower(String str) { - if (StringUtils.isEmpty(str)) { - return ""; - } - - return str.toLowerCase().trim(); - } - - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hive component belongs - * @param table hive table for which the qualified name is needed - * @return Unique qualified name to identify the Table instance in Atlas. - */ - private static String getTableQualifiedName(String metadataNamespace, Table table) { - return getTableQualifiedName(metadataNamespace, table.getDbName(), table.getTableName(), table.isTemporary()); - } - - private String getHdfsPathQualifiedName(String hdfsPath) { - return String.format("%s@%s", hdfsPath, metadataNamespace); - } - - /** - * Construct the qualified name used to uniquely identify a Database instance in Atlas. - * @param metadataNamespace Name of the cluster to which the Hive component belongs - * @param dbName Name of the Hive database - * @return Unique qualified name to identify the Database instance in Atlas. 
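All three finders above funnel into the same lookup: fetch by the unique qualifiedName attribute and translate an Atlas 404 into null so the caller can decide between create and update. A stripped-down sketch of that pattern (type name and qualified name are placeholders; the status check assumes the Jersey 1.x ClientResponse class that the Atlas client already depends on):

    import java.util.Collections;
    import org.apache.atlas.AtlasClientV2;
    import org.apache.atlas.AtlasServiceException;
    import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
    import com.sun.jersey.api.client.ClientResponse;

    public class FindEntitySketch {
        static AtlasEntityWithExtInfo findByQualifiedName(AtlasClientV2 client, String typeName, String qualifiedName)
                throws AtlasServiceException {
            try {
                // minExtInfo=true, ignoreRelationships=true, as in the bridge above
                return client.getEntityByAttribute(typeName,
                        Collections.singletonMap("qualifiedName", qualifiedName), true, true);
            } catch (AtlasServiceException e) {
                if (e.getStatus() == ClientResponse.Status.NOT_FOUND) {
                    return null;   // not registered yet, so the caller will create it
                }
                throw e;           // anything else is a real failure
            }
        }
    }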
- */ - public static String getDBQualifiedName(String metadataNamespace, String dbName) { - return String.format("%s@%s", dbName.toLowerCase(), metadataNamespace); - } - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Name of the cluster to which the Hive component belongs - * @param dbName Name of the Hive database to which the Table belongs - * @param tableName Name of the Hive table - * @param isTemporaryTable is this a temporary table - * @return Unique qualified name to identify the Table instance in Atlas. - */ - public static String getTableQualifiedName(String metadataNamespace, String dbName, String tableName, boolean isTemporaryTable) { - String tableTempName = tableName; - - if (isTemporaryTable) { - if (SessionState.get() != null && SessionState.get().getSessionId() != null) { - tableTempName = tableName + TEMP_TABLE_PREFIX + SessionState.get().getSessionId(); - } else { - tableTempName = tableName + TEMP_TABLE_PREFIX + RandomStringUtils.random(10); - } - } - - return String.format("%s.%s@%s", dbName.toLowerCase(), tableTempName.toLowerCase(), metadataNamespace); - } - - public static String getTableProcessQualifiedName(String metadataNamespace, Table table) { - String tableQualifiedName = getTableQualifiedName(metadataNamespace, table); - long createdTime = getTableCreatedTime(table); - - return tableQualifiedName + SEP + createdTime; - } - - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hive component belongs - * @param dbName Name of the Hive database to which the Table belongs - * @param tableName Name of the Hive table - * @return Unique qualified name to identify the Table instance in Atlas. 
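For reference, the qualified-name helpers above produce names of the form db@namespace, db.table@namespace and, for the lineage process, db.table@namespace:createTime; temporary tables additionally get a "_temp-<sessionId>" suffix on the table part. A small illustration with made-up values (database "Sales", table "Orders", namespace "primary"):

    public class QualifiedNameExamples {
        public static void main(String[] args) {
            String ns    = "primary";
            String db    = "Sales";
            String table = "Orders";

            String dbQName    = String.format("%s@%s", db.toLowerCase(), ns);                         // sales@primary
            String tableQName = String.format("%s.%s@%s", db.toLowerCase(), table.toLowerCase(), ns); // sales.orders@primary
            long   createTime = 1700000000000L;                                                       // hypothetical create time in ms
            String processQName = tableQName + ":" + createTime;                                      // sales.orders@primary:1700000000000

            System.out.println(dbQName + "\n" + tableQName + "\n" + processQName);
        }
    }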
- */ - public static String getTableQualifiedName(String metadataNamespace, String dbName, String tableName) { - return getTableQualifiedName(metadataNamespace, dbName, tableName, false); - } - public static String getStorageDescQFName(String tableQualifiedName) { - return tableQualifiedName + "_storage"; - } - - public static String getColumnQualifiedName(final String tableQualifiedName, final String colName) { - final String[] parts = tableQualifiedName.split("@"); - final String tableName = parts[0]; - final String metadataNamespace = parts[1]; - - return String.format("%s.%s@%s", tableName, colName.toLowerCase(), metadataNamespace); - } - - public static long getTableCreatedTime(Table table) { - return table.getTTable().getCreateTime() * MILLIS_CONVERT_FACTOR; - } - - private void clearRelationshipAttributes(AtlasEntitiesWithExtInfo entities) { - if (entities != null) { - if (entities.getEntities() != null) { - for (AtlasEntity entity : entities.getEntities()) { - clearRelationshipAttributes(entity);; - } - } - - if (entities.getReferredEntities() != null) { - clearRelationshipAttributes(entities.getReferredEntities().values()); - } - } - } - - private void clearRelationshipAttributes(AtlasEntityWithExtInfo entity) { - if (entity != null) { - clearRelationshipAttributes(entity.getEntity()); - - if (entity.getReferredEntities() != null) { - clearRelationshipAttributes(entity.getReferredEntities().values()); - } - } - } - - private void clearRelationshipAttributes(Collection entities) { - if (entities != null) { - for (AtlasEntity entity : entities) { - clearRelationshipAttributes(entity); - } - } - } - - private void clearRelationshipAttributes(AtlasEntity entity) { - if (entity != null && entity.getRelationshipAttributes() != null) { - entity.getRelationshipAttributes().clear(); - } - } - - private boolean isTableWithDatabaseName(String tableName) { - boolean ret = false; - if (tableName.contains(".")) { - ret = true; - } - return ret; - } - - private List getAllDatabaseInCluster() throws AtlasServiceException { - - List entities = new ArrayList<>(); - final int pageSize = pageLimit; - - SearchParameters.FilterCriteria fc = new SearchParameters.FilterCriteria(); - fc.setAttributeName(ATTRIBUTE_CLUSTER_NAME); - fc.setAttributeValue(metadataNamespace); - fc.setOperator(SearchParameters.Operator.EQ); - - for (int i = 0; ; i++) { - int offset = pageSize * i; - LOG.info("Retrieving databases: offset={}, pageSize={}", offset, pageSize); - - AtlasSearchResult searchResult = atlasClientV2.basicSearch(HIVE_TYPE_DB, fc,null, null, true, pageSize, offset); - - List entityHeaders = searchResult == null ? null : searchResult.getEntities(); - int dbCount = entityHeaders == null ? 0 : entityHeaders.size(); - - LOG.info("Retrieved {} databases of {} cluster", dbCount, metadataNamespace); - - if (dbCount > 0) { - entities.addAll(entityHeaders); - } - - if (dbCount < pageSize) { // last page - break; - } - } - - return entities; - } - - private List getAllTablesInDb(String databaseGuid) throws AtlasServiceException { - - List entities = new ArrayList<>(); - final int pageSize = pageLimit; - - for (int i = 0; ; i++) { - int offset = pageSize * i; - LOG.info("Retrieving tables: offset={}, pageSize={}", offset, pageSize); - - AtlasSearchResult searchResult = atlasClientV2.relationshipSearch(databaseGuid, HIVE_TABLE_DB_EDGE_LABEL, null, null, true, pageSize, offset); - - List entityHeaders = searchResult == null ? null : searchResult.getEntities(); - int tableCount = entityHeaders == null ? 
0 : entityHeaders.size(); - - LOG.info("Retrieved {} tables of {} database", tableCount, databaseGuid); - - if (tableCount > 0) { - entities.addAll(entityHeaders); - } - - if (tableCount < pageSize) { // last page - break; - } - } - - return entities; - } - - public String getHiveDatabaseName(String qualifiedName) { - - if (StringUtils.isNotEmpty(qualifiedName)) { - String[] split = qualifiedName.split("@"); - if (split.length > 0) { - return split[0]; - } - } - return null; - } - - - public String getHiveTableName(String qualifiedName, boolean isTemporary) { - - if (StringUtils.isNotEmpty(qualifiedName)) { - String tableName = StringUtils.substringBetween(qualifiedName, ".", "@"); - if (!isTemporary) { - return tableName; - } else { - if (StringUtils.isNotEmpty(tableName)) { - String[] splitTemp = tableName.split(TEMP_TABLE_PREFIX); - if (splitTemp.length > 0) { - return splitTemp[0]; - } - } - } - } - return null; - } - - private void deleteByGuid(List guidTodelete) throws AtlasServiceException { - - if (CollectionUtils.isNotEmpty(guidTodelete)) { - - for (String guid : guidTodelete) { - EntityMutationResponse response = atlasClientV2.deleteEntityByGuid(guid); - - if (response.getDeletedEntities().size() < 1) { - LOG.info("Entity with guid : {} is not deleted", guid); - } else { - LOG.info("Entity with guid : {} is deleted", guid); - } - } - } else { - LOG.info("No Entity to delete from Atlas"); - } - } - - public void deleteEntitiesForNonExistingHiveMetadata(boolean failOnError) throws Exception { - - //fetch databases from Atlas - List dbs = null; - try { - dbs = getAllDatabaseInCluster(); - LOG.info("Total Databases in cluster {} : {} ", metadataNamespace, dbs.size()); - } catch (AtlasServiceException e) { - LOG.error("Failed to retrieve database entities for cluster {} from Atlas", metadataNamespace, e); - if (failOnError) { - throw e; - } - } - - if (CollectionUtils.isNotEmpty(dbs)) { - //iterate all dbs to check if exists in hive - for (AtlasEntityHeader db : dbs) { - - String dbGuid = db.getGuid(); - String hiveDbName = getHiveDatabaseName((String) db.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - - if (StringUtils.isEmpty(hiveDbName)) { - LOG.error("Failed to get database from qualifiedName: {}, guid: {} ", db.getAttribute(ATTRIBUTE_QUALIFIED_NAME), dbGuid); - continue; - } - - List tables; - try { - tables = getAllTablesInDb(dbGuid); - LOG.info("Total Tables in database {} : {} ", hiveDbName, tables.size()); - } catch (AtlasServiceException e) { - LOG.error("Failed to retrieve table entities for database {} from Atlas", hiveDbName, e); - if (failOnError) { - throw e; - } - continue; - } - - List guidsToDelete = new ArrayList<>(); - if (!hiveClient.databaseExists(hiveDbName)) { - - //table guids - if (CollectionUtils.isNotEmpty(tables)) { - for (AtlasEntityHeader table : tables) { - guidsToDelete.add(table.getGuid()); - } - } - - //db guid - guidsToDelete.add(db.getGuid()); - LOG.info("Added database {}.{} and its {} tables to delete", metadataNamespace, hiveDbName, tables.size()); - - } else { - //iterate all table of db to check if it exists - if (CollectionUtils.isNotEmpty(tables)) { - for (AtlasEntityHeader table : tables) { - String hiveTableName = getHiveTableName((String) table.getAttribute(ATTRIBUTE_QUALIFIED_NAME), true); - - if (StringUtils.isEmpty(hiveTableName)) { - LOG.error("Failed to get table from qualifiedName: {}, guid: {} ", table.getAttribute(ATTRIBUTE_QUALIFIED_NAME), table.getGuid()); - continue; - } - - try { - hiveClient.getTable(hiveDbName, hiveTableName, 
true); - } catch (InvalidTableException e) { //table doesn't exists - LOG.info("Added table {}.{} to delete", hiveDbName, hiveTableName); - - guidsToDelete.add(table.getGuid()); - } catch (HiveException e) { - LOG.error("Failed to get table {}.{} from Hive", hiveDbName, hiveTableName, e); - - if (failOnError) { - throw e; - } - } - } - } - } - - //delete entities - if (CollectionUtils.isNotEmpty(guidsToDelete)) { - try { - deleteByGuid(guidsToDelete); - } catch (AtlasServiceException e) { - LOG.error("Failed to delete Atlas entities for database {}", hiveDbName, e); - - if (failOnError) { - throw e; - } - } - - } - } - - } else { - LOG.info("No database found in service."); - } - - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeV2.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeV2.java deleted file mode 100644 index 0627c0e095..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeV2.java +++ /dev/null @@ -1,1036 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
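The cleanup pass above only deletes what Hive no longer knows about: a missing database removes the database entity and all of its tables, while a missing table is detected through the InvalidTableException thrown by Hive's metastore client. A condensed sketch of the per-table check (names are placeholders, the paging and failOnError handling stay as shown above, and whether the delete is soft or hard depends on the Atlas server's delete-handler configuration):

    import org.apache.atlas.AtlasClientV2;
    import org.apache.atlas.AtlasServiceException;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.metadata.InvalidTableException;

    public class StaleEntitySketch {
        /** Returns true when the Atlas entity should be removed because the Hive table is gone. */
        static boolean isStale(Hive hiveClient, String dbName, String tableName) throws HiveException {
            try {
                hiveClient.getTable(dbName, tableName, true);   // throwException=true
                return false;                                   // table still exists in Hive
            } catch (InvalidTableException e) {
                return true;                                    // table dropped, candidate for deletion
            }
        }

        static void deleteStale(AtlasClientV2 client, String guid) throws AtlasServiceException {
            client.deleteEntityByGuid(guid);                    // delete semantics depend on Atlas configuration
        }
    }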
- */ - -package org.apache.atlas.hive.bridge; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.model.impexp.AtlasImportRequest; -import org.apache.atlas.model.impexp.AtlasImportResult; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.hive.hook.events.BaseHiveEvent; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.hook.AtlasHookException; -import org.apache.atlas.utils.AtlasPathExtractorUtil; -import org.apache.atlas.utils.HdfsNameServiceResolver; -import org.apache.atlas.utils.AtlasConfigurationUtil; -import org.apache.atlas.utils.PathExtractorContext; -import org.apache.atlas.utils.LruCache; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.commons.cli.MissingArgumentException; -import org.apache.commons.collections.CollectionUtils; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.OutputStream; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.*; -import java.util.stream.Collectors; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.*; - -/** - * A Bridge Utility that imports metadata into zip file from the Hive Meta Store - * which can be exported at Atlas - */ -public class HiveMetaStoreBridgeV2 { - private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreBridgeV2.class); - - private static final String OPTION_DATABASE_SHORT = "d"; - private static final String OPTION_TABLE_SHORT = "t"; - private static final String OPTION_IMPORT_DATA_FILE_SHORT = "f"; - private static final String OPTION_OUTPUT_FILEPATH_SHORT = "o"; - private static final String OPTION_IGNORE_BULK_IMPORT_SHORT = "i"; - - public static final String CONF_PREFIX = "atlas.hook.hive."; - public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase"; - public static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION = CONF_PREFIX + 
"aws_s3.atlas.model.version"; - - public static final String CLUSTER_NAME_KEY = "atlas.cluster.name"; - public static final String HIVE_USERNAME = "atlas.hook.hive.default.username"; - public static final String HIVE_METADATA_NAMESPACE = "atlas.metadata.namespace"; - public static final String DEFAULT_CLUSTER_NAME = "primary"; - public static final String TEMP_TABLE_PREFIX = "_temp-"; - public static final String SEP = ":".intern(); - public static final String DEFAULT_METASTORE_CATALOG = "hive"; - public static final String HOOK_HIVE_PAGE_LIMIT = CONF_PREFIX + "page.limit"; - - private static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2 = "v2"; - private static final String ZIP_FILE_COMMENT_FORMAT = "{\"entitiesCount\":%d, \"total\":%d}"; - private static final int DEFAULT_PAGE_LIMIT = 10000; - private static final String DEFAULT_ZIP_FILE_NAME = "import-hive-output.zip"; - private static final String ZIP_ENTRY_ENTITIES = "entities.json"; - private static final String TYPES_DEF_JSON = "atlas-typesdef.json"; - - private static final String JSON_ARRAY_START = "["; - private static final String JSON_COMMA = ","; - private static final String JSON_EMPTY_OBJECT = "{}"; - private static final String JSON_ARRAY_END = "]"; - - private static int pageLimit = DEFAULT_PAGE_LIMIT; - private String awsS3AtlasModelVersion = null; - - private final String metadataNamespace; - private final Hive hiveClient; - private final AtlasClientV2 atlasClientV2; - private final boolean convertHdfsPathToLowerCase; - - private ZipOutputStream zipOutputStream; - private String outZipFileName; - private int totalProcessedEntities = 0; - - private final Map entityLRUCache = new LruCache<>(10000, 0); - private final Map hiveTablesAndAtlasEntity = new HashMap<>(); - private final Map dbEntities = new HashMap<>(); - private final List> databaseAndTableListToImport = new ArrayList<>(); - private final Map qualifiedNameGuidMap = new HashMap<>(); - - /** - * Construct a HiveMetaStoreBridgeV2. 
- * @param hiveConf {@link HiveConf} for Hive component in the cluster - */ - public HiveMetaStoreBridgeV2(Configuration atlasProperties, HiveConf hiveConf, AtlasClientV2 atlasClientV2) throws Exception { - this.metadataNamespace = getMetadataNamespace(atlasProperties); - this.hiveClient = Hive.get(hiveConf); - this.atlasClientV2 = atlasClientV2; - this.convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false); - this.awsS3AtlasModelVersion = atlasProperties.getString(HOOK_AWS_S3_ATLAS_MODEL_VERSION, HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2); - - if (atlasProperties != null) { - pageLimit = atlasProperties.getInteger(HOOK_HIVE_PAGE_LIMIT, DEFAULT_PAGE_LIMIT); - } - } - - public boolean exportDataToZipAndRunAtlasImport(CommandLine cmd) throws MissingArgumentException, IOException, HiveException, AtlasBaseException { - boolean ret = true; - boolean failOnError = cmd.hasOption("failOnError"); - - String databaseToImport = cmd.getOptionValue(OPTION_DATABASE_SHORT); - String tableToImport = cmd.getOptionValue(OPTION_TABLE_SHORT); - String importDataFile = cmd.getOptionValue(OPTION_IMPORT_DATA_FILE_SHORT); - String outputFileOrPath = cmd.getOptionValue(OPTION_OUTPUT_FILEPATH_SHORT); - - boolean ignoreBulkImport = cmd.hasOption(OPTION_IGNORE_BULK_IMPORT_SHORT); - - validateOutputFileOrPath(outputFileOrPath); - - try { - initializeZipStream(); - - if (isValidImportDataFile(importDataFile)) { - File f = new File(importDataFile); - - BufferedReader br = new BufferedReader(new FileReader(f)); - String line = null; - - while ((line = br.readLine()) != null) { - String val[] = line.split(":"); - - if (ArrayUtils.isNotEmpty(val)) { - databaseToImport = val[0]; - - if (val.length > 1) { - tableToImport = val[1]; - } else { - tableToImport = ""; - } - - importHiveDatabases(databaseToImport, tableToImport, failOnError); - } - } - } else { - importHiveDatabases(databaseToImport, tableToImport, failOnError); - } - - importHiveTables(failOnError); - importHiveColumns(failOnError); - } finally { - endWritingAndZipStream(); - } - - if (!ignoreBulkImport) { - runAtlasImport(); - } - - return ret; - } - - private void validateOutputFileOrPath(String outputFileOrPath) throws MissingArgumentException { - if (StringUtils.isBlank(outputFileOrPath)) { - throw new MissingArgumentException("Output Path/File can't be empty"); - } - - File fileOrDirToImport = new File(outputFileOrPath); - if (fileOrDirToImport.exists()) { - if (fileOrDirToImport.isDirectory()) { - this.outZipFileName = outputFileOrPath + File.separator + DEFAULT_ZIP_FILE_NAME; - LOG.info("The default output zip file {} will be created at {}", DEFAULT_ZIP_FILE_NAME, outputFileOrPath); - } else { - throw new MissingArgumentException("output file: " + outputFileOrPath + " already present"); - } - } else if (fileOrDirToImport.getParentFile().isDirectory() && outputFileOrPath.endsWith(".zip")) { - LOG.info("The mentioned output zip file {} will be created", outputFileOrPath); - this.outZipFileName = outputFileOrPath; - } else { - throw new MissingArgumentException("Invalid File/Path"); - } - } - - private boolean isValidImportDataFile(String importDataFile) throws MissingArgumentException { - boolean ret = false; - if (StringUtils.isNotBlank(importDataFile)) { - File dataFile = new File(importDataFile); - - if (!dataFile.exists() || !dataFile.canRead()) { - throw new MissingArgumentException("Invalid import data file"); - } - ret = true; - } - - return ret; - } - - private void initializeZipStream() throws IOException, 
AtlasBaseException { - this.zipOutputStream = new ZipOutputStream(getOutputStream(this.outZipFileName)); - - storeTypesDefToZip(new AtlasTypesDef()); - - startWritingEntitiesToZip(); - } - - private void storeTypesDefToZip(AtlasTypesDef typesDef) throws AtlasBaseException { - String jsonData = AtlasType.toJson(typesDef); - saveToZip(TYPES_DEF_JSON, jsonData); - } - - private void saveToZip(String fileName, String jsonData) throws AtlasBaseException { - try { - ZipEntry e = new ZipEntry(fileName); - zipOutputStream.putNextEntry(e); - writeBytes(jsonData); - zipOutputStream.closeEntry(); - } catch (IOException e) { - throw new AtlasBaseException(String.format("Error writing file %s.", fileName), e); - } - } - - private void startWritingEntitiesToZip() throws IOException { - zipOutputStream.putNextEntry(new ZipEntry(ZIP_ENTRY_ENTITIES)); - writeBytes(JSON_ARRAY_START); - } - - private String getDatabaseToImport(String TableWithDatabase) { - String ret = null; - String val[] = TableWithDatabase.split("\\."); - if (val.length > 1) { - ret = val[0]; - } - return ret; - } - - private String getTableToImport(String TableWithDatabase) { - String ret = null; - String val[] = TableWithDatabase.split("\\."); - if (val.length > 1) { - ret = val[1]; - } - return ret; - } - - private void importHiveDatabases(String databaseToImport, String tableWithDatabaseToImport, boolean failOnError) throws HiveException, AtlasBaseException { - LOG.info("Importing Hive Databases"); - - List databaseNames = null; - - if (StringUtils.isEmpty(databaseToImport) && StringUtils.isNotEmpty(tableWithDatabaseToImport)) { - if (isTableWithDatabaseName(tableWithDatabaseToImport)) { - databaseToImport = getDatabaseToImport(tableWithDatabaseToImport); - tableWithDatabaseToImport = getTableToImport(tableWithDatabaseToImport); - } - } - - if (StringUtils.isEmpty(databaseToImport)) { - //when database to import is empty, import all - databaseNames = hiveClient.getAllDatabases(); - } else { - //when database to import has some value then, import that db and all table under it. 
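The export path above writes a zip that Atlas can ingest directly: an atlas-typesdef.json entry (an empty AtlasTypesDef, since type definitions are not updated) followed by an entities.json entry holding one large JSON array of entities. A minimal sketch of that layout, assuming the same entry names and default file name used above:

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;
    import org.apache.atlas.model.typedef.AtlasTypesDef;
    import org.apache.atlas.type.AtlasType;

    public class ZipLayoutSketch {
        public static void main(String[] args) throws IOException {
            try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream("import-hive-output.zip"))) {
                zip.putNextEntry(new ZipEntry("atlas-typesdef.json"));
                zip.write(AtlasType.toJson(new AtlasTypesDef()).getBytes());  // no type changes shipped
                zip.closeEntry();

                zip.putNextEntry(new ZipEntry("entities.json"));
                zip.write("[".getBytes());
                // ... one JSON-serialized AtlasEntityWithExtInfo per entity, each followed by a comma ...
                zip.write("{}]".getBytes());                                  // trailing empty object closes the array cleanly
                zip.closeEntry();
            }
        }
    }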
- databaseNames = hiveClient.getDatabasesByPattern(databaseToImport); - } - - if (!CollectionUtils.isEmpty(databaseNames)) { - LOG.info("Found {} databases", databaseNames.size()); - for (String databaseName : databaseNames) { - try { - if (!dbEntities.containsKey(databaseName)) { - LOG.info("Importing Hive Database {}", databaseName); - AtlasEntityWithExtInfo dbEntity = writeDatabase(databaseName); - if (dbEntity != null) { - dbEntities.put(databaseName, dbEntity.getEntity()); - } - } - databaseAndTableListToImport.add(Collections.singletonMap(databaseName, tableWithDatabaseToImport)); - } catch (IOException e) { - LOG.error("Import failed for hive database {}", databaseName, e); - - if (failOnError) { - throw new AtlasBaseException(e.getMessage(), e); - } - } - } - } else { - LOG.error("No database found"); - if (failOnError) { - throw new AtlasBaseException("No database found"); - } - } - } - - private void writeEntity(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo) throws IOException { - if (MapUtils.isNotEmpty(entityWithExtInfo.getReferredEntities())) { - Iterator> itr = entityWithExtInfo.getReferredEntities().entrySet().iterator(); - while (itr.hasNext()) { - Map.Entry eachEntity = itr.next(); - if (eachEntity.getValue().getTypeName().equalsIgnoreCase(HiveDataTypes.HIVE_DB.getName())) { - itr.remove(); - } - } - } - - if (!entityLRUCache.containsKey(entityWithExtInfo.getEntity().getGuid())) { - entityLRUCache.put(entityWithExtInfo.getEntity().getGuid(), entityWithExtInfo); - writeBytes(AtlasType.toJson(entityWithExtInfo) + JSON_COMMA); - } - totalProcessedEntities++; - } - - private void endWritingAndZipStream() throws IOException { - writeBytes(JSON_EMPTY_OBJECT); - writeBytes(JSON_ARRAY_END); - setStreamSize(totalProcessedEntities); - close(); - } - - private void flush() { - try { - zipOutputStream.flush(); - } catch (IOException e) { - LOG.error("Error: Flush: ", e); - } - } - - private void close() throws IOException { - zipOutputStream.flush(); - zipOutputStream.closeEntry(); - zipOutputStream.close(); - } - - private void writeBytes(String payload) throws IOException { - zipOutputStream.write(payload.getBytes()); - } - - private OutputStream getOutputStream(String fileToWrite) throws IOException { - return FileUtils.openOutputStream(new File(fileToWrite)); - } - - public String getMetadataNamespace(Configuration config) { - return AtlasConfigurationUtil.getRecentString(config, HIVE_METADATA_NAMESPACE, getClusterName(config)); - } - - private String getClusterName(Configuration config) { - return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME); - } - - public String getMetadataNamespace() { - return metadataNamespace; - } - - public boolean isConvertHdfsPathToLowerCase() { - return convertHdfsPathToLowerCase; - } - - /** - * Imports Hive tables if databaseAndTableListToImport is populated - * @param failOnError - * @throws Exception - */ - public void importHiveTables(boolean failOnError) throws HiveException, AtlasBaseException { - LOG.info("Importing Hive Tables"); - - int tablesImported = 0; - - if (CollectionUtils.isNotEmpty(databaseAndTableListToImport) && MapUtils.isNotEmpty(dbEntities)) { - for (Map eachEntry : databaseAndTableListToImport) { - final List
tableObjects; - - String databaseName = eachEntry.keySet().iterator().next(); - - if (StringUtils.isEmpty(eachEntry.values().iterator().next())) { - tableObjects = hiveClient.getAllTableObjects(databaseName); - - populateQualifiedNameGuidMap(HiveDataTypes.HIVE_DB.getName(), (String) dbEntities.get(databaseName).getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } else { - List tableNames = hiveClient.getTablesByPattern(databaseName, eachEntry.values().iterator().next()); - tableObjects = new ArrayList<>(); - - for (String tableName : tableNames) { - Table table = hiveClient.getTable(databaseName, tableName); - tableObjects.add(table); - populateQualifiedNameGuidMap(HiveDataTypes.HIVE_TABLE.getName(), getTableQualifiedName(metadataNamespace, table)); - } - } - - if (!CollectionUtils.isEmpty(tableObjects)) { - LOG.info("Found {} tables to import in database {}", tableObjects.size(), databaseName); - - try { - for (Table table : tableObjects) { - int imported = importTable(dbEntities.get(databaseName), table, failOnError); - - tablesImported += imported; - } - } finally { - if (tablesImported == tableObjects.size()) { - LOG.info("Successfully imported {} tables from database {}", tablesImported, databaseName); - } else { - LOG.error("Imported {} of {} tables from database {}. Please check logs for errors during import", - tablesImported, tableObjects.size(), databaseName); - } - } - } else { - LOG.error("No tables to import in database {}", databaseName); - if (failOnError) { - throw new AtlasBaseException("No tables to import in database - " + databaseName); - } - } - } - } - - dbEntities.clear(); - } - - private void populateQualifiedNameGuidMap(String typeName, String qualifiedName) { - try { - AtlasEntitiesWithExtInfo entitiesWithExtInfo = atlasClientV2.getEntitiesByAttribute(typeName, Collections.singletonList(Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, qualifiedName)), true, false); - - if (entitiesWithExtInfo != null && entitiesWithExtInfo.getEntities() != null) { - for (AtlasEntity entity : entitiesWithExtInfo.getEntities()) { - qualifiedNameGuidMap.put((String) entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME), entity.getGuid()); - - for(Map.Entry eachEntry : entitiesWithExtInfo.getReferredEntities().entrySet()) { - qualifiedNameGuidMap.put((String) eachEntry.getValue().getAttribute(ATTRIBUTE_QUALIFIED_NAME), eachEntry.getKey()); - } - - if (typeName.equals(HiveDataTypes.HIVE_DB.getName())) { - for (String eachRelatedGuid : getAllRelatedGuids(entity)) { - AtlasEntityWithExtInfo relatedEntity = atlasClientV2.getEntityByGuid(eachRelatedGuid, true, false); - - qualifiedNameGuidMap.put((String) relatedEntity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), relatedEntity.getEntity().getGuid()); - for (Map.Entry eachEntry : relatedEntity.getReferredEntities().entrySet()) { - qualifiedNameGuidMap.put((String) eachEntry.getValue().getAttribute(ATTRIBUTE_QUALIFIED_NAME), eachEntry.getKey()); - } - } - } - } - } - } catch (AtlasServiceException e) { - LOG.info("Unable to load the related entities for type {} and qualified name {} from Atlas", typeName, qualifiedName, e); - } - } - - private Set getAllRelatedGuids(AtlasEntity entity) { - Set relGuidsSet = new HashSet<>(); - - for (Object o : entity.getRelationshipAttributes().values()) { - if (o instanceof AtlasObjectId) { - relGuidsSet.add(((AtlasObjectId) o).getGuid()); - } else if (o instanceof List) { - for (Object id : (List) o) { - if (id instanceof AtlasObjectId) { - relGuidsSet.add(((AtlasObjectId) id).getGuid()); - } - if (id instanceof 
Map) { - relGuidsSet.add((String) ((Map) id).get("guid")); - } - } - } - } - - return relGuidsSet; - } - - public void importHiveColumns(boolean failOnError) throws AtlasBaseException { - LOG.info("Importing Hive Columns"); - - if (MapUtils.isEmpty(hiveTablesAndAtlasEntity)) { - if (LOG.isDebugEnabled()) { - LOG.debug("No hive table present to import columns"); - } - - return; - } - - for (Map.Entry eachTable : hiveTablesAndAtlasEntity.entrySet()) { - int columnsImported = 0; - List columnEntities = new ArrayList<>(); - - try { - List partKeys = toColumns(eachTable.getKey().getPartitionKeys(), eachTable.getValue(), RELATIONSHIP_HIVE_TABLE_PART_KEYS); - List columns = toColumns(eachTable.getKey().getCols(), eachTable.getValue(), RELATIONSHIP_HIVE_TABLE_COLUMNS); - - partKeys.stream().collect(Collectors.toCollection(() -> columnEntities)); - columns.stream().collect(Collectors.toCollection(() -> columnEntities)); - - for (AtlasEntity eachColumnEntity : columnEntities) { - writeEntityToZip(new AtlasEntityWithExtInfo(eachColumnEntity)); - columnsImported++; - } - } catch (IOException e) { - LOG.error("Column Import failed for hive table {}", eachTable.getValue().getAttribute(ATTRIBUTE_QUALIFIED_NAME), e); - - if (failOnError) { - throw new AtlasBaseException(e.getMessage(), e); - } - } finally { - if (columnsImported == columnEntities.size()) { - LOG.info("Successfully imported {} columns for table {}", columnsImported, eachTable.getValue().getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } else { - LOG.error("Imported {} of {} columns for table {}. Please check logs for errors during import", columnsImported, columnEntities.size(), eachTable.getValue().getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } - } - } - - } - - private void runAtlasImport() { - AtlasImportRequest request = new AtlasImportRequest(); - request.setOption(AtlasImportRequest.UPDATE_TYPE_DEFINITION_KEY, "false"); - request.setOption(AtlasImportRequest.OPTION_KEY_FORMAT, AtlasImportRequest.OPTION_KEY_FORMAT_ZIP_DIRECT); - - try { - AtlasImportResult importResult = atlasClientV2.importData(request, this.outZipFileName); - - if (importResult.getOperationStatus() == AtlasImportResult.OperationStatus.SUCCESS) { - LOG.info("Successfully imported the zip file {} at Atlas and imported {} entities. Number of entities to be imported {}.", this.outZipFileName, importResult.getProcessedEntities().size(), totalProcessedEntities); - } else { - LOG.error("Failed to import or get the status of import for the zip file {} at Atlas. Number of entities to be imported {}.", this.outZipFileName, totalProcessedEntities); - } - } catch (AtlasServiceException e) { - LOG.error("Failed to import or get the status of import for the zip file {} at Atlas. Number of entities to be imported {}.", this.outZipFileName, totalProcessedEntities, e); - } - } - - public int importTable(AtlasEntity dbEntity, Table table, final boolean failOnError) throws AtlasBaseException { - try { - AtlasEntityWithExtInfo tableEntity = writeTable(dbEntity, table); - - hiveTablesAndAtlasEntity.put(table, tableEntity.getEntity()); - - if (table.getTableType() == TableType.EXTERNAL_TABLE) { - String processQualifiedName = getTableProcessQualifiedName(metadataNamespace, table); - String tableLocationString = isConvertHdfsPathToLowerCase() ? 
lower(table.getDataLocation().toString()) : table.getDataLocation().toString(); - Path location = table.getDataLocation(); - String query = getCreateTableString(table, tableLocationString); - - PathExtractorContext pathExtractorCtx = new PathExtractorContext(getMetadataNamespace(), isConvertHdfsPathToLowerCase(), awsS3AtlasModelVersion); - AtlasEntityWithExtInfo entityWithExtInfo = AtlasPathExtractorUtil.getPathEntity(location, pathExtractorCtx); - AtlasEntity pathInst = entityWithExtInfo.getEntity(); - AtlasEntity tableInst = tableEntity.getEntity(); - AtlasEntity processInst = new AtlasEntity(HiveDataTypes.HIVE_PROCESS.getName()); - - long now = System.currentTimeMillis(); - - processInst.setGuid(getGuid(processQualifiedName)); - processInst.setAttribute(ATTRIBUTE_QUALIFIED_NAME, processQualifiedName); - processInst.setAttribute(ATTRIBUTE_NAME, query); - processInst.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace); - processInst.setRelationshipAttribute(ATTRIBUTE_INPUTS, Collections.singletonList(AtlasTypeUtil.getAtlasRelatedObjectId(pathInst, RELATIONSHIP_DATASET_PROCESS_INPUTS))); - processInst.setRelationshipAttribute(ATTRIBUTE_OUTPUTS, Collections.singletonList(AtlasTypeUtil.getAtlasRelatedObjectId(tableInst, RELATIONSHIP_PROCESS_DATASET_OUTPUTS))); - String userName = table.getOwner(); - if (StringUtils.isEmpty(userName)) { - userName = ApplicationProperties.get().getString(HIVE_USERNAME, "hive"); - } - processInst.setAttribute(ATTRIBUTE_USER_NAME, userName); - processInst.setAttribute(ATTRIBUTE_START_TIME, now); - processInst.setAttribute(ATTRIBUTE_END_TIME, now); - processInst.setAttribute(ATTRIBUTE_OPERATION_TYPE, "CREATETABLE"); - processInst.setAttribute(ATTRIBUTE_QUERY_TEXT, query); - processInst.setAttribute(ATTRIBUTE_QUERY_ID, query); - processInst.setAttribute(ATTRIBUTE_QUERY_PLAN, "{}"); - processInst.setAttribute(ATTRIBUTE_RECENT_QUERIES, Collections.singletonList(query)); - - AtlasEntitiesWithExtInfo createTableProcess = new AtlasEntitiesWithExtInfo(); - - createTableProcess.addEntity(processInst); - - if (pathExtractorCtx.getKnownEntities() != null) { - pathExtractorCtx.getKnownEntities().values().forEach(entity -> createTableProcess.addEntity(entity)); - } else { - createTableProcess.addEntity(pathInst); - } - - writeEntitiesToZip(createTableProcess); - } - - return 1; - } catch (Exception e) { - LOG.error("Import failed for hive_table {}", table.getTableName(), e); - - if (failOnError) { - throw new AtlasBaseException(e.getMessage(), e); - } - - return 0; - } - } - - /** - * Write db entity - * @param databaseName - * @return - * @throws Exception - */ - private AtlasEntityWithExtInfo writeDatabase(String databaseName) throws HiveException, IOException { - AtlasEntityWithExtInfo ret = null; - Database db = hiveClient.getDatabase(databaseName); - - if (db != null) { - ret = new AtlasEntityWithExtInfo(toDbEntity(db)); - writeEntityToZip(ret); - } - - return ret; - } - - private AtlasEntityWithExtInfo writeTable(AtlasEntity dbEntity, Table table) throws AtlasHookException { - try { - AtlasEntityWithExtInfo tableEntity = toTableEntity(dbEntity, table); - writeEntityToZip(tableEntity); - - return tableEntity; - } catch (Exception e) { - throw new AtlasHookException("HiveMetaStoreBridgeV2.registerTable() failed.", e); - } - } - - /** - * Write an entity to Zip file - * @param entity - * @return - * @throws Exception - */ - private void writeEntityToZip(AtlasEntityWithExtInfo entity) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("Writing {} entity: 
{}", entity.getEntity().getTypeName(), entity); - } - - writeEntity(entity); - clearRelationshipAttributes(entity.getEntity()); - flush(); - } - - /** - * Registers an entity in atlas - * @param entities - * @return - * @throws Exception - */ - private void writeEntitiesToZip(AtlasEntitiesWithExtInfo entities) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("Writing {} entities: {}", entities.getEntities().size(), entities); - } - - for (AtlasEntity entity : entities.getEntities()) { - writeEntity(new AtlasEntityWithExtInfo(entity)); - } - - flush(); - clearRelationshipAttributes(entities); - } - - /** - * Create a Hive Database entity - * @param hiveDB The Hive {@link Database} object from which to map properties - * @return new Hive Database AtlasEntity - * @throws HiveException - */ - private AtlasEntity toDbEntity(Database hiveDB) { - return toDbEntity(hiveDB, null); - } - - private AtlasEntity toDbEntity(Database hiveDB, AtlasEntity dbEntity) { - if (dbEntity == null) { - dbEntity = new AtlasEntity(HiveDataTypes.HIVE_DB.getName()); - } - - String dbName = getDatabaseName(hiveDB); - - String qualifiedName = getDBQualifiedName(metadataNamespace, dbName); - dbEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, qualifiedName); - - dbEntity.setGuid(getGuid(true, qualifiedName)); - - dbEntity.setAttribute(ATTRIBUTE_NAME, dbName); - dbEntity.setAttribute(ATTRIBUTE_DESCRIPTION, hiveDB.getDescription()); - dbEntity.setAttribute(ATTRIBUTE_OWNER, hiveDB.getOwnerName()); - - dbEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace); - dbEntity.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(hiveDB.getLocationUri())); - dbEntity.setAttribute(ATTRIBUTE_PARAMETERS, hiveDB.getParameters()); - - if (hiveDB.getOwnerType() != null) { - dbEntity.setAttribute(ATTRIBUTE_OWNER_TYPE, OWNER_TYPE_TO_ENUM_VALUE.get(hiveDB.getOwnerType().getValue())); - } - - return dbEntity; - } - - private String getDBGuidFromAtlas(String dBQualifiedName) { - String guid = null; - try { - guid = atlasClientV2.getEntityHeaderByAttribute(HiveDataTypes.HIVE_DB.getName(), Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, dBQualifiedName)).getGuid(); - } catch (AtlasServiceException e) { - LOG.warn("Failed to get DB guid from Atlas with qualified name {}", dBQualifiedName, e); - } - return guid; - } - - public static String getDatabaseName(Database hiveDB) { - String dbName = hiveDB.getName().toLowerCase(); - String catalogName = hiveDB.getCatalogName() != null ? hiveDB.getCatalogName().toLowerCase() : null; - - if (StringUtils.isNotEmpty(catalogName) && !StringUtils.equals(catalogName, DEFAULT_METASTORE_CATALOG)) { - dbName = catalogName + SEP + dbName; - } - - return dbName; - } - - /** - * Create a new table instance in Atlas - * @param database AtlasEntity for Hive {@link AtlasEntity} to which this table belongs - * @param hiveTable reference to the Hive {@link Table} from which to map properties - * @return Newly created Hive AtlasEntity - * @throws Exception - */ - private AtlasEntityWithExtInfo toTableEntity(AtlasEntity database, final Table hiveTable) throws AtlasHookException { - AtlasEntityWithExtInfo table = new AtlasEntityWithExtInfo(new AtlasEntity(HiveDataTypes.HIVE_TABLE.getName())); - - AtlasEntity tableEntity = table.getEntity(); - String tableQualifiedName = getTableQualifiedName(metadataNamespace, hiveTable); - long createTime = BaseHiveEvent.getTableCreateTime(hiveTable); - long lastAccessTime = hiveTable.getLastAccessTime() > 0 ? 
hiveTable.getLastAccessTime() : createTime; - - tableEntity.setGuid(getGuid(tableQualifiedName)); - tableEntity.setRelationshipAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasRelatedObjectId(database, RELATIONSHIP_HIVE_TABLE_DB)); - tableEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, tableQualifiedName); - tableEntity.setAttribute(ATTRIBUTE_NAME, hiveTable.getTableName().toLowerCase()); - tableEntity.setAttribute(ATTRIBUTE_OWNER, hiveTable.getOwner()); - - tableEntity.setAttribute(ATTRIBUTE_CREATE_TIME, createTime); - tableEntity.setAttribute(ATTRIBUTE_LAST_ACCESS_TIME, lastAccessTime); - tableEntity.setAttribute(ATTRIBUTE_RETENTION, hiveTable.getRetention()); - tableEntity.setAttribute(ATTRIBUTE_PARAMETERS, hiveTable.getParameters()); - tableEntity.setAttribute(ATTRIBUTE_COMMENT, hiveTable.getParameters().get(ATTRIBUTE_COMMENT)); - tableEntity.setAttribute(ATTRIBUTE_TABLE_TYPE, hiveTable.getTableType().name()); - tableEntity.setAttribute(ATTRIBUTE_TEMPORARY, hiveTable.isTemporary()); - - if (hiveTable.getViewOriginalText() != null) { - tableEntity.setAttribute(ATTRIBUTE_VIEW_ORIGINAL_TEXT, hiveTable.getViewOriginalText()); - } - - if (hiveTable.getViewExpandedText() != null) { - tableEntity.setAttribute(ATTRIBUTE_VIEW_EXPANDED_TEXT, hiveTable.getViewExpandedText()); - } - - AtlasEntity sdEntity = toStorageDescEntity(hiveTable.getSd(), getStorageDescQFName(tableQualifiedName), AtlasTypeUtil.getObjectId(tableEntity)); - - tableEntity.setRelationshipAttribute(ATTRIBUTE_STORAGEDESC, AtlasTypeUtil.getAtlasRelatedObjectId(sdEntity, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC)); - - table.addReferredEntity(database); - table.addReferredEntity(sdEntity); - table.setEntity(tableEntity); - - return table; - } - - private AtlasEntity toStorageDescEntity(StorageDescriptor storageDesc, String sdQualifiedName, AtlasObjectId tableId) { - AtlasEntity ret = new AtlasEntity(HiveDataTypes.HIVE_STORAGEDESC.getName()); - - ret.setGuid(getGuid(sdQualifiedName)); - ret.setRelationshipAttribute(ATTRIBUTE_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(tableId, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC)); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, sdQualifiedName); - ret.setAttribute(ATTRIBUTE_PARAMETERS, storageDesc.getParameters()); - ret.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(storageDesc.getLocation())); - ret.setAttribute(ATTRIBUTE_INPUT_FORMAT, storageDesc.getInputFormat()); - ret.setAttribute(ATTRIBUTE_OUTPUT_FORMAT, storageDesc.getOutputFormat()); - ret.setAttribute(ATTRIBUTE_COMPRESSED, storageDesc.isCompressed()); - ret.setAttribute(ATTRIBUTE_NUM_BUCKETS, storageDesc.getNumBuckets()); - ret.setAttribute(ATTRIBUTE_STORED_AS_SUB_DIRECTORIES, storageDesc.isStoredAsSubDirectories()); - - if (storageDesc.getBucketCols().size() > 0) { - ret.setAttribute(ATTRIBUTE_BUCKET_COLS, storageDesc.getBucketCols()); - } - - if (storageDesc.getSerdeInfo() != null) { - SerDeInfo serdeInfo = storageDesc.getSerdeInfo(); - - LOG.info("serdeInfo = {}", serdeInfo); - AtlasStruct serdeInfoStruct = new AtlasStruct(HiveDataTypes.HIVE_SERDE.getName()); - - serdeInfoStruct.setAttribute(ATTRIBUTE_NAME, serdeInfo.getName()); - serdeInfoStruct.setAttribute(ATTRIBUTE_SERIALIZATION_LIB, serdeInfo.getSerializationLib()); - serdeInfoStruct.setAttribute(ATTRIBUTE_PARAMETERS, serdeInfo.getParameters()); - - ret.setAttribute(ATTRIBUTE_SERDE_INFO, serdeInfoStruct); - } - - if (CollectionUtils.isNotEmpty(storageDesc.getSortCols())) { - List sortColsStruct = new ArrayList<>(); - - for (Order sortcol : 
storageDesc.getSortCols()) { - String hiveOrderName = HiveDataTypes.HIVE_ORDER.getName(); - AtlasStruct colStruct = new AtlasStruct(hiveOrderName); - colStruct.setAttribute("col", sortcol.getCol()); - colStruct.setAttribute("order", sortcol.getOrder()); - - sortColsStruct.add(colStruct); - } - - ret.setAttribute(ATTRIBUTE_SORT_COLS, sortColsStruct); - } - - return ret; - } - - private List toColumns(List schemaList, AtlasEntity table, String relationshipType) { - List ret = new ArrayList<>(); - - int columnPosition = 0; - for (FieldSchema fs : schemaList) { - LOG.debug("Processing field {}", fs); - - AtlasEntity column = new AtlasEntity(HiveDataTypes.HIVE_COLUMN.getName()); - - String columnQualifiedName = getColumnQualifiedName((String) table.getAttribute(ATTRIBUTE_QUALIFIED_NAME), fs.getName()); - - column.setAttribute(ATTRIBUTE_QUALIFIED_NAME, columnQualifiedName); - column.setGuid(getGuid(columnQualifiedName)); - - column.setRelationshipAttribute(ATTRIBUTE_TABLE, AtlasTypeUtil.getAtlasRelatedObjectId(table, relationshipType)); - - column.setAttribute(ATTRIBUTE_NAME, fs.getName()); - column.setAttribute(ATTRIBUTE_OWNER, table.getAttribute(ATTRIBUTE_OWNER)); - column.setAttribute(ATTRIBUTE_COL_TYPE, fs.getType()); - column.setAttribute(ATTRIBUTE_COL_POSITION, columnPosition++); - column.setAttribute(ATTRIBUTE_COMMENT, fs.getComment()); - - ret.add(column); - } - return ret; - } - - private String getCreateTableString(Table table, String location){ - String colString = ""; - List colList = table.getAllCols(); - - if (colList != null) { - for (FieldSchema col : colList) { - colString += col.getName() + " " + col.getType() + ","; - } - - if (colList.size() > 0) { - colString = colString.substring(0, colString.length() - 1); - colString = "(" + colString + ")"; - } - } - - String query = "create external table " + table.getTableName() + colString + " location '" + location + "'"; - - return query; - } - - private String lower(String str) { - if (StringUtils.isEmpty(str)) { - return ""; - } - - return str.toLowerCase().trim(); - } - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Metadata namespace of the cluster to which the Hive component belongs - * @param table hive table for which the qualified name is needed - * @return Unique qualified name to identify the Table instance in Atlas. - */ - private static String getTableQualifiedName(String metadataNamespace, Table table) { - return getTableQualifiedName(metadataNamespace, table.getDbName(), table.getTableName(), table.isTemporary()); - } - - /** - * Construct the qualified name used to uniquely identify a Database instance in Atlas. - * @param metadataNamespace Name of the cluster to which the Hive component belongs - * @param dbName Name of the Hive database - * @return Unique qualified name to identify the Database instance in Atlas. - */ - public static String getDBQualifiedName(String metadataNamespace, String dbName) { - return String.format("%s@%s", dbName.toLowerCase(), metadataNamespace); - } - - /** - * Construct the qualified name used to uniquely identify a Table instance in Atlas. - * @param metadataNamespace Name of the cluster to which the Hive component belongs - * @param dbName Name of the Hive database to which the Table belongs - * @param tableName Name of the Hive table - * @param isTemporaryTable is this a temporary table - * @return Unique qualified name to identify the Table instance in Atlas. 
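Note that getCreateTableString above exists only to give the synthetic CREATETABLE lineage process a human-readable query text (the same string is reused as the process name, queryText and queryId); it is not meant to be valid, escaped DDL. For an assumed two-column external table it produces a string like the one below:

    public class CreateTableStringExample {
        public static void main(String[] args) {
            // mirrors the concatenation above for a hypothetical table orders(id int, name string)
            String colString = "(id int,name string)";
            String location  = "hdfs://nn1/warehouse/orders";
            String query     = "create external table orders" + colString + " location '" + location + "'";

            System.out.println(query);
            // create external table orders(id int,name string) location 'hdfs://nn1/warehouse/orders'
        }
    }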
- */ - public static String getTableQualifiedName(String metadataNamespace, String dbName, String tableName, boolean isTemporaryTable) { - String tableTempName = tableName; - - if (isTemporaryTable) { - if (SessionState.get() != null && SessionState.get().getSessionId() != null) { - tableTempName = tableName + TEMP_TABLE_PREFIX + SessionState.get().getSessionId(); - } else { - tableTempName = tableName + TEMP_TABLE_PREFIX + RandomStringUtils.random(10); - } - } - - return String.format("%s.%s@%s", dbName.toLowerCase(), tableTempName.toLowerCase(), metadataNamespace); - } - - public static String getTableProcessQualifiedName(String metadataNamespace, Table table) { - String tableQualifiedName = getTableQualifiedName(metadataNamespace, table); - long createdTime = getTableCreatedTime(table); - - return tableQualifiedName + SEP + createdTime; - } - - public static String getStorageDescQFName(String tableQualifiedName) { - return tableQualifiedName + "_storage"; - } - - public static String getColumnQualifiedName(final String tableQualifiedName, final String colName) { - final String[] parts = tableQualifiedName.split("@"); - final String tableName = parts[0]; - final String metadataNamespace = parts[1]; - - return String.format("%s.%s@%s", tableName, colName.toLowerCase(), metadataNamespace); - } - - public static long getTableCreatedTime(Table table) { - return table.getTTable().getCreateTime() * MILLIS_CONVERT_FACTOR; - } - - private void clearRelationshipAttributes(AtlasEntitiesWithExtInfo entities) { - if (entities != null) { - if (entities.getEntities() != null) { - for (AtlasEntity entity : entities.getEntities()) { - clearRelationshipAttributes(entity);; - } - } - - if (entities.getReferredEntities() != null) { - clearRelationshipAttributes(entities.getReferredEntities().values()); - } - } - } - - private void clearRelationshipAttributes(Collection entities) { - if (entities != null) { - for (AtlasEntity entity : entities) { - clearRelationshipAttributes(entity); - } - } - } - - private void clearRelationshipAttributes(AtlasEntity entity) { - if (entity != null && entity.getRelationshipAttributes() != null) { - entity.getRelationshipAttributes().clear(); - } - } - - private boolean isTableWithDatabaseName(String tableName) { - boolean ret = false; - if (tableName.contains(".")) { - ret = true; - } - return ret; - } - - private String getGuid(String qualifiedName) { - return getGuid(false, qualifiedName); - } - - private String getGuid(boolean isDBType, String qualifiedName) { - String guid = null; - - if (qualifiedNameGuidMap.containsKey(qualifiedName)) { - guid = qualifiedNameGuidMap.get(qualifiedName); - } else if (isDBType) { - guid = getDBGuidFromAtlas(qualifiedName); - } - - if (StringUtils.isBlank(guid)) { - guid = generateGuid(); - } - - return guid; - } - - private String generateGuid() { - return UUID.randomUUID().toString(); - } - - public void setStreamSize(long size) { - zipOutputStream.setComment(String.format(ZIP_FILE_COMMENT_FORMAT, size, -1)); - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/AtlasHiveHookContext.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/AtlasHiveHookContext.java deleted file mode 100644 index 14cc2f2017..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/AtlasHiveHookContext.java +++ /dev/null @@ -1,314 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook; - -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.hive.hook.HiveMetastoreHookImpl.HiveMetastoreHook; -import org.apache.atlas.hive.hook.HiveHook.PreprocessAction; -import org.apache.atlas.hive.hook.HiveHook.HiveHookObjectNamesCache; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.hadoop.hive.metastore.IHMSHandler; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.events.*; -import org.apache.hadoop.hive.ql.hooks.*; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.session.SessionState; - -import java.util.*; - -import static org.apache.atlas.hive.bridge.HiveMetaStoreBridge.getDatabaseName; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.toTable; - - -public class AtlasHiveHookContext { - public static final char QNAME_SEP_METADATA_NAMESPACE = '@'; - public static final char QNAME_SEP_ENTITY_NAME = '.'; - public static final char QNAME_SEP_PROCESS = ':'; - public static final String TEMP_TABLE_PREFIX = "_temp-"; - public static final String CREATE_OPERATION = "CREATE"; - public static final String ALTER_OPERATION = "ALTER"; - - private final HiveHook hook; - private final HiveOperation hiveOperation; - private final HookContext hiveContext; - private final Hive hive; - private final Map qNameEntityMap = new HashMap<>(); - private final HiveHookObjectNamesCache knownObjects; - private final HiveMetastoreHook metastoreHook; - private final ListenerEvent metastoreEvent; - private final IHMSHandler metastoreHandler; - - private boolean isSkippedInputEntity; - private boolean isSkippedOutputEntity; - private boolean skipTempTables; - - public AtlasHiveHookContext(HiveHook hook, HiveOperation hiveOperation, HookContext hiveContext, - HiveHookObjectNamesCache knownObjects, boolean skipTempTables) throws Exception { - this(hook, hiveOperation, hiveContext, knownObjects, null, null, skipTempTables); - } - - public AtlasHiveHookContext(HiveHook hook, HiveOperation hiveOperation, HiveHookObjectNamesCache knownObjects, - HiveMetastoreHook metastoreHook, ListenerEvent listenerEvent, boolean skipTempTables) throws Exception { - this(hook, hiveOperation, null, knownObjects, metastoreHook, listenerEvent, skipTempTables); - } - - public AtlasHiveHookContext(HiveHook hook, HiveOperation hiveOperation, HookContext hiveContext, HiveHookObjectNamesCache knownObjects, - HiveMetastoreHook metastoreHook, ListenerEvent listenerEvent, boolean skipTempTables) throws Exception { - this.hook = hook; - this.hiveOperation = hiveOperation; - this.hiveContext = hiveContext; - this.hive = hiveContext != null ? Hive.get(hiveContext.getConf()) : null; - this.knownObjects = knownObjects; - this.metastoreHook = metastoreHook; - this.metastoreEvent = listenerEvent; - this.metastoreHandler = (listenerEvent != null) ? 
metastoreEvent.getIHMSHandler() : null; - this.skipTempTables = skipTempTables; - - init(); - } - - public boolean isMetastoreHook() { - return metastoreHook != null; - } - - public ListenerEvent getMetastoreEvent() { - return metastoreEvent; - } - - public IHMSHandler getMetastoreHandler() { - return metastoreHandler; - } - - public Set getInputs() { - return hiveContext != null ? hiveContext.getInputs() : Collections.emptySet(); - } - - public Set getOutputs() { - return hiveContext != null ? hiveContext.getOutputs() : Collections.emptySet(); - } - - public boolean isSkippedInputEntity() { - return isSkippedInputEntity; - } - - public boolean isSkippedOutputEntity() { - return isSkippedOutputEntity; - } - - public void registerSkippedEntity(Entity entity) { - if (entity instanceof ReadEntity) { - registerSkippedInputEntity(); - } else if (entity instanceof WriteEntity) { - registerSkippedOutputEntity(); - } - } - - public void registerSkippedInputEntity() { - if (!isSkippedInputEntity) { - isSkippedInputEntity = true; - } - } - - public void registerSkippedOutputEntity() { - if (!isSkippedOutputEntity) { - isSkippedOutputEntity = true; - } - } - - public boolean isSkipTempTables() { - return skipTempTables; - } - - public LineageInfo getLineageInfo() { - return hiveContext != null ? hiveContext.getLinfo() : null; - } - - public HookContext getHiveContext() { - return hiveContext; - } - - public Hive getHive() { - return hive; - } - - public HiveOperation getHiveOperation() { - return hiveOperation; - } - - public void putEntity(String qualifiedName, AtlasEntity entity) { - qNameEntityMap.put(qualifiedName, entity); - } - - public AtlasEntity getEntity(String qualifiedName) { - return qNameEntityMap.get(qualifiedName); - } - - public Collection getEntities() { return qNameEntityMap.values(); } - - public Map getQNameToEntityMap() { return qNameEntityMap; } - - public String getMetadataNamespace() { - return hook.getMetadataNamespace(); - } - - public String getHostName() { return hook.getHostName(); } - - public boolean isConvertHdfsPathToLowerCase() { - return hook.isConvertHdfsPathToLowerCase(); - } - - public String getAwsS3AtlasModelVersion() { - return hook.getAwsS3AtlasModelVersion(); - } - - public boolean getSkipHiveColumnLineageHive20633() { - return hook.getSkipHiveColumnLineageHive20633(); - } - - public int getSkipHiveColumnLineageHive20633InputsThreshold() { - return hook.getSkipHiveColumnLineageHive20633InputsThreshold(); - } - - public PreprocessAction getPreprocessActionForHiveTable(String qualifiedName) { - return hook.getPreprocessActionForHiveTable(qualifiedName); - } - - public List getIgnoreDummyDatabaseName() { - return hook.getIgnoreDummyDatabaseName(); - } - - public List getIgnoreDummyTableName() { - return hook.getIgnoreDummyTableName(); - } - - public String getIgnoreValuesTmpTableNamePrefix() { - return hook.getIgnoreValuesTmpTableNamePrefix(); - } - - public String getQualifiedName(Database db) { - return getDatabaseName(db) + QNAME_SEP_METADATA_NAMESPACE + getMetadataNamespace(); - } - - public String getQualifiedName(Table table) { - String tableName = table.getTableName(); - - if (table.isTemporary()) { - if (SessionState.get() != null && SessionState.get().getSessionId() != null) { - tableName = tableName + TEMP_TABLE_PREFIX + SessionState.get().getSessionId(); - } else { - tableName = tableName + TEMP_TABLE_PREFIX + RandomStringUtils.random(10); - } - } - - return (table.getDbName() + QNAME_SEP_ENTITY_NAME + tableName + 
QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace(); - } - - public boolean isKnownDatabase(String dbQualifiedName) { - return knownObjects != null && dbQualifiedName != null ? knownObjects.isKnownDatabase(dbQualifiedName) : false; - } - - public boolean isKnownTable(String tblQualifiedName) { - return knownObjects != null && tblQualifiedName != null ? knownObjects.isKnownTable(tblQualifiedName) : false; - } - - public void addToKnownEntities(Collection entities) { - if (knownObjects != null && entities != null) { - knownObjects.addToKnownEntities(entities); - } - } - - public void removeFromKnownDatabase(String dbQualifiedName) { - if (knownObjects != null && dbQualifiedName != null) { - knownObjects.removeFromKnownDatabase(dbQualifiedName); - } - } - - public void removeFromKnownTable(String tblQualifiedName) { - if (knownObjects != null && tblQualifiedName != null) { - knownObjects.removeFromKnownTable(tblQualifiedName); - } - } - - public boolean isHiveProcessPopulateDeprecatedAttributes() { - return hook.isHiveProcessPopulateDeprecatedAttributes(); - } - - private void init() { - if (hiveOperation == null) { - return; - } - - String operation = hiveOperation.getOperationName(); - - if (knownObjects == null || !isCreateAlterOperation(operation)) { - return; - } - - List databases = new ArrayList<>(); - List
tables = new ArrayList<>(); - - if (isMetastoreHook()) { - switch (hiveOperation) { - case CREATEDATABASE: - databases.add(((CreateDatabaseEvent) metastoreEvent).getDatabase()); - break; - case ALTERDATABASE: - databases.add(((AlterDatabaseEvent) metastoreEvent).getOldDatabase()); - databases.add(((AlterDatabaseEvent) metastoreEvent).getNewDatabase()); - break; - case CREATETABLE: - tables.add(toTable(((CreateTableEvent) metastoreEvent).getTable())); - break; - case ALTERTABLE_PROPERTIES: - case ALTERTABLE_RENAME: - case ALTERTABLE_RENAMECOL: - tables.add(toTable(((AlterTableEvent) metastoreEvent).getOldTable())); - tables.add(toTable(((AlterTableEvent) metastoreEvent).getNewTable())); - break; - } - } else { - if (getOutputs() != null) { - for (WriteEntity output : hiveContext.getOutputs()) { - switch (output.getType()) { - case DATABASE: - databases.add(output.getDatabase()); - break; - case TABLE: - tables.add(output.getTable()); - break; - } - } - } - } - - for (Database database : databases) { - knownObjects.removeFromKnownDatabase(getQualifiedName(database)); - } - - for (Table table : tables) { - knownObjects.removeFromKnownTable(getQualifiedName(table)); - } - } - - private static boolean isCreateAlterOperation(String operationName) { - return operationName != null && operationName.startsWith(CREATE_OPERATION) || operationName.startsWith(ALTER_OPERATION); - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java deleted file mode 100644 index 6ea48482ab..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java +++ /dev/null @@ -1,428 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook; - -import org.apache.atlas.hive.hook.events.*; -import org.apache.atlas.hive.hook.utils.ActiveEntityFilter; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.utils.LruCache; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; -import org.apache.hadoop.hive.ql.hooks.HookContext; -import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.shims.Utils; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.regex.Pattern; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.ATTRIBUTE_QUALIFIED_NAME; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.HIVE_TYPE_DB; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.HIVE_TYPE_TABLE; -import static org.apache.atlas.repository.Constants.HS2_SOURCE; - -public class HiveHook extends AtlasHook implements ExecuteWithHookContext { - private static final Logger LOG = LoggerFactory.getLogger(HiveHook.class); - - public enum PreprocessAction { NONE, IGNORE, PRUNE } - - public static final String CONF_PREFIX = "atlas.hook.hive."; - public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase"; - public static final String HOOK_NAME_CACHE_ENABLED = CONF_PREFIX + "name.cache.enabled"; - public static final String HOOK_NAME_CACHE_DATABASE_COUNT = CONF_PREFIX + "name.cache.database.count"; - public static final String HOOK_NAME_CACHE_TABLE_COUNT = CONF_PREFIX + "name.cache.table.count"; - public static final String HOOK_NAME_CACHE_REBUID_INTERVAL_SEC = CONF_PREFIX + "name.cache.rebuild.interval.seconds"; - public static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION = CONF_PREFIX + "aws_s3.atlas.model.version"; - public static final String HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2 = "v2"; - public static final String HOOK_HIVE_PROCESS_POPULATE_DEPRECATED_ATTRIBUTES = CONF_PREFIX + "hive_process.populate.deprecated.attributes"; - public static final String HOOK_SKIP_HIVE_COLUMN_LINEAGE_HIVE_20633 = CONF_PREFIX + "skip.hive_column_lineage.hive-20633"; - public static final String HOOK_SKIP_HIVE_COLUMN_LINEAGE_HIVE_20633_INPUTS_THRESHOLD = CONF_PREFIX + "skip.hive_column_lineage.hive-20633.inputs.threshold"; - public static final String HOOK_HIVE_TABLE_IGNORE_PATTERN = CONF_PREFIX + "hive_table.ignore.pattern"; - public static final String HOOK_HIVE_TABLE_PRUNE_PATTERN = CONF_PREFIX + "hive_table.prune.pattern"; - public static final String HOOK_HIVE_TABLE_CACHE_SIZE = CONF_PREFIX + "hive_table.cache.size"; - public static final String HOOK_HIVE_IGNORE_DDL_OPERATIONS = CONF_PREFIX + "hs2.ignore.ddl.operations"; - 
public static final String HOOK_HIVE_FILTER_ENTITY_ADDITIONAL_TYPES_TO_RETAIN = CONF_PREFIX + "hs2.filter.entity.additional.types.to.retain"; - public static final String HOOK_HIVE_SKIP_TEMP_TABLES = CONF_PREFIX + "skip.temp.tables"; - public static final String DEFAULT_HOST_NAME = "localhost"; - - private static final Map OPERATION_MAP = new HashMap<>(); - - private static final boolean convertHdfsPathToLowerCase; - private static final boolean nameCacheEnabled; - private static final int nameCacheDatabaseMaxCount; - private static final int nameCacheTableMaxCount; - private static final int nameCacheRebuildIntervalSeconds; - private static final String awsS3AtlasModelVersion; - - private static final boolean skipHiveColumnLineageHive20633; - private static final int skipHiveColumnLineageHive20633InputsThreshold; - private static final List hiveTablesToIgnore = new ArrayList<>(); - private static final List hiveTablesToPrune = new ArrayList<>(); - private static final Map hiveTablesCache; - private static final List ignoreDummyDatabaseName; - private static final List ignoreDummyTableName; - private static final String ignoreValuesTmpTableNamePrefix; - private static final boolean hiveProcessPopulateDeprecatedAttributes; - private static HiveHookObjectNamesCache knownObjects = null; - private static String hostName; - private static boolean skipTempTables = true; - - static { - for (HiveOperation hiveOperation : HiveOperation.values()) { - OPERATION_MAP.put(hiveOperation.getOperationName(), hiveOperation); - } - - convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false); - nameCacheEnabled = atlasProperties.getBoolean(HOOK_NAME_CACHE_ENABLED, true); - nameCacheDatabaseMaxCount = atlasProperties.getInt(HOOK_NAME_CACHE_DATABASE_COUNT, 10000); - nameCacheTableMaxCount = atlasProperties.getInt(HOOK_NAME_CACHE_TABLE_COUNT, 10000); - nameCacheRebuildIntervalSeconds = atlasProperties.getInt(HOOK_NAME_CACHE_REBUID_INTERVAL_SEC, 60 * 60); // 60 minutes default - awsS3AtlasModelVersion = atlasProperties.getString(HOOK_AWS_S3_ATLAS_MODEL_VERSION, HOOK_AWS_S3_ATLAS_MODEL_VERSION_V2); - skipHiveColumnLineageHive20633 = atlasProperties.getBoolean(HOOK_SKIP_HIVE_COLUMN_LINEAGE_HIVE_20633, false); - skipHiveColumnLineageHive20633InputsThreshold = atlasProperties.getInt(HOOK_SKIP_HIVE_COLUMN_LINEAGE_HIVE_20633_INPUTS_THRESHOLD, 15); // skip if avg # of inputs is > 15 - hiveProcessPopulateDeprecatedAttributes = atlasProperties.getBoolean(HOOK_HIVE_PROCESS_POPULATE_DEPRECATED_ATTRIBUTES, false); - String[] patternHiveTablesToIgnore = atlasProperties.getStringArray(HOOK_HIVE_TABLE_IGNORE_PATTERN); - String[] patternHiveTablesToPrune = atlasProperties.getStringArray(HOOK_HIVE_TABLE_PRUNE_PATTERN); - - if (patternHiveTablesToIgnore != null) { - for (String pattern : patternHiveTablesToIgnore) { - try { - hiveTablesToIgnore.add(Pattern.compile(pattern)); - - LOG.info("{}={}", HOOK_HIVE_TABLE_IGNORE_PATTERN, pattern); - } catch (Throwable t) { - LOG.warn("failed to compile pattern {}", pattern, t); - LOG.warn("Ignoring invalid pattern in configuration {}: {}", HOOK_HIVE_TABLE_IGNORE_PATTERN, pattern); - } - } - } - - if (patternHiveTablesToPrune != null) { - for (String pattern : patternHiveTablesToPrune) { - try { - hiveTablesToPrune.add(Pattern.compile(pattern)); - - LOG.info("{}={}", HOOK_HIVE_TABLE_PRUNE_PATTERN, pattern); - } catch (Throwable t) { - LOG.warn("failed to compile pattern {}", pattern, t); - LOG.warn("Ignoring invalid pattern in configuration {}: {}", 
HOOK_HIVE_TABLE_PRUNE_PATTERN, pattern); - } - } - } - - if (!hiveTablesToIgnore.isEmpty() || !hiveTablesToPrune.isEmpty()) { - hiveTablesCache = new LruCache<>(atlasProperties.getInt(HOOK_HIVE_TABLE_CACHE_SIZE, 10000), 0); - } else { - hiveTablesCache = Collections.emptyMap(); - } - - knownObjects = nameCacheEnabled ? new HiveHookObjectNamesCache(nameCacheDatabaseMaxCount, nameCacheTableMaxCount, nameCacheRebuildIntervalSeconds) : null; - - List defaultDummyDatabase = new ArrayList<>(); - List defaultDummyTable = new ArrayList<>(); - - defaultDummyDatabase.add(SemanticAnalyzer.DUMMY_DATABASE); - defaultDummyTable.add(SemanticAnalyzer.DUMMY_TABLE); - - ignoreDummyDatabaseName = atlasProperties.getList("atlas.hook.hive.ignore.dummy.database.name", defaultDummyDatabase); - ignoreDummyTableName = atlasProperties.getList("atlas.hook.hive.ignore.dummy.table.name", defaultDummyTable); - ignoreValuesTmpTableNamePrefix = atlasProperties.getString("atlas.hook.hive.ignore.values.tmp.table.name.prefix", "Values__Tmp__Table__"); - skipTempTables = atlasProperties.getBoolean(HOOK_HIVE_SKIP_TEMP_TABLES, true); - - try { - hostName = InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - LOG.warn("No hostname found. Setting the hostname to default value {}", DEFAULT_HOST_NAME, e); - hostName = DEFAULT_HOST_NAME; - } - - ActiveEntityFilter.init(atlasProperties); - } - - - public HiveHook() { - } - - public HiveHook(String name) { - super(name); - } - - public String getMessageSource() { - return HS2_SOURCE; - } - - @Override - public void run(HookContext hookContext) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveHook.run({})", hookContext.getOperationName()); - } - - try { - HiveOperation oper = OPERATION_MAP.get(hookContext.getOperationName()); - AtlasHiveHookContext context = new AtlasHiveHookContext(this, oper, hookContext, getKnownObjects(), isSkipTempTables()); - BaseHiveEvent event = null; - - switch (oper) { - case CREATEDATABASE: - event = new CreateDatabase(context); - break; - - case DROPDATABASE: - event = new DropDatabase(context); - break; - - case ALTERDATABASE: - case ALTERDATABASE_OWNER: - case ALTERDATABASE_LOCATION: - event = new AlterDatabase(context); - break; - - case CREATETABLE: - event = new CreateTable(context); - break; - - case DROPTABLE: - case DROPVIEW: - case DROP_MATERIALIZED_VIEW: - event = new DropTable(context); - break; - - case CREATETABLE_AS_SELECT: - case CREATE_MATERIALIZED_VIEW: - case CREATEVIEW: - case ALTERVIEW_AS: - case LOAD: - case EXPORT: - case IMPORT: - case QUERY: - event = new CreateHiveProcess(context); - break; - - case ALTERTABLE_FILEFORMAT: - case ALTERTABLE_CLUSTER_SORT: - case ALTERTABLE_BUCKETNUM: - case ALTERTABLE_PROPERTIES: - case ALTERVIEW_PROPERTIES: - case ALTERTABLE_SERDEPROPERTIES: - case ALTERTABLE_SERIALIZER: - case ALTERTABLE_ADDCOLS: - case ALTERTABLE_REPLACECOLS: - case ALTERTABLE_PARTCOLTYPE: - case ALTERTABLE_LOCATION: - event = new AlterTable(context); - break; - - case ALTERTABLE_RENAME: - case ALTERVIEW_RENAME: - event = new AlterTableRename(context); - break; - - case ALTERTABLE_RENAMECOL: - event = new AlterTableRenameCol(context); - break; - - default: - if (LOG.isDebugEnabled()) { - LOG.debug("HiveHook.run({}): operation ignored", hookContext.getOperationName()); - } - break; - } - - if (event != null) { - final UserGroupInformation ugi = hookContext.getUgi() == null ? 
Utils.getUGI() : hookContext.getUgi(); - - super.notifyEntities(ActiveEntityFilter.apply(event.getNotificationMessages()), ugi); - } - } catch (Throwable t) { - LOG.error("HiveHook.run(): failed to process operation {}", hookContext.getOperationName(), t); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveHook.run({})", hookContext.getOperationName()); - } - } - - public boolean isConvertHdfsPathToLowerCase() { - return convertHdfsPathToLowerCase; - } - - public String getAwsS3AtlasModelVersion() { - return awsS3AtlasModelVersion; - } - - public boolean getSkipHiveColumnLineageHive20633() { - return skipHiveColumnLineageHive20633; - } - - public int getSkipHiveColumnLineageHive20633InputsThreshold() { - return skipHiveColumnLineageHive20633InputsThreshold; - } - - public List getIgnoreDummyDatabaseName() { - return ignoreDummyDatabaseName; - } - - public List getIgnoreDummyTableName() { - return ignoreDummyTableName; - } - - public String getIgnoreValuesTmpTableNamePrefix() { - return ignoreValuesTmpTableNamePrefix; - } - - public boolean isHiveProcessPopulateDeprecatedAttributes() { - return hiveProcessPopulateDeprecatedAttributes; - } - - public static boolean isSkipTempTables() { - return skipTempTables; - } - - public PreprocessAction getPreprocessActionForHiveTable(String qualifiedName) { - PreprocessAction ret = PreprocessAction.NONE; - - if (qualifiedName != null && (CollectionUtils.isNotEmpty(hiveTablesToIgnore) || CollectionUtils.isNotEmpty(hiveTablesToPrune))) { - ret = hiveTablesCache.get(qualifiedName); - - if (ret == null) { - if (isMatch(qualifiedName, hiveTablesToIgnore)) { - ret = PreprocessAction.IGNORE; - } else if (isMatch(qualifiedName, hiveTablesToPrune)) { - ret = PreprocessAction.PRUNE; - } else { - ret = PreprocessAction.NONE; - } - - hiveTablesCache.put(qualifiedName, ret); - } - } - - return ret; - } - - private boolean isMatch(String name, List patterns) { - boolean ret = false; - - for (Pattern p : patterns) { - if (p.matcher(name).matches()) { - ret = true; - - break; - } - } - - return ret; - } - - public static HiveHookObjectNamesCache getKnownObjects() { - if (knownObjects != null && knownObjects.isCacheExpired()) { - LOG.info("HiveHook.run(): purging cached databaseNames ({}) and tableNames ({})", knownObjects.getCachedDbCount(), knownObjects.getCachedTableCount()); - - knownObjects = new HiveHook.HiveHookObjectNamesCache(nameCacheDatabaseMaxCount, nameCacheTableMaxCount, nameCacheRebuildIntervalSeconds); - } - - return knownObjects; - } - - public String getHostName() { - return hostName; - } - - public static class HiveHookObjectNamesCache { - private final int dbMaxCacheCount; - private final int tblMaxCacheCount; - private final long cacheExpiryTimeMs; - private final Set knownDatabases; - private final Set knownTables; - - public HiveHookObjectNamesCache(int dbMaxCacheCount, int tblMaxCacheCount, long nameCacheRebuildIntervalSeconds) { - this.dbMaxCacheCount = dbMaxCacheCount; - this.tblMaxCacheCount = tblMaxCacheCount; - this.cacheExpiryTimeMs = nameCacheRebuildIntervalSeconds <= 0 ? 
Long.MAX_VALUE : (System.currentTimeMillis() + (nameCacheRebuildIntervalSeconds * 1000)); - this.knownDatabases = Collections.synchronizedSet(new HashSet<>()); - this.knownTables = Collections.synchronizedSet(new HashSet<>()); - } - - public int getCachedDbCount() { - return knownDatabases.size(); - } - - public int getCachedTableCount() { - return knownTables.size(); - } - - public boolean isCacheExpired() { - return System.currentTimeMillis() > cacheExpiryTimeMs; - } - - public boolean isKnownDatabase(String dbQualifiedName) { - return knownDatabases.contains(dbQualifiedName); - } - - public boolean isKnownTable(String tblQualifiedName) { - return knownTables.contains(tblQualifiedName); - } - - public void addToKnownEntities(Collection entities) { - for (AtlasEntity entity : entities) { - if (StringUtils.equalsIgnoreCase(entity.getTypeName(), HIVE_TYPE_DB)) { - addToKnownDatabase((String) entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } else if (StringUtils.equalsIgnoreCase(entity.getTypeName(), HIVE_TYPE_TABLE)) { - addToKnownTable((String) entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } - } - } - - public void addToKnownDatabase(String dbQualifiedName) { - if (knownDatabases.size() < dbMaxCacheCount) { - knownDatabases.add(dbQualifiedName); - } - } - - public void addToKnownTable(String tblQualifiedName) { - if (knownTables.size() < tblMaxCacheCount) { - knownTables.add(tblQualifiedName); - } - } - - public void removeFromKnownDatabase(String dbQualifiedName) { - knownDatabases.remove(dbQualifiedName); - } - - public void removeFromKnownTable(String tblQualifiedName) { - if (tblQualifiedName != null) { - knownTables.remove(tblQualifiedName); - } - } - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHookImpl.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHookImpl.java deleted file mode 100644 index 33266ce0b3..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveMetastoreHookImpl.java +++ /dev/null @@ -1,216 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.hive.hook; - -import org.apache.atlas.hive.hook.events.*; -import org.apache.atlas.hook.AtlasHook; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.MetaStoreEventListener; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.events.*; -import org.apache.hadoop.hive.metastore.utils.SecurityUtils; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.shims.Utils; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -import static org.apache.atlas.hive.hook.events.AlterTableRenameCol.findRenamedColumn; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.toTable; -import static org.apache.atlas.repository.Constants.HMS_SOURCE; -import static org.apache.hadoop.hive.ql.plan.HiveOperation.*; - -public class HiveMetastoreHookImpl extends MetaStoreEventListener { - private static final Logger LOG = LoggerFactory.getLogger(HiveMetastoreHookImpl.class); - private final HiveHook hiveHook; - private final HiveMetastoreHook hook; - - public HiveMetastoreHookImpl(Configuration config) { - super(config); - - this.hiveHook = new HiveHook(this.getClass().getSimpleName()); - this.hook = new HiveMetastoreHook(); - } - - @Override - public void onCreateDatabase(CreateDatabaseEvent dbEvent) { - HiveOperationContext context = new HiveOperationContext(CREATEDATABASE, dbEvent); - - hook.handleEvent(context); - } - - @Override - public void onDropDatabase(DropDatabaseEvent dbEvent) { - HiveOperationContext context = new HiveOperationContext(DROPDATABASE, dbEvent); - - hook.handleEvent(context); - } - - @Override - public void onAlterDatabase(AlterDatabaseEvent dbEvent) { - HiveOperationContext context = new HiveOperationContext(ALTERDATABASE, dbEvent); - - hook.handleEvent(context); - } - - @Override - public void onCreateTable(CreateTableEvent tableEvent) { - HiveOperationContext context = new HiveOperationContext(CREATETABLE, tableEvent); - - hook.handleEvent(context); - } - - @Override - public void onDropTable(DropTableEvent tableEvent) { - HiveOperationContext context = new HiveOperationContext(DROPTABLE, tableEvent); - - hook.handleEvent(context); - } - - @Override - public void onAlterTable(AlterTableEvent tableEvent) { - HiveOperationContext context = new HiveOperationContext(tableEvent); - Table oldTable = toTable(tableEvent.getOldTable()); - Table newTable = toTable(tableEvent.getNewTable()); - - if (isTableRename(oldTable, newTable)) { - context.setOperation(ALTERTABLE_RENAME); - } else if (isColumnRename(oldTable, newTable, context)) { - context.setOperation(ALTERTABLE_RENAMECOL); - } else if(isAlterTableProperty(tableEvent, "last_modified_time") || - isAlterTableProperty(tableEvent, "transient_lastDdlTime")) { - context.setOperation(ALTERTABLE_PROPERTIES); // map other alter table operations to ALTERTABLE_PROPERTIES - } - - hook.handleEvent(context); - } - - public class HiveMetastoreHook extends AtlasHook { - public HiveMetastoreHook() { - } - - @Override - public String getMessageSource() { - return HMS_SOURCE; - } - - public void handleEvent(HiveOperationContext operContext) { - ListenerEvent listenerEvent = operContext.getEvent(); - - if (!listenerEvent.getStatus()) { - return; - } - - try { - HiveOperation oper = operContext.getOperation(); - AtlasHiveHookContext context = new 
AtlasHiveHookContext(hiveHook, oper, hiveHook.getKnownObjects(), this, listenerEvent, hiveHook.isSkipTempTables()); - BaseHiveEvent event = null; - - switch (oper) { - case CREATEDATABASE: - event = new CreateDatabase(context); - break; - - case DROPDATABASE: - event = new DropDatabase(context); - break; - - case ALTERDATABASE: - event = new AlterDatabase(context); - break; - - case CREATETABLE: - event = new CreateTable(context); - break; - - case DROPTABLE: - event = new DropTable(context); - break; - - case ALTERTABLE_PROPERTIES: - event = new AlterTable(context); - break; - - case ALTERTABLE_RENAME: - event = new AlterTableRename(context); - break; - - case ALTERTABLE_RENAMECOL: - FieldSchema columnOld = operContext.getColumnOld(); - FieldSchema columnNew = operContext.getColumnNew(); - - event = new AlterTableRenameCol(columnOld, columnNew, context); - break; - - default: - if (LOG.isDebugEnabled()) { - LOG.debug("HiveMetastoreHook.handleEvent({}): operation ignored.", listenerEvent); - } - break; - } - - if (event != null) { - final UserGroupInformation ugi = SecurityUtils.getUGI() == null ? Utils.getUGI() : SecurityUtils.getUGI(); - - super.notifyEntities(event.getNotificationMessages(), ugi); - } - } catch (Throwable t) { - LOG.error("HiveMetastoreHook.handleEvent({}): failed to process operation {}", listenerEvent, t); - } - } - } - - private static boolean isTableRename(Table oldTable, Table newTable) { - String oldTableName = oldTable.getTableName(); - String newTableName = newTable.getTableName(); - - return !StringUtils.equalsIgnoreCase(oldTableName, newTableName); - } - - private static boolean isColumnRename(Table oldTable, Table newTable, HiveOperationContext context) { - FieldSchema columnOld = findRenamedColumn(oldTable, newTable); - FieldSchema columnNew = findRenamedColumn(newTable, oldTable); - boolean isColumnRename = columnOld != null && columnNew != null; - - if (isColumnRename) { - context.setColumnOld(columnOld); - context.setColumnNew(columnNew); - } - - return isColumnRename; - } - - private boolean isAlterTableProperty(AlterTableEvent tableEvent, String propertyToCheck) { - final boolean ret; - String oldTableModifiedTime = tableEvent.getOldTable().getParameters().get(propertyToCheck); - String newTableModifiedTime = tableEvent.getNewTable().getParameters().get(propertyToCheck); - - - if (oldTableModifiedTime == null) { - ret = newTableModifiedTime != null; - } else { - ret = !oldTableModifiedTime.equals(newTableModifiedTime); - } - - return ret; - - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveOperationContext.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveOperationContext.java deleted file mode 100644 index 23ea4be690..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveOperationContext.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook; - -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.events.ListenerEvent; -import org.apache.hadoop.hive.ql.plan.HiveOperation; - -public class HiveOperationContext { - HiveOperation operation; - ListenerEvent event; - FieldSchema columnOld; - FieldSchema columnNew; - - public HiveOperationContext(ListenerEvent event) { - this(null, event); - } - - public HiveOperationContext(HiveOperation operation, ListenerEvent event) { - setOperation(operation); - setEvent(event); - setColumnOld(null); - setColumnNew(null); - } - - public ListenerEvent getEvent() { - return event; - } - - public void setEvent(ListenerEvent event) { - this.event = event; - } - - public HiveOperation getOperation() { - return operation; - } - - public void setOperation(HiveOperation operation) { - this.operation = operation; - } - - public FieldSchema getColumnOld() { - return columnOld; - } - - public void setColumnOld(FieldSchema columnOld) { - this.columnOld = columnOld; - } - - public FieldSchema getColumnNew() { - return columnNew; - } - - public void setColumnNew(FieldSchema columnNew) { - this.columnNew = columnNew; - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterDatabase.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterDatabase.java deleted file mode 100644 index d2623b3636..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterDatabase.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.List; - -public class AlterDatabase extends CreateDatabase { - private static final Logger LOG = LoggerFactory.getLogger(AlterDatabase.class); - - public AlterDatabase(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = context.isMetastoreHook() ? getHiveMetastoreEntities() : getHiveEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityUpdateRequestV2(getUserName(), entities)); - } - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveMetastoreEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo(); - AlterDatabaseEvent dbEvent = (AlterDatabaseEvent) context.getMetastoreEvent(); - Database oldDb = dbEvent.getOldDatabase(); - Database newDb = dbEvent.getNewDatabase(); - - if (newDb != null) { - AtlasEntity dbEntity = toDbEntity(newDb); - - ret.addEntity(dbEntity); - - addLocationEntities(dbEntity, ret); - } else { - LOG.error("AlterDatabase.getEntities(): failed to retrieve db"); - } - - addProcessedEntities(ret); - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception { - return super.getHiveEntities(); - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTable.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTable.java deleted file mode 100644 index d2f09cc108..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTable.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2; -import org.apache.commons.collections.CollectionUtils; - -import java.util.Collections; -import java.util.List; - -public class AlterTable extends CreateTable { - public AlterTable(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = context.isMetastoreHook() ? getHiveMetastoreEntities() : getHiveEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityUpdateRequestV2(getUserName(), entities)); - } - - return ret; - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRename.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRename.java deleted file mode 100644 index 6961fa7c2c..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRename.java +++ /dev/null @@ -1,198 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityPartialUpdateRequestV2; -import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.hive.metastore.events.AlterTableEvent; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.WriteEntity; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; - -public class AlterTableRename extends BaseHiveEvent { - private static final Logger LOG = LoggerFactory.getLogger(AlterTableRename.class); - - public AlterTableRename(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - return context.isMetastoreHook() ? getHiveMetastoreMessages() : getHiveMessages(); - } - - public List getHiveMetastoreMessages() throws Exception { - List ret = new ArrayList<>(); - AlterTableEvent tblEvent = (AlterTableEvent) context.getMetastoreEvent(); - Table oldTable = toTable(tblEvent.getOldTable()); - Table newTable = toTable(tblEvent.getNewTable()); - - if (newTable == null) { - LOG.error("AlterTableRename: renamed table not found in outputs list"); - - return ret; - } - - processTables(oldTable, newTable, ret); - - return ret; - } - - public List getHiveMessages() throws Exception { - List ret = new ArrayList<>(); - Table oldTable; - Table newTable; - - if (CollectionUtils.isEmpty(getInputs())) { - LOG.error("AlterTableRename: old-table not found in inputs list"); - - return ret; - } - - oldTable = getInputs().iterator().next().getTable(); - newTable = null; - - if (CollectionUtils.isNotEmpty(getOutputs())) { - for (WriteEntity entity : getOutputs()) { - if (entity.getType() == Entity.Type.TABLE) { - newTable = entity.getTable(); - - //Hive sends with both old and new table names in the outputs which is weird. 
So skipping that with the below check - if (StringUtils.equalsIgnoreCase(newTable.getDbName(), oldTable.getDbName()) && - StringUtils.equalsIgnoreCase(newTable.getTableName(), oldTable.getTableName())) { - newTable = null; - - continue; - } - - newTable = getHive().getTable(newTable.getDbName(), newTable.getTableName()); - - break; - } - } - } - - if (newTable == null) { - LOG.error("AlterTableRename: renamed table not found in outputs list"); - - return ret; - } - - processTables(oldTable, newTable, ret); - - return ret; - } - - private void processTables(Table oldTable, Table newTable, List ret) throws Exception { - AtlasEntityWithExtInfo oldTableEntity = toTableEntity(oldTable); - AtlasEntityWithExtInfo renamedTableEntity = toTableEntity(newTable); - - if (oldTableEntity == null || renamedTableEntity == null) { - return; - } - - // update qualifiedName for all columns, partitionKeys, storageDesc - String renamedTableQualifiedName = (String) renamedTableEntity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME); - - renameColumns((List) oldTableEntity.getEntity().getRelationshipAttribute(ATTRIBUTE_COLUMNS), oldTableEntity, renamedTableQualifiedName, ret); - renameColumns((List) oldTableEntity.getEntity().getRelationshipAttribute(ATTRIBUTE_PARTITION_KEYS), oldTableEntity, renamedTableQualifiedName, ret); - renameStorageDesc(oldTableEntity, renamedTableEntity, ret); - - // set previous name as the alias - renamedTableEntity.getEntity().setAttribute(ATTRIBUTE_ALIASES, Collections.singletonList(oldTable.getTableName())); - - // make a copy of renamedTableEntity to send as partial-update with no relationship attributes - AtlasEntity renamedTableEntityForPartialUpdate = new AtlasEntity(renamedTableEntity.getEntity()); - renamedTableEntityForPartialUpdate.setRelationshipAttributes(null); - - String oldTableQualifiedName = (String) oldTableEntity.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME); - AtlasObjectId oldTableId = new AtlasObjectId(oldTableEntity.getEntity().getTypeName(), ATTRIBUTE_QUALIFIED_NAME, oldTableQualifiedName); - - // update qualifiedName and other attributes (like params - which include lastModifiedTime, lastModifiedBy) of the table - ret.add(new EntityPartialUpdateRequestV2(getUserName(), oldTableId, new AtlasEntityWithExtInfo(renamedTableEntityForPartialUpdate))); - - // to handle cases where Atlas didn't have the oldTable, send a full update - ret.add(new EntityUpdateRequestV2(getUserName(), new AtlasEntitiesWithExtInfo(renamedTableEntity))); - - // partial update relationship attribute ddl - if (!context.isMetastoreHook()) { - AtlasEntity ddlEntity = createHiveDDLEntity(renamedTableEntity.getEntity(), true); - - if (ddlEntity != null) { - ret.add(new HookNotification.EntityCreateRequestV2(getUserName(), new AtlasEntitiesWithExtInfo(ddlEntity))); - } - } - - context.removeFromKnownTable(oldTableQualifiedName); - } - - private void renameColumns(List columns, AtlasEntityExtInfo oldEntityExtInfo, String newTableQualifiedName, List notifications) { - if (CollectionUtils.isNotEmpty(columns)) { - for (AtlasObjectId columnId : columns) { - AtlasEntity oldColumn = oldEntityExtInfo.getEntity(columnId.getGuid()); - AtlasObjectId oldColumnId = new AtlasObjectId(oldColumn.getTypeName(), ATTRIBUTE_QUALIFIED_NAME, oldColumn.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - AtlasEntity newColumn = new AtlasEntity(oldColumn.getTypeName(), ATTRIBUTE_QUALIFIED_NAME, getColumnQualifiedName(newTableQualifiedName, (String) oldColumn.getAttribute(ATTRIBUTE_NAME))); - - notifications.add(new 
EntityPartialUpdateRequestV2(getUserName(), oldColumnId, new AtlasEntityWithExtInfo(newColumn))); - } - } - } - - private void renameStorageDesc(AtlasEntityWithExtInfo oldEntityExtInfo, AtlasEntityWithExtInfo newEntityExtInfo, List notifications) { - AtlasEntity oldSd = getStorageDescEntity(oldEntityExtInfo); - AtlasEntity newSd = new AtlasEntity(getStorageDescEntity(newEntityExtInfo)); // make a copy of newSd, since we will be setting relationshipAttributes to 'null' below - // and we need relationship attributes later during entity full update - - if (oldSd != null && newSd != null) { - AtlasObjectId oldSdId = new AtlasObjectId(oldSd.getTypeName(), ATTRIBUTE_QUALIFIED_NAME, oldSd.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - - newSd.removeAttribute(ATTRIBUTE_TABLE); - newSd.setRelationshipAttributes(null); - - notifications.add(new EntityPartialUpdateRequestV2(getUserName(), oldSdId, new AtlasEntityWithExtInfo(newSd))); - } - } - - private AtlasEntity getStorageDescEntity(AtlasEntityWithExtInfo tableEntity) { - AtlasEntity ret = null; - - if (tableEntity != null && tableEntity.getEntity() != null) { - Object attrSdId = tableEntity.getEntity().getRelationshipAttribute(ATTRIBUTE_STORAGEDESC); - - if (attrSdId instanceof AtlasObjectId) { - ret = tableEntity.getReferredEntity(((AtlasObjectId) attrSdId).getGuid()); - } - } - - return ret; - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRenameCol.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRenameCol.java deleted file mode 100644 index 29ca920c64..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/AlterTableRenameCol.java +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityPartialUpdateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.events.AlterTableEvent; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; - -public class AlterTableRenameCol extends AlterTable { - private static final Logger LOG = LoggerFactory.getLogger(AlterTableRenameCol.class); - private final FieldSchema columnOld; - private final FieldSchema columnNew; - - public AlterTableRenameCol(AtlasHiveHookContext context) { - this(null, null, context); - } - - public AlterTableRenameCol(FieldSchema columnOld, FieldSchema columnNew, AtlasHiveHookContext context) { - super(context); - - this.columnOld = columnOld; - this.columnNew = columnNew; - } - - @Override - public List getNotificationMessages() throws Exception { - return context.isMetastoreHook() ? getHiveMetastoreMessages() : getHiveMessages(); - } - - public List getHiveMetastoreMessages() throws Exception { - List baseMsgs = super.getNotificationMessages(); - List ret = new ArrayList<>(baseMsgs); - AlterTableEvent tblEvent = (AlterTableEvent) context.getMetastoreEvent(); - Table oldTable = toTable(tblEvent.getOldTable()); - Table newTable = toTable(tblEvent.getNewTable()); - - processColumns(oldTable, newTable, ret); - - return ret; - } - - public List getHiveMessages() throws Exception { - List baseMsgs = super.getNotificationMessages(); - - if (CollectionUtils.isEmpty(getInputs())) { - LOG.error("AlterTableRenameCol: old-table not found in inputs list"); - - return null; - } - - if (CollectionUtils.isEmpty(getOutputs())) { - LOG.error("AlterTableRenameCol: new-table not found in outputs list"); - - return null; - } - - if (CollectionUtils.isEmpty(baseMsgs)) { - LOG.debug("Skipped processing of column-rename (on a temporary table?)"); - - return null; - } - - List ret = new ArrayList<>(baseMsgs); - Table oldTable = getInputs().iterator().next().getTable(); - Table newTable = getOutputs().iterator().next().getTable(); - - if (newTable != null) { - newTable = getHive().getTable(newTable.getDbName(), newTable.getTableName()); - } - - processColumns(oldTable, newTable, ret); - - return ret; - } - - private void processColumns(Table oldTable, Table newTable, List ret) { - FieldSchema changedColumnOld = (columnOld == null) ? findRenamedColumn(oldTable, newTable) : columnOld; - FieldSchema changedColumnNew = (columnNew == null) ? 
findRenamedColumn(newTable, oldTable) : columnNew; - - if (changedColumnOld != null && changedColumnNew != null) { - AtlasObjectId oldColumnId = new AtlasObjectId(HIVE_TYPE_COLUMN, ATTRIBUTE_QUALIFIED_NAME, getQualifiedName(oldTable, changedColumnOld)); - AtlasEntity newColumn = new AtlasEntity(HIVE_TYPE_COLUMN); - - newColumn.setAttribute(ATTRIBUTE_NAME, changedColumnNew.getName()); - newColumn.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getQualifiedName(newTable, changedColumnNew)); - - ret.add(0, new EntityPartialUpdateRequestV2(getUserName(), oldColumnId, new AtlasEntityWithExtInfo(newColumn))); - } else { - LOG.error("AlterTableRenameCol: no renamed column detected"); - } - } - - public static FieldSchema findRenamedColumn(Table inputTable, Table outputTable) { - FieldSchema ret = null; - List inputColumns = inputTable.getCols(); - List outputColumns = outputTable.getCols(); - - for (FieldSchema inputColumn : inputColumns) { - if (!outputColumns.contains(inputColumn)) { - ret = inputColumn; - - break; - } - } - - return ret; - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java deleted file mode 100644 index 3f358139be..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java +++ /dev/null @@ -1,1189 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.hive.hook.HiveHook.PreprocessAction; -import org.apache.atlas.utils.PathExtractorContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.AtlasRelatedObjectId; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AtlasPathExtractorUtil; -import org.apache.atlas.utils.HdfsNameServiceResolver; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.utils.SecurityUtils; -import org.apache.hadoop.hive.ql.hooks.*; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.net.URI; -import java.util.*; - -import static org.apache.atlas.hive.bridge.HiveMetaStoreBridge.getDatabaseName; -import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_METADATA_NAMESPACE; -import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_ENTITY_NAME; -import static org.apache.atlas.hive.hook.AtlasHiveHookContext.QNAME_SEP_PROCESS; - -public abstract class BaseHiveEvent { - private static final Logger LOG = LoggerFactory.getLogger(BaseHiveEvent.class); - - public static final String HIVE_TYPE_DB = "hive_db"; - public static final String HIVE_TYPE_TABLE = "hive_table"; - public static final String HIVE_TYPE_STORAGEDESC = "hive_storagedesc"; - public static final String HIVE_TYPE_COLUMN = "hive_column"; - public static final String HIVE_TYPE_PROCESS = "hive_process"; - public static final String HIVE_TYPE_COLUMN_LINEAGE = "hive_column_lineage"; - public static final String HIVE_TYPE_SERDE = "hive_serde"; - public static final String HIVE_TYPE_ORDER = "hive_order"; - public static final String HIVE_TYPE_PROCESS_EXECUTION = "hive_process_execution"; - public static final String HIVE_DB_DDL = "hive_db_ddl"; - public static final String HIVE_TABLE_DDL = "hive_table_ddl"; - public static final String HBASE_TYPE_TABLE = "hbase_table"; - public static final 
String HBASE_TYPE_NAMESPACE = "hbase_namespace"; - public static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName"; - public static final String ATTRIBUTE_NAME = "name"; - public static final String ATTRIBUTE_DESCRIPTION = "description"; - public static final String ATTRIBUTE_OWNER = "owner"; - public static final String ATTRIBUTE_CLUSTER_NAME = "clusterName"; - public static final String ATTRIBUTE_LOCATION = "location"; - public static final String ATTRIBUTE_LOCATION_PATH = "locationPath"; - public static final String ATTRIBUTE_PARAMETERS = "parameters"; - public static final String ATTRIBUTE_OWNER_TYPE = "ownerType"; - public static final String ATTRIBUTE_COMMENT = "comment"; - public static final String ATTRIBUTE_CREATE_TIME = "createTime"; - public static final String ATTRIBUTE_LAST_ACCESS_TIME = "lastAccessTime"; - public static final String ATTRIBUTE_VIEW_ORIGINAL_TEXT = "viewOriginalText"; - public static final String ATTRIBUTE_VIEW_EXPANDED_TEXT = "viewExpandedText"; - public static final String ATTRIBUTE_TABLE_TYPE = "tableType"; - public static final String ATTRIBUTE_TEMPORARY = "temporary"; - public static final String ATTRIBUTE_RETENTION = "retention"; - public static final String ATTRIBUTE_DB = "db"; - public static final String ATTRIBUTE_HIVE_DB = "hiveDb"; - public static final String ATTRIBUTE_STORAGEDESC = "sd"; - public static final String ATTRIBUTE_PARTITION_KEYS = "partitionKeys"; - public static final String ATTRIBUTE_COLUMNS = "columns"; - public static final String ATTRIBUTE_INPUT_FORMAT = "inputFormat"; - public static final String ATTRIBUTE_OUTPUT_FORMAT = "outputFormat"; - public static final String ATTRIBUTE_COMPRESSED = "compressed"; - public static final String ATTRIBUTE_BUCKET_COLS = "bucketCols"; - public static final String ATTRIBUTE_NUM_BUCKETS = "numBuckets"; - public static final String ATTRIBUTE_STORED_AS_SUB_DIRECTORIES = "storedAsSubDirectories"; - public static final String ATTRIBUTE_TABLE = "table"; - public static final String ATTRIBUTE_SERDE_INFO = "serdeInfo"; - public static final String ATTRIBUTE_SERIALIZATION_LIB = "serializationLib"; - public static final String ATTRIBUTE_SORT_COLS = "sortCols"; - public static final String ATTRIBUTE_COL_TYPE = "type"; - public static final String ATTRIBUTE_COL_POSITION = "position"; - public static final String ATTRIBUTE_PATH = "path"; - public static final String ATTRIBUTE_NAMESERVICE_ID = "nameServiceId"; - public static final String ATTRIBUTE_INPUTS = "inputs"; - public static final String ATTRIBUTE_OUTPUTS = "outputs"; - public static final String ATTRIBUTE_OPERATION_TYPE = "operationType"; - public static final String ATTRIBUTE_START_TIME = "startTime"; - public static final String ATTRIBUTE_USER_NAME = "userName"; - public static final String ATTRIBUTE_QUERY_TEXT = "queryText"; - public static final String ATTRIBUTE_PROCESS = "process"; - public static final String ATTRIBUTE_PROCESS_EXECUTIONS = "processExecutions"; - public static final String ATTRIBUTE_QUERY_ID = "queryId"; - public static final String ATTRIBUTE_QUERY_PLAN = "queryPlan"; - public static final String ATTRIBUTE_END_TIME = "endTime"; - public static final String ATTRIBUTE_RECENT_QUERIES = "recentQueries"; - public static final String ATTRIBUTE_QUERY = "query"; - public static final String ATTRIBUTE_DEPENDENCY_TYPE = "depenendencyType"; - public static final String ATTRIBUTE_EXPRESSION = "expression"; - public static final String ATTRIBUTE_ALIASES = "aliases"; - public static final String ATTRIBUTE_URI = "uri"; - public static final 
String ATTRIBUTE_STORAGE_HANDLER = "storage_handler"; - public static final String ATTRIBUTE_NAMESPACE = "namespace"; - public static final String ATTRIBUTE_HOSTNAME = "hostName"; - public static final String ATTRIBUTE_EXEC_TIME = "execTime"; - public static final String ATTRIBUTE_DDL_QUERIES = "ddlQueries"; - public static final String ATTRIBUTE_SERVICE_TYPE = "serviceType"; - public static final String ATTRIBUTE_GUID = "guid"; - public static final String ATTRIBUTE_UNIQUE_ATTRIBUTES = "uniqueAttributes"; - public static final String HBASE_STORAGE_HANDLER_CLASS = "org.apache.hadoop.hive.hbase.HBaseStorageHandler"; - public static final String HBASE_DEFAULT_NAMESPACE = "default"; - public static final String HBASE_NAMESPACE_TABLE_DELIMITER = ":"; - public static final String HBASE_PARAM_TABLE_NAME = "hbase.table.name"; - public static final long MILLIS_CONVERT_FACTOR = 1000; - public static final String HDFS_PATH_PREFIX = "hdfs://"; - public static final String EMPTY_ATTRIBUTE_VALUE = ""; - - public static final String RELATIONSHIP_DATASET_PROCESS_INPUTS = "dataset_process_inputs"; - public static final String RELATIONSHIP_PROCESS_DATASET_OUTPUTS = "process_dataset_outputs"; - public static final String RELATIONSHIP_HIVE_PROCESS_COLUMN_LINEAGE = "hive_process_column_lineage"; - public static final String RELATIONSHIP_HIVE_TABLE_DB = "hive_table_db"; - public static final String RELATIONSHIP_HIVE_TABLE_PART_KEYS = "hive_table_partitionkeys"; - public static final String RELATIONSHIP_HIVE_TABLE_COLUMNS = "hive_table_columns"; - public static final String RELATIONSHIP_HIVE_TABLE_STORAGE_DESC = "hive_table_storagedesc"; - public static final String RELATIONSHIP_HIVE_PROCESS_PROCESS_EXE = "hive_process_process_executions"; - public static final String RELATIONSHIP_HIVE_DB_DDL_QUERIES = "hive_db_ddl_queries"; - public static final String RELATIONSHIP_HIVE_DB_LOCATION = "hive_db_location"; - public static final String RELATIONSHIP_HIVE_TABLE_DDL_QUERIES = "hive_table_ddl_queries"; - public static final String RELATIONSHIP_HBASE_TABLE_NAMESPACE = "hbase_table_namespace"; - - - public static final Map OWNER_TYPE_TO_ENUM_VALUE = new HashMap<>(); - - protected final boolean skipTempTables; - - static { - OWNER_TYPE_TO_ENUM_VALUE.put(1, "USER"); - OWNER_TYPE_TO_ENUM_VALUE.put(2, "ROLE"); - OWNER_TYPE_TO_ENUM_VALUE.put(3, "GROUP"); - } - - protected final AtlasHiveHookContext context; - - - protected BaseHiveEvent(AtlasHiveHookContext context) { - this.context = context; - this.skipTempTables = context.isSkipTempTables(); - } - - public AtlasHiveHookContext getContext() { - return context; - } - - public List getNotificationMessages() throws Exception { - return null; - } - - public static long getTableCreateTime(Table table) { - return table.getTTable() != null ? (table.getTTable().getCreateTime() * MILLIS_CONVERT_FACTOR) : System.currentTimeMillis(); - } - - public static String getTableOwner(Table table) { - return table.getTTable() != null ? 
(table.getOwner()): ""; - } - - - public static List getObjectIds(List entities) { - final List ret; - - if (CollectionUtils.isNotEmpty(entities)) { - ret = new ArrayList<>(entities.size()); - - for (AtlasEntity entity : entities) { - ret.add(AtlasTypeUtil.getObjectId(entity)); - } - } else { - ret = Collections.emptyList(); - } - - return ret; - } - - - protected void addProcessedEntities(AtlasEntitiesWithExtInfo entitiesWithExtInfo) { - for (AtlasEntity entity : context.getEntities()) { - entitiesWithExtInfo.addReferredEntity(entity); - } - - entitiesWithExtInfo.compact(); - - context.addToKnownEntities(entitiesWithExtInfo.getEntities()); - - if (entitiesWithExtInfo.getReferredEntities() != null) { - context.addToKnownEntities(entitiesWithExtInfo.getReferredEntities().values()); - } - } - - protected AtlasEntity getInputOutputEntity(Entity entity, AtlasEntityExtInfo entityExtInfo, boolean skipTempTables) throws Exception { - AtlasEntity ret = null; - - switch(entity.getType()) { - case TABLE: - case PARTITION: - case DFS_DIR: - case LOCAL_DIR: { - ret = toAtlasEntity(entity, entityExtInfo, skipTempTables); - } - break; - } - - return ret; - } - - protected AtlasEntity toAtlasEntity(Entity entity, AtlasEntityExtInfo entityExtInfo, boolean skipTempTables) throws Exception { - AtlasEntity ret = null; - - switch (entity.getType()) { - case DATABASE: { - String dbName = getDatabaseName(entity.getDatabase()); - - if (!context.getIgnoreDummyDatabaseName().contains(dbName)) { - Database db = getHive().getDatabase(dbName); - - ret = toDbEntity(db); - } - } - break; - - case TABLE: - case PARTITION: { - String dbName = entity.getTable().getDbName(); - String tableName = entity.getTable().getTableName(); - boolean skipTable = StringUtils.isNotEmpty(context.getIgnoreValuesTmpTableNamePrefix()) && tableName.toLowerCase().startsWith(context.getIgnoreValuesTmpTableNamePrefix()); - - if (!skipTable) { - skipTable = context.getIgnoreDummyTableName().contains(tableName) && context.getIgnoreDummyDatabaseName().contains(dbName); - } - - if (!skipTable) { - skipTable = skipTempTables && entity.getTable().isTemporary(); - } - - if (!skipTable) { - Table table = getHive().getTable(dbName, tableName); - - ret = toTableEntity(table, entityExtInfo); - } else { - context.registerSkippedEntity(entity); - } - } - break; - - case DFS_DIR: - case LOCAL_DIR: { - URI location = entity.getLocation(); - - if (location != null) { - ret = getPathEntity(new Path(entity.getLocation()), entityExtInfo); - } - } - break; - - default: - break; - } - - return ret; - } - - protected AtlasEntity toDbEntity(Database db) throws Exception { - String dbName = getDatabaseName(db); - String dbQualifiedName = getQualifiedName(db); - boolean isKnownDatabase = context.isKnownDatabase(dbQualifiedName); - AtlasEntity ret = context.getEntity(dbQualifiedName); - - if (ret == null) { - ret = new AtlasEntity(HIVE_TYPE_DB); - - // if this DB was sent in an earlier notification, set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - if (isKnownDatabase) { - ret.setGuid(null); - } - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName); - ret.setAttribute(ATTRIBUTE_NAME, dbName); - - if (StringUtils.isNotEmpty(db.getDescription())) { - ret.setAttribute(ATTRIBUTE_DESCRIPTION, db.getDescription()); - } - - ret.setAttribute(ATTRIBUTE_OWNER, db.getOwnerName()); - - ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, getMetadataNamespace()); 
- ret.setAttribute(ATTRIBUTE_LOCATION, HdfsNameServiceResolver.getPathWithNameServiceID(db.getLocationUri())); - ret.setAttribute(ATTRIBUTE_PARAMETERS, db.getParameters()); - - if (db.getOwnerType() != null) { - ret.setAttribute(ATTRIBUTE_OWNER_TYPE, OWNER_TYPE_TO_ENUM_VALUE.get(db.getOwnerType().getValue())); - } - - context.putEntity(dbQualifiedName, ret); - } - - return ret; - } - - protected AtlasEntityWithExtInfo toTableEntity(Table table) throws Exception { - AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(); - - AtlasEntity entity = toTableEntity(table, ret); - - if (entity != null) { - ret.setEntity(entity); - } else { - ret = null; - } - - return ret; - } - - protected AtlasEntity toTableEntity(Table table, AtlasEntitiesWithExtInfo entities) throws Exception { - AtlasEntity ret = toTableEntity(table, (AtlasEntityExtInfo) entities); - - if (ret != null) { - entities.addEntity(ret); - } - - return ret; - } - - protected AtlasEntity toTableEntity(Table table, AtlasEntityExtInfo entityExtInfo) throws Exception { - Database db = getDatabases(table.getDbName()); - AtlasEntity dbEntity = toDbEntity(db); - - if (entityExtInfo != null) { - if (dbEntity != null) { - entityExtInfo.addReferredEntity(dbEntity); - } - } - - AtlasEntity ret = toTableEntity(AtlasTypeUtil.getObjectId(dbEntity), table, entityExtInfo); - - return ret; - } - - protected AtlasEntity toTableEntity(AtlasObjectId dbId, Table table, AtlasEntityExtInfo entityExtInfo) throws Exception { - String tblQualifiedName = getQualifiedName(table); - boolean isKnownTable = context.isKnownTable(tblQualifiedName); - - AtlasEntity ret = context.getEntity(tblQualifiedName); - - if (ret == null) { - PreprocessAction action = context.getPreprocessActionForHiveTable(tblQualifiedName); - - if (action == PreprocessAction.IGNORE) { - LOG.info("ignoring table {}", tblQualifiedName); - } else { - ret = new AtlasEntity(HIVE_TYPE_TABLE); - - // if this table was sent in an earlier notification, set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - if (isKnownTable && !isAlterTableOperation()) { - ret.setGuid(null); - } - - long createTime = getTableCreateTime(table); - long lastAccessTime = table.getLastAccessTime() > 0 ? 
(table.getLastAccessTime() * MILLIS_CONVERT_FACTOR) : createTime; - - AtlasRelatedObjectId dbRelatedObject = new AtlasRelatedObjectId(dbId, RELATIONSHIP_HIVE_TABLE_DB); - - ret.setRelationshipAttribute(ATTRIBUTE_DB, dbRelatedObject ); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, tblQualifiedName); - ret.setAttribute(ATTRIBUTE_NAME, table.getTableName().toLowerCase()); - ret.setAttribute(ATTRIBUTE_OWNER, table.getOwner()); - ret.setAttribute(ATTRIBUTE_CREATE_TIME, createTime); - ret.setAttribute(ATTRIBUTE_LAST_ACCESS_TIME, lastAccessTime); - ret.setAttribute(ATTRIBUTE_RETENTION, table.getRetention()); - ret.setAttribute(ATTRIBUTE_PARAMETERS, table.getParameters()); - ret.setAttribute(ATTRIBUTE_COMMENT, table.getParameters().get(ATTRIBUTE_COMMENT)); - ret.setAttribute(ATTRIBUTE_TABLE_TYPE, table.getTableType().name()); - ret.setAttribute(ATTRIBUTE_TEMPORARY, table.isTemporary()); - - if (table.getViewOriginalText() != null) { - ret.setAttribute(ATTRIBUTE_VIEW_ORIGINAL_TEXT, table.getViewOriginalText()); - } - - if (table.getViewExpandedText() != null) { - ret.setAttribute(ATTRIBUTE_VIEW_EXPANDED_TEXT, table.getViewExpandedText()); - } - - boolean pruneTable = table.isTemporary() || action == PreprocessAction.PRUNE; - - if (pruneTable) { - LOG.info("ignoring details of table {}", tblQualifiedName); - } else { - AtlasObjectId tableId = AtlasTypeUtil.getObjectId(ret); - AtlasEntity sd = getStorageDescEntity(tableId, table); - List partitionKeys = getColumnEntities(tableId, table, table.getPartitionKeys(), RELATIONSHIP_HIVE_TABLE_PART_KEYS); - List columns = getColumnEntities(tableId, table, table.getCols(), RELATIONSHIP_HIVE_TABLE_COLUMNS); - - - - if (entityExtInfo != null) { - entityExtInfo.addReferredEntity(sd); - - if (partitionKeys != null) { - for (AtlasEntity partitionKey : partitionKeys) { - entityExtInfo.addReferredEntity(partitionKey); - } - } - - if (columns != null) { - for (AtlasEntity column : columns) { - entityExtInfo.addReferredEntity(column); - } - } - } - - - ret.setRelationshipAttribute(ATTRIBUTE_STORAGEDESC, AtlasTypeUtil.getAtlasRelatedObjectId(sd, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC)); - ret.setRelationshipAttribute(ATTRIBUTE_PARTITION_KEYS, AtlasTypeUtil.getAtlasRelatedObjectIds(partitionKeys, RELATIONSHIP_HIVE_TABLE_PART_KEYS)); - ret.setRelationshipAttribute(ATTRIBUTE_COLUMNS, AtlasTypeUtil.getAtlasRelatedObjectIds(columns, RELATIONSHIP_HIVE_TABLE_COLUMNS)); - } - - context.putEntity(tblQualifiedName, ret); - } - } - - return ret; - } - - protected AtlasEntity getStorageDescEntity(AtlasObjectId tableId, Table table) { - String sdQualifiedName = getQualifiedName(table, table.getSd()); - boolean isKnownTable = tableId.getGuid() == null; - - AtlasEntity ret = context.getEntity(sdQualifiedName); - - if (ret == null) { - ret = new AtlasEntity(HIVE_TYPE_STORAGEDESC); - - // if sd's table was sent in an earlier notification, set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - if (isKnownTable) { - ret.setGuid(null); - } - - StorageDescriptor sd = table.getSd(); - - AtlasRelatedObjectId tableRelatedObject = new AtlasRelatedObjectId(tableId, RELATIONSHIP_HIVE_TABLE_STORAGE_DESC); - - ret.setRelationshipAttribute(ATTRIBUTE_TABLE, tableRelatedObject); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, sdQualifiedName); - ret.setAttribute(ATTRIBUTE_PARAMETERS, sd.getParameters()); - ret.setAttribute(ATTRIBUTE_LOCATION, 
HdfsNameServiceResolver.getPathWithNameServiceID(sd.getLocation())); - ret.setAttribute(ATTRIBUTE_INPUT_FORMAT, sd.getInputFormat()); - ret.setAttribute(ATTRIBUTE_OUTPUT_FORMAT, sd.getOutputFormat()); - ret.setAttribute(ATTRIBUTE_COMPRESSED, sd.isCompressed()); - ret.setAttribute(ATTRIBUTE_NUM_BUCKETS, sd.getNumBuckets()); - ret.setAttribute(ATTRIBUTE_STORED_AS_SUB_DIRECTORIES, sd.isStoredAsSubDirectories()); - - if (sd.getBucketCols() != null && sd.getBucketCols().size() > 0) { - ret.setAttribute(ATTRIBUTE_BUCKET_COLS, sd.getBucketCols()); - } - - if (sd.getSerdeInfo() != null) { - AtlasStruct serdeInfo = new AtlasStruct(HIVE_TYPE_SERDE); - SerDeInfo sdSerDeInfo = sd.getSerdeInfo(); - - serdeInfo.setAttribute(ATTRIBUTE_NAME, sdSerDeInfo.getName()); - serdeInfo.setAttribute(ATTRIBUTE_SERIALIZATION_LIB, sdSerDeInfo.getSerializationLib()); - serdeInfo.setAttribute(ATTRIBUTE_PARAMETERS, sdSerDeInfo.getParameters()); - - ret.setAttribute(ATTRIBUTE_SERDE_INFO, serdeInfo); - } - - if (CollectionUtils.isNotEmpty(sd.getSortCols())) { - List sortCols = new ArrayList<>(sd.getSortCols().size()); - - for (Order sdSortCol : sd.getSortCols()) { - AtlasStruct sortcol = new AtlasStruct(HIVE_TYPE_ORDER); - - sortcol.setAttribute("col", sdSortCol.getCol()); - sortcol.setAttribute("order", sdSortCol.getOrder()); - - sortCols.add(sortcol); - } - - ret.setAttribute(ATTRIBUTE_SORT_COLS, sortCols); - } - - context.putEntity(sdQualifiedName, ret); - } - - return ret; - } - - protected List getColumnEntities(AtlasObjectId tableId, Table table, List fieldSchemas, String relationshipType) { - List ret = new ArrayList<>(); - boolean isKnownTable = tableId.getGuid() == null; - int columnPosition = 0; - - if (CollectionUtils.isNotEmpty(fieldSchemas)) { - for (FieldSchema fieldSchema : fieldSchemas) { - String colQualifiedName = getQualifiedName(table, fieldSchema); - AtlasEntity column = context.getEntity(colQualifiedName); - - if (column == null) { - column = new AtlasEntity(HIVE_TYPE_COLUMN); - - // if column's table was sent in an earlier notification, set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - if (isKnownTable) { - column.setGuid(null); - } - AtlasRelatedObjectId relatedObjectId = new AtlasRelatedObjectId(tableId, relationshipType); - column.setRelationshipAttribute(ATTRIBUTE_TABLE, (relatedObjectId)); - column.setAttribute(ATTRIBUTE_QUALIFIED_NAME, colQualifiedName); - column.setAttribute(ATTRIBUTE_NAME, fieldSchema.getName()); - column.setAttribute(ATTRIBUTE_OWNER, table.getOwner()); - column.setAttribute(ATTRIBUTE_COL_TYPE, fieldSchema.getType()); - column.setAttribute(ATTRIBUTE_COL_POSITION, columnPosition++); - column.setAttribute(ATTRIBUTE_COMMENT, fieldSchema.getComment()); - - context.putEntity(colQualifiedName, column); - } - - ret.add(column); - } - } - - return ret; - } - - protected AtlasEntity getPathEntity(Path path, AtlasEntityExtInfo extInfo) { - String strPath = path.toString(); - String metadataNamespace = getMetadataNamespace(); - boolean isConvertPathToLowerCase = strPath.startsWith(HDFS_PATH_PREFIX) && context.isConvertHdfsPathToLowerCase(); - PathExtractorContext pathExtractorContext = new PathExtractorContext(metadataNamespace, context.getQNameToEntityMap(), - isConvertPathToLowerCase, context.getAwsS3AtlasModelVersion()); - - AtlasEntityWithExtInfo entityWithExtInfo = AtlasPathExtractorUtil.getPathEntity(path, pathExtractorContext); - - if 
(entityWithExtInfo.getReferredEntities() != null){ - for (AtlasEntity entity : entityWithExtInfo.getReferredEntities().values()) { - extInfo.addReferredEntity(entity); - } - } - - return entityWithExtInfo.getEntity(); - } - - protected AtlasEntity getHiveProcessEntity(List inputs, List outputs) throws Exception { - AtlasEntity ret = new AtlasEntity(HIVE_TYPE_PROCESS); - String queryStr = getQueryString(); - String qualifiedName = getQualifiedName(inputs, outputs); - - if (queryStr != null) { - queryStr = queryStr.toLowerCase().trim(); - } - - ret.setAttribute(ATTRIBUTE_OPERATION_TYPE, getOperationName()); - - if (context.isMetastoreHook()) { - HiveOperation operation = context.getHiveOperation(); - - if (operation == HiveOperation.CREATETABLE || operation == HiveOperation.CREATETABLE_AS_SELECT) { - AtlasEntity table = outputs.get(0); - long createTime = Long.valueOf((Long)table.getAttribute(ATTRIBUTE_CREATE_TIME)); - qualifiedName = (String) table.getAttribute(ATTRIBUTE_QUALIFIED_NAME) + QNAME_SEP_PROCESS + createTime; - - ret.setAttribute(ATTRIBUTE_NAME, "dummyProcess:" + UUID.randomUUID()); - ret.setAttribute(ATTRIBUTE_OPERATION_TYPE, operation.getOperationName()); - } - } - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, qualifiedName); - ret.setAttribute(ATTRIBUTE_NAME, qualifiedName); - ret.setRelationshipAttribute(ATTRIBUTE_INPUTS, AtlasTypeUtil.getAtlasRelatedObjectIds(inputs, RELATIONSHIP_DATASET_PROCESS_INPUTS)); - ret.setRelationshipAttribute(ATTRIBUTE_OUTPUTS, AtlasTypeUtil.getAtlasRelatedObjectIds(outputs, RELATIONSHIP_PROCESS_DATASET_OUTPUTS)); - - // We are setting an empty value to these attributes, since now we have a new entity type called hive process - // execution which captures these values. We have to set empty values here because these attributes are - // mandatory attributes for hive process entity type. 
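(Editorial aside, not part of the deleted source: a minimal, self-contained sketch of how the long-lived hive_process and the per-run hive_process_execution relate, mirroring getHiveProcessEntity() and getHiveProcessExecutionEntity() in this class. The entity and relationship names and the AtlasTypeUtil helper are taken from the surrounding code; all attribute values, and the simplified qualifiedName, are hypothetical.)

import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.type.AtlasTypeUtil;

public class HiveProcessSketch {
    public static void main(String[] args) {
        // one long-lived lineage entity per unique (inputs -> outputs) combination;
        // deprecated per-run attributes are left empty, as the comment above explains
        AtlasEntity process = new AtlasEntity("hive_process");
        process.setAttribute("qualifiedName", "query:default.src@primary->:default.dst@primary"); // hypothetical
        process.setAttribute("name",          "query:default.src@primary->:default.dst@primary");
        process.setAttribute("userName",  "");
        process.setAttribute("queryText", "");
        process.setAttribute("queryId",   "");

        // one hive_process_execution per run, carrying user, query text and timings,
        // linked back to the process through the hive_process_process_executions relationship
        AtlasEntity execution = new AtlasEntity("hive_process_execution");
        execution.setAttribute("userName",  "etl_user");                           // hypothetical
        execution.setAttribute("queryText", "insert into dst select * from src");  // hypothetical
        execution.setAttribute("startTime", System.currentTimeMillis());
        execution.setAttribute("endTime",   System.currentTimeMillis());
        execution.setRelationshipAttribute("process",
                AtlasTypeUtil.toAtlasRelatedObjectId(process, "hive_process_process_executions"));

        System.out.println(process);
        System.out.println(execution);
    }
}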
- ret.setAttribute(ATTRIBUTE_START_TIME, System.currentTimeMillis()); - ret.setAttribute(ATTRIBUTE_END_TIME, System.currentTimeMillis()); - - if (context.isHiveProcessPopulateDeprecatedAttributes()) { - ret.setAttribute(ATTRIBUTE_USER_NAME, getUserName()); - ret.setAttribute(ATTRIBUTE_QUERY_TEXT, queryStr); - ret.setAttribute(ATTRIBUTE_QUERY_ID, getQueryId()); - } else { - ret.setAttribute(ATTRIBUTE_USER_NAME, EMPTY_ATTRIBUTE_VALUE); - ret.setAttribute(ATTRIBUTE_QUERY_TEXT, EMPTY_ATTRIBUTE_VALUE); - ret.setAttribute(ATTRIBUTE_QUERY_ID, EMPTY_ATTRIBUTE_VALUE); - } - - ret.setAttribute(ATTRIBUTE_QUERY_PLAN, "Not Supported"); - ret.setAttribute(ATTRIBUTE_RECENT_QUERIES, Collections.singletonList(queryStr)); - ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, getMetadataNamespace()); - - return ret; - } - - protected AtlasEntity getHiveProcessExecutionEntity(AtlasEntity hiveProcess) throws Exception { - AtlasEntity ret = new AtlasEntity(HIVE_TYPE_PROCESS_EXECUTION); - String queryStr = getQueryString(); - - if (queryStr != null) { - queryStr = queryStr.toLowerCase().trim(); - } - - Long endTime = System.currentTimeMillis(); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, hiveProcess.getAttribute(ATTRIBUTE_QUALIFIED_NAME).toString() + - QNAME_SEP_PROCESS + getQueryStartTime().toString() + - QNAME_SEP_PROCESS + endTime.toString()); - ret.setAttribute(ATTRIBUTE_NAME, ret.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - ret.setAttribute(ATTRIBUTE_START_TIME, getQueryStartTime()); - ret.setAttribute(ATTRIBUTE_END_TIME, endTime); - ret.setAttribute(ATTRIBUTE_USER_NAME, getUserName()); - ret.setAttribute(ATTRIBUTE_QUERY_TEXT, queryStr); - ret.setAttribute(ATTRIBUTE_QUERY_ID, getQueryId()); - ret.setAttribute(ATTRIBUTE_QUERY_PLAN, "Not Supported"); - ret.setAttribute(ATTRIBUTE_HOSTNAME, getContext().getHostName()); // - AtlasRelatedObjectId hiveProcessRelationObjectId = AtlasTypeUtil.toAtlasRelatedObjectId(hiveProcess, RELATIONSHIP_HIVE_PROCESS_PROCESS_EXE); - ret.setRelationshipAttribute(ATTRIBUTE_PROCESS, hiveProcessRelationObjectId); - return ret; - } - - protected AtlasEntity createHiveDDLEntity(AtlasEntity dbOrTable) { - return createHiveDDLEntity(dbOrTable, false); - } - - protected AtlasEntity createHiveDDLEntity(AtlasEntity dbOrTable, boolean excludeEntityGuid) { - AtlasObjectId objId = AtlasTypeUtil.getObjectId(dbOrTable); - AtlasEntity hiveDDL = null; - - if (excludeEntityGuid) { - objId.setGuid(null); - } - AtlasRelatedObjectId objIdRelatedObject = new AtlasRelatedObjectId(objId); - - if (StringUtils.equals(objId.getTypeName(), HIVE_TYPE_DB)) { - hiveDDL = new AtlasEntity(HIVE_DB_DDL); - objIdRelatedObject.setRelationshipType(RELATIONSHIP_HIVE_DB_DDL_QUERIES); - hiveDDL.setRelationshipAttribute(ATTRIBUTE_DB, objIdRelatedObject); - } else if (StringUtils.equals(objId.getTypeName(), HIVE_TYPE_TABLE)) { - hiveDDL = new AtlasEntity(HIVE_TABLE_DDL); - objIdRelatedObject.setRelationshipType(RELATIONSHIP_HIVE_TABLE_DDL_QUERIES); - hiveDDL.setRelationshipAttribute( ATTRIBUTE_TABLE, objIdRelatedObject); - } - - if (hiveDDL != null) { - hiveDDL.setAttribute(ATTRIBUTE_SERVICE_TYPE, "hive"); - hiveDDL.setAttribute(ATTRIBUTE_EXEC_TIME, getQueryStartTime()); - hiveDDL.setAttribute(ATTRIBUTE_QUERY_TEXT, getQueryString()); - hiveDDL.setAttribute(ATTRIBUTE_USER_NAME, getUserName()); - hiveDDL.setAttribute(ATTRIBUTE_NAME, getQueryString()); - hiveDDL.setAttribute(ATTRIBUTE_QUALIFIED_NAME, dbOrTable.getAttribute(ATTRIBUTE_QUALIFIED_NAME).toString() - + QNAME_SEP_PROCESS + getQueryStartTime().toString()); - } - - return 
hiveDDL; - } - - protected AtlasEntity createHiveLocationEntity(AtlasEntity dbEntity, AtlasEntitiesWithExtInfo extInfoEntity) { - AtlasEntity ret = null; - String locationUri = (String)dbEntity.getAttribute(ATTRIBUTE_LOCATION); - - if (StringUtils.isNotEmpty(locationUri)) { - Path path = null; - - try { - path = new Path(locationUri); - } catch (IllegalArgumentException excp) { - LOG.warn("failed to create Path from locationUri {}", locationUri, excp); - } - - if (path != null) { - ret = getPathEntity(path, extInfoEntity); - - if (ret != null) { - AtlasRelatedObjectId dbRelatedObjectId = AtlasTypeUtil.getAtlasRelatedObjectId(dbEntity, RELATIONSHIP_HIVE_DB_LOCATION); - - ret.setRelationshipAttribute(ATTRIBUTE_HIVE_DB, dbRelatedObjectId); - } - } - } - - return ret; - } - - protected String getMetadataNamespace() { - return context.getMetadataNamespace(); - } - - protected Database getDatabases(String dbName) throws Exception { - return context.isMetastoreHook() ? context.getMetastoreHandler().get_database(dbName) : - context.getHive().getDatabase(dbName); - } - - protected Hive getHive() { - return context.getHive(); - } - - protected Set getInputs() { - return context != null ? context.getInputs() : Collections.emptySet(); - } - - protected Set getOutputs() { - return context != null ? context.getOutputs() : Collections.emptySet(); - } - - protected LineageInfo getLineageInfo() { - return context != null ? context.getLineageInfo() : null; - } - - protected String getQueryString() { - return isHiveContextValid() ? context.getHiveContext().getQueryPlan().getQueryStr() : null; - } - - protected String getOperationName() { - return isHiveContextValid() ? context.getHiveContext().getOperationName() : null; - } - - protected String getHiveUserName() { - return isHiveContextValid() ? context.getHiveContext().getUserName() : null; - } - - protected UserGroupInformation getUgi() { - return isHiveContextValid() ? context.getHiveContext().getUgi() : null; - } - - protected Long getQueryStartTime() { - return isHiveContextValid() ? context.getHiveContext().getQueryPlan().getQueryStartTime() : System.currentTimeMillis(); - } - - protected String getQueryId() { - return isHiveContextValid() ? 
context.getHiveContext().getQueryPlan().getQueryId() : null; - } - - private boolean isHiveContextValid() { - return context != null && context.getHiveContext() != null; - } - - protected String getUserName() { - String ret = null; - UserGroupInformation ugi = null; - - if (context.isMetastoreHook()) { - try { - ugi = SecurityUtils.getUGI(); - } catch (Exception e) { - //do nothing - } - } else { - ret = getHiveUserName(); - - if (StringUtils.isEmpty(ret)) { - ugi = getUgi(); - } - } - - if (ugi != null) { - ret = ugi.getShortUserName(); - } - - if (StringUtils.isEmpty(ret)) { - try { - ret = UserGroupInformation.getCurrentUser().getShortUserName(); - } catch (IOException e) { - LOG.warn("Failed for UserGroupInformation.getCurrentUser() ", e); - - ret = System.getProperty("user.name"); - } - } - - return ret; - } - - protected String getQualifiedName(Entity entity) throws Exception { - switch (entity.getType()) { - case DATABASE: - return getQualifiedName(entity.getDatabase()); - - case TABLE: - case PARTITION: - return getQualifiedName(entity.getTable()); - - case DFS_DIR: - case LOCAL_DIR: - return getQualifiedName(entity.getLocation()); - } - - return null; - } - - protected String getQualifiedName(Database db) { - return context.getQualifiedName(db); - } - - protected String getQualifiedName(Table table) { - return context.getQualifiedName(table); - } - - protected String getQualifiedName(Table table, StorageDescriptor sd) { - return getQualifiedName(table) + "_storage"; - } - - protected String getQualifiedName(Table table, FieldSchema column) { - String tblQualifiedName = getQualifiedName(table); - - int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_METADATA_NAMESPACE); - - if (sepPos == -1) { - return tblQualifiedName + QNAME_SEP_ENTITY_NAME + column.getName().toLowerCase(); - } else { - return tblQualifiedName.substring(0, sepPos) + QNAME_SEP_ENTITY_NAME + column.getName().toLowerCase() + tblQualifiedName.substring(sepPos); - } - } - - protected String getQualifiedName(DependencyKey column) { - String dbName = column.getDataContainer().getTable().getDbName(); - String tableName = column.getDataContainer().getTable().getTableName(); - String colName = column.getFieldSchema().getName(); - - return getQualifiedName(dbName, tableName, colName); - } - - protected String getQualifiedName(BaseColumnInfo column) { - String dbName = column.getTabAlias().getTable().getDbName(); - String tableName = column.getTabAlias().getTable().getTableName(); - String colName = column.getColumn() != null ? 
column.getColumn().getName() : null; - String metadataNamespace = getMetadataNamespace(); - - if (colName == null) { - return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace; - } else { - return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + metadataNamespace; - } - } - - protected String getQualifiedName(String dbName, String tableName, String colName) { - return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + colName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace(); - } - - protected String getQualifiedName(URI location) { - String strPath = new Path(location).toString(); - - if (strPath.startsWith(HDFS_PATH_PREFIX) && context.isConvertHdfsPathToLowerCase()) { - strPath = strPath.toLowerCase(); - } - - String nameServiceID = HdfsNameServiceResolver.getNameServiceIDForPath(strPath); - String attrPath = StringUtils.isEmpty(nameServiceID) ? strPath : HdfsNameServiceResolver.getPathWithNameServiceID(strPath); - - return getQualifiedName(attrPath); - } - - protected String getQualifiedName(String path) { - if (path.startsWith(HdfsNameServiceResolver.HDFS_SCHEME)) { - return path + QNAME_SEP_METADATA_NAMESPACE + getMetadataNamespace(); - } - - return path.toLowerCase(); - } - - protected String getColumnQualifiedName(String tblQualifiedName, String columnName) { - int sepPos = tblQualifiedName.lastIndexOf(QNAME_SEP_METADATA_NAMESPACE); - - if (sepPos == -1) { - return tblQualifiedName + QNAME_SEP_ENTITY_NAME + columnName.toLowerCase(); - } else { - return tblQualifiedName.substring(0, sepPos) + QNAME_SEP_ENTITY_NAME + columnName.toLowerCase() + tblQualifiedName.substring(sepPos); - } - - } - - protected String getQualifiedName(List inputs, List outputs) throws Exception { - HiveOperation operation = context.getHiveOperation(); - - if (operation == HiveOperation.CREATETABLE || - operation == HiveOperation.CREATETABLE_AS_SELECT || - operation == HiveOperation.CREATEVIEW || - operation == HiveOperation.ALTERVIEW_AS || - operation == HiveOperation.ALTERTABLE_LOCATION) { - List sortedEntities = new ArrayList<>(getOutputs()); - - Collections.sort(sortedEntities, entityComparator); - - for (Entity entity : sortedEntities) { - if (entity.getType() == Entity.Type.TABLE) { - Table table = entity.getTable(); - - table = getHive().getTable(table.getDbName(), table.getTableName()); - - long createTime = getTableCreateTime(table); - - return getQualifiedName(table) + QNAME_SEP_PROCESS + createTime; - } - } - } - - String qualifiedName = null; - String operationName = getOperationName(); - - if (operationName != null) { - StringBuilder sb = new StringBuilder(operationName); - - boolean ignoreHDFSPaths = ignoreHDFSPathsinProcessQualifiedName(); - - addToProcessQualifiedName(sb, getInputs(), ignoreHDFSPaths); - sb.append("->"); - addToProcessQualifiedName(sb, getOutputs(), ignoreHDFSPaths); - - qualifiedName = sb.toString(); - } - - - return qualifiedName; - } - - protected AtlasEntity toReferencedHBaseTable(Table table, AtlasEntitiesWithExtInfo entities) { - AtlasEntity ret = null; - HBaseTableInfo hBaseTableInfo = new HBaseTableInfo(table); - String hbaseNameSpace = hBaseTableInfo.getHbaseNameSpace(); - String hbaseTableName = hBaseTableInfo.getHbaseTableName(); - String metadataNamespace = getMetadataNamespace(); - - if (hbaseTableName != null) { - AtlasEntity nsEntity = new AtlasEntity(HBASE_TYPE_NAMESPACE); - 
nsEntity.setAttribute(ATTRIBUTE_NAME, hbaseNameSpace); - nsEntity.setAttribute(ATTRIBUTE_CLUSTER_NAME, metadataNamespace); - nsEntity.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseNameSpaceQualifiedName(metadataNamespace, hbaseNameSpace)); - - ret = new AtlasEntity(HBASE_TYPE_TABLE); - - ret.setAttribute(ATTRIBUTE_NAME, hbaseTableName); - ret.setAttribute(ATTRIBUTE_URI, hbaseTableName); - - AtlasRelatedObjectId objIdRelatedObject = new AtlasRelatedObjectId(AtlasTypeUtil.getObjectId(nsEntity), RELATIONSHIP_HBASE_TABLE_NAMESPACE); - - ret.setRelationshipAttribute(ATTRIBUTE_NAMESPACE, objIdRelatedObject); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getHBaseTableQualifiedName(metadataNamespace, hbaseNameSpace, hbaseTableName)); - - entities.addReferredEntity(nsEntity); - entities.addEntity(ret); - } - - return ret; - } - - protected boolean isHBaseStore(Table table) { - boolean ret = false; - Map parameters = table.getParameters(); - - if (MapUtils.isNotEmpty(parameters)) { - String storageHandler = parameters.get(ATTRIBUTE_STORAGE_HANDLER); - - ret = (storageHandler != null && storageHandler.equals(HBASE_STORAGE_HANDLER_CLASS)); - } - - return ret; - } - - private static String getHBaseTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) { - return String.format("%s:%s@%s", nameSpace.toLowerCase(), tableName.toLowerCase(), metadataNamespace); - } - - private static String getHBaseNameSpaceQualifiedName(String metadataNamespace, String nameSpace) { - return String.format("%s@%s", nameSpace.toLowerCase(), metadataNamespace); - } - - private boolean ignoreHDFSPathsinProcessQualifiedName() { - switch (context.getHiveOperation()) { - case LOAD: - case IMPORT: - return hasPartitionEntity(getOutputs()); - case EXPORT: - return hasPartitionEntity(getInputs()); - case QUERY: - return true; - } - - return false; - } - - private boolean hasPartitionEntity(Collection entities) { - if (entities != null) { - for (Entity entity : entities) { - if (entity.getType() == Entity.Type.PARTITION) { - return true; - } - } - } - - return false; - } - - private void addToProcessQualifiedName(StringBuilder processQualifiedName, Collection entities, boolean ignoreHDFSPaths) { - if (entities == null) { - return; - } - - List sortedEntities = new ArrayList<>(entities); - - Collections.sort(sortedEntities, entityComparator); - - Set dataSetsProcessed = new HashSet<>(); - Map tableMap = new HashMap<>(); - - for (Entity entity : sortedEntities) { - if (ignoreHDFSPaths && (Entity.Type.DFS_DIR.equals(entity.getType()) || Entity.Type.LOCAL_DIR.equals(entity.getType()))) { - continue; - } - - String qualifiedName = null; - long createTime = 0; - - try { - if (entity.getType() == Entity.Type.PARTITION || entity.getType() == Entity.Type.TABLE) { - String tableKey = entity.getTable().getDbName() + "." + entity.getTable().getTableName(); - Table table = tableMap.get(tableKey); - - if (table == null) { - table = getHive().getTable(entity.getTable().getDbName(), entity.getTable().getTableName()); - - tableMap.put(tableKey, table); //since there could be several partitions in a table, store it to avoid hive calls. 
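(Editorial aside, not part of the deleted source: a rough sketch of the string that getQualifiedName(inputs, outputs) and addToProcessQualifiedName() build for a hive_process - the operation name, then each dataset's qualifiedName plus table create time, with "->" between inputs and outputs. The ':' separator and the sample names and timestamps are assumptions for illustration only.)

public class ProcessQualifiedNameSketch {
    private static final char SEP = ':'; // stand-in for QNAME_SEP_PROCESS (assumed value)

    static String processQualifiedName(String operation,
                                       java.util.List<String> inputQNames,
                                       java.util.List<String> outputQNames) {
        StringBuilder sb = new StringBuilder(operation);

        inputQNames.forEach(q -> sb.append(SEP).append(q.toLowerCase()));
        sb.append("->");
        outputQNames.forEach(q -> sb.append(SEP).append(q.toLowerCase()));

        return sb.toString();
    }

    public static void main(String[] args) {
        // hypothetical dataset qualified names: <db>.<table>@<metadataNamespace>:<createTime>
        System.out.println(processQualifiedName("QUERY",
                java.util.List.of("default.src@primary:1550000000000"),
                java.util.List.of("default.dst@primary:1550000001000")));
        // prints: QUERY:default.src@primary:1550000000000->:default.dst@primary:1550000001000
    }
}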
- } - if (table != null) { - createTime = getTableCreateTime(table); - qualifiedName = getQualifiedName(table); - } - } else { - qualifiedName = getQualifiedName(entity); - } - } catch (Exception excp) { - LOG.error("error while computing qualifiedName for process", excp); - } - - if (qualifiedName == null || !dataSetsProcessed.add(qualifiedName)) { - continue; - } - - if (entity instanceof WriteEntity) { // output entity - WriteEntity writeEntity = (WriteEntity) entity; - - if (writeEntity.getWriteType() != null && HiveOperation.QUERY.equals(context.getHiveOperation())) { - boolean addWriteType = false; - - switch (((WriteEntity) entity).getWriteType()) { - case INSERT: - case INSERT_OVERWRITE: - case UPDATE: - case DELETE: - addWriteType = true; - break; - - case PATH_WRITE: - addWriteType = !Entity.Type.LOCAL_DIR.equals(entity.getType()); - break; - } - - if (addWriteType) { - processQualifiedName.append(QNAME_SEP_PROCESS).append(writeEntity.getWriteType().name()); - } - } - } - - processQualifiedName.append(QNAME_SEP_PROCESS).append(qualifiedName.toLowerCase().replaceAll("/", "")); - - if (createTime != 0) { - processQualifiedName.append(QNAME_SEP_PROCESS).append(createTime); - } - } - } - - private boolean isAlterTableOperation() { - switch (context.getHiveOperation()) { - case ALTERTABLE_FILEFORMAT: - case ALTERTABLE_CLUSTER_SORT: - case ALTERTABLE_BUCKETNUM: - case ALTERTABLE_PROPERTIES: - case ALTERTABLE_SERDEPROPERTIES: - case ALTERTABLE_SERIALIZER: - case ALTERTABLE_ADDCOLS: - case ALTERTABLE_REPLACECOLS: - case ALTERTABLE_PARTCOLTYPE: - case ALTERTABLE_LOCATION: - case ALTERTABLE_RENAME: - case ALTERTABLE_RENAMECOL: - case ALTERVIEW_PROPERTIES: - case ALTERVIEW_RENAME: - case ALTERVIEW_AS: - return true; - } - - return false; - } - - static final class EntityComparator implements Comparator { - @Override - public int compare(Entity entity1, Entity entity2) { - String name1 = entity1.getName(); - String name2 = entity2.getName(); - - if (name1 == null || name2 == null) { - name1 = entity1.getD().toString(); - name2 = entity2.getD().toString(); - } - - return name1.toLowerCase().compareTo(name2.toLowerCase()); - } - } - - static final Comparator entityComparator = new EntityComparator(); - - static final class HBaseTableInfo { - String hbaseNameSpace = null; - String hbaseTableName = null; - - HBaseTableInfo(Table table) { - Map parameters = table.getParameters(); - - if (MapUtils.isNotEmpty(parameters)) { - hbaseNameSpace = HBASE_DEFAULT_NAMESPACE; - hbaseTableName = parameters.get(HBASE_PARAM_TABLE_NAME); - - if (hbaseTableName != null) { - if (hbaseTableName.contains(HBASE_NAMESPACE_TABLE_DELIMITER)) { - String[] hbaseTableInfo = hbaseTableName.split(HBASE_NAMESPACE_TABLE_DELIMITER); - - if (hbaseTableInfo.length > 1) { - hbaseNameSpace = hbaseTableInfo[0]; - hbaseTableName = hbaseTableInfo[1]; - } - } - } - } - } - - public String getHbaseNameSpace() { - return hbaseNameSpace; - } - - public String getHbaseTableName() { - return hbaseTableName; - } - } - - public static Table toTable(org.apache.hadoop.hive.metastore.api.Table table) { - return new Table(table); - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateDatabase.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateDatabase.java deleted file mode 100644 index bf5f5620e7..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateDatabase.java +++ /dev/null @@ -1,122 +0,0 @@ -/** - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.List; - -import static org.apache.atlas.hive.bridge.HiveMetaStoreBridge.getDatabaseName; -import static org.apache.hadoop.hive.ql.hooks.Entity.Type.DATABASE; - -public class CreateDatabase extends BaseHiveEvent { - private static final Logger LOG = LoggerFactory.getLogger(CreateDatabase.class); - - public CreateDatabase(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = context.isMetastoreHook() ? getHiveMetastoreEntities() : getHiveEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityCreateRequestV2(getUserName(), entities)); - } - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveMetastoreEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo(); - CreateDatabaseEvent dbEvent = (CreateDatabaseEvent) context.getMetastoreEvent(); - Database db = dbEvent.getDatabase(); - - if (db != null) { - db = context.getMetastoreHandler().get_database(db.getName()); - } - - if (db != null) { - AtlasEntity dbEntity = toDbEntity(db); - - ret.addEntity(dbEntity); - - addLocationEntities(dbEntity, ret); - } else { - LOG.error("CreateDatabase.getEntities(): failed to retrieve db"); - } - - addProcessedEntities(ret); - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo(); - - for (Entity entity : getOutputs()) { - if (entity.getType() == DATABASE) { - Database db = entity.getDatabase(); - - if (db != null) { - db = getHive().getDatabase(getDatabaseName(db)); - } - - if (db != null) { - AtlasEntity dbEntity = toDbEntity(db); - AtlasEntity dbDDLEntity = createHiveDDLEntity(dbEntity); - - ret.addEntity(dbEntity); - - if (dbDDLEntity != null) { - ret.addEntity(dbDDLEntity); - } - - addLocationEntities(dbEntity, ret); - } else { - LOG.error("CreateDatabase.getEntities(): failed to retrieve db"); - } - } - } - - addProcessedEntities(ret); - - return ret; - } - - public void addLocationEntities(AtlasEntity dbEntity, AtlasEntitiesWithExtInfo ret) { - AtlasEntity dbLocationEntity = createHiveLocationEntity(dbEntity, ret); - - if (dbLocationEntity != null) { - ret.addEntity(dbLocationEntity); - } - } -} \ No newline at end of file diff --git 
a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateHiveProcess.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateHiveProcess.java deleted file mode 100644 index 5787c9365a..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateHiveProcess.java +++ /dev/null @@ -1,295 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.LineageInfo; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey; -import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.hooks.WriteEntity; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - - -public class CreateHiveProcess extends BaseHiveEvent { - private static final Logger LOG = LoggerFactory.getLogger(CreateHiveProcess.class); - - public CreateHiveProcess(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = getEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityCreateRequestV2(getUserName(), entities)); - } - - return ret; - } - - public AtlasEntitiesWithExtInfo getEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = null; - - if (!skipProcess()) { - List inputs = new ArrayList<>(); - List outputs = new ArrayList<>(); - Set processedNames = new HashSet<>(); - - ret = new AtlasEntitiesWithExtInfo(); - - if (getInputs() != null) { - for (ReadEntity input : getInputs()) { - String qualifiedName = getQualifiedName(input); - - if (qualifiedName == null || !processedNames.add(qualifiedName)) { - continue; - } - - AtlasEntity entity = getInputOutputEntity(input, ret, skipTempTables); - - if (!input.isDirect()) { - continue; - } - - if (entity != null) { - inputs.add(entity); - } - } - } - - if (getOutputs() != null) { - for (WriteEntity output : getOutputs()) { - String qualifiedName = getQualifiedName(output); - - if (qualifiedName == null || !processedNames.add(qualifiedName)) { - continue; - } - - AtlasEntity entity = getInputOutputEntity(output, ret, skipTempTables); - - if (entity != null) { - outputs.add(entity); - } - - if (isDdlOperation(entity)) { - - AtlasEntity ddlEntity = createHiveDDLEntity(entity); - - if (ddlEntity != null) { - ret.addEntity(ddlEntity); - } - } - } - } - - boolean skipProcess = inputs.isEmpty() && outputs.isEmpty(); - - if (!skipProcess) { - if (inputs.isEmpty() && context.isSkippedInputEntity()) { - skipProcess = true; - } else if 
(outputs.isEmpty() && context.isSkippedOutputEntity()) { - skipProcess = true; - } - } - - if (!skipProcess && !context.isMetastoreHook()) { - AtlasEntity process = getHiveProcessEntity(inputs, outputs); - - ret.addEntity(process); - - AtlasEntity processExecution = getHiveProcessExecutionEntity(process); - ret.addEntity(processExecution); - - processColumnLineage(process, ret); - - addProcessedEntities(ret); - } else { - ret = null; - } - } - - return ret; - } - - private void processColumnLineage(AtlasEntity hiveProcess, AtlasEntitiesWithExtInfo entities) { - LineageInfo lineageInfo = getLineageInfo(); - - if (lineageInfo == null || CollectionUtils.isEmpty(lineageInfo.entrySet())) { - return; - } - - final List columnLineages = new ArrayList<>(); - int lineageInputsCount = 0; - final Set processedOutputCols = new HashSet<>(); - - for (Map.Entry entry : lineageInfo.entrySet()) { - String outputColName = getQualifiedName(entry.getKey()); - AtlasEntity outputColumn = context.getEntity(outputColName); - - if (LOG.isDebugEnabled()) { - LOG.debug("processColumnLineage(): DependencyKey={}; Dependency={}", entry.getKey(), entry.getValue()); - } - - if (outputColumn == null) { - LOG.warn("column-lineage: non-existing output-column {}", outputColName); - - continue; - } - - if (processedOutputCols.contains(outputColName)) { - LOG.warn("column-lineage: duplicate for output-column {}", outputColName); - - continue; - } else { - processedOutputCols.add(outputColName); - } - - List inputColumns = new ArrayList<>(); - - for (BaseColumnInfo baseColumn : getBaseCols(entry.getValue())) { - String inputColName = getQualifiedName(baseColumn); - AtlasEntity inputColumn = context.getEntity(inputColName); - - if (inputColumn == null) { - LOG.warn("column-lineage: non-existing input-column {} for output-column={}", inputColName, outputColName); - - continue; - } - - inputColumns.add(inputColumn); - } - - if (inputColumns.isEmpty()) { - continue; - } - - lineageInputsCount += inputColumns.size(); - - AtlasEntity columnLineageProcess = new AtlasEntity(HIVE_TYPE_COLUMN_LINEAGE); - - columnLineageProcess.setAttribute(ATTRIBUTE_NAME, hiveProcess.getAttribute(ATTRIBUTE_QUALIFIED_NAME) + ":" + outputColumn.getAttribute(ATTRIBUTE_NAME)); - columnLineageProcess.setAttribute(ATTRIBUTE_QUALIFIED_NAME, hiveProcess.getAttribute(ATTRIBUTE_QUALIFIED_NAME) + ":" + outputColumn.getAttribute(ATTRIBUTE_NAME)); - columnLineageProcess.setRelationshipAttribute(ATTRIBUTE_INPUTS, AtlasTypeUtil.getAtlasRelatedObjectIds(inputColumns, BaseHiveEvent.RELATIONSHIP_DATASET_PROCESS_INPUTS)); - columnLineageProcess.setRelationshipAttribute(ATTRIBUTE_OUTPUTS, Collections.singletonList(AtlasTypeUtil.getAtlasRelatedObjectId(outputColumn, BaseHiveEvent.RELATIONSHIP_PROCESS_DATASET_OUTPUTS))); - columnLineageProcess.setRelationshipAttribute(ATTRIBUTE_QUERY, AtlasTypeUtil.getAtlasRelatedObjectId(hiveProcess, BaseHiveEvent.RELATIONSHIP_HIVE_PROCESS_COLUMN_LINEAGE)); - columnLineageProcess.setAttribute(ATTRIBUTE_DEPENDENCY_TYPE, entry.getValue().getType()); - columnLineageProcess.setAttribute(ATTRIBUTE_EXPRESSION, entry.getValue().getExpr()); - - columnLineages.add(columnLineageProcess); - } - - float avgInputsCount = columnLineages.size() > 0 ? 
(((float) lineageInputsCount) / columnLineages.size()) : 0; - boolean skipColumnLineage = context.getSkipHiveColumnLineageHive20633() && avgInputsCount > context.getSkipHiveColumnLineageHive20633InputsThreshold(); - - if (!skipColumnLineage) { - for (AtlasEntity columnLineage : columnLineages) { - entities.addEntity(columnLineage); - } - } else { - LOG.warn("skipped {} hive_column_lineage entities. Average # of inputs={}, threshold={}, total # of inputs={}", columnLineages.size(), avgInputsCount, context.getSkipHiveColumnLineageHive20633InputsThreshold(), lineageInputsCount); - } - } - - private Collection getBaseCols(Dependency lInfoDep) { - Collection ret = Collections.emptyList(); - - if (lInfoDep != null) { - try { - Method getBaseColsMethod = lInfoDep.getClass().getMethod("getBaseCols"); - - Object retGetBaseCols = getBaseColsMethod.invoke(lInfoDep); - - if (retGetBaseCols != null) { - if (retGetBaseCols instanceof Collection) { - ret = (Collection) retGetBaseCols; - } else { - LOG.warn("{}: unexpected return type from LineageInfo.Dependency.getBaseCols(), expected type {}", - retGetBaseCols.getClass().getName(), "Collection"); - } - } - } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException ex) { - LOG.warn("getBaseCols()", ex); - } - } - - return ret; - } - - - private boolean skipProcess() { - Set inputs = getInputs(); - Set outputs = getOutputs(); - - boolean ret = CollectionUtils.isEmpty(inputs) && CollectionUtils.isEmpty(outputs); - - if (!ret) { - if (getContext().getHiveOperation() == HiveOperation.QUERY) { - // Select query has only one output - if (outputs.size() == 1) { - WriteEntity output = outputs.iterator().next(); - - if (output.getType() == Entity.Type.DFS_DIR || output.getType() == Entity.Type.LOCAL_DIR) { - if (output.getWriteType() == WriteEntity.WriteType.PATH_WRITE && output.isTempURI()) { - ret = true; - } - } - // DELETE and UPDATE initially have one input and one output. - // Since they do not support sub-query, they won't create a lineage that have one input and one output. (One input only) - // It's safe to filter them out here. - if (output.getWriteType() == WriteEntity.WriteType.DELETE || output.getWriteType() == WriteEntity.WriteType.UPDATE) { - ret = true; - } - } - } - } - - return ret; - } - - private boolean isDdlOperation(AtlasEntity entity) { - return entity != null && !context.isMetastoreHook() - && (context.getHiveOperation().equals(HiveOperation.CREATETABLE_AS_SELECT) - || context.getHiveOperation().equals(HiveOperation.CREATEVIEW) - || context.getHiveOperation().equals(HiveOperation.ALTERVIEW_AS) - || context.getHiveOperation().equals(HiveOperation.CREATE_MATERIALIZED_VIEW)); - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateTable.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateTable.java deleted file mode 100644 index 91611de88e..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/CreateTable.java +++ /dev/null @@ -1,202 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.events.AlterTableEvent; -import org.apache.hadoop.hive.metastore.events.CreateTableEvent; -import org.apache.hadoop.hive.metastore.events.ListenerEvent; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.List; - -import static org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE; -import static org.apache.hadoop.hive.ql.plan.HiveOperation.*; - -public class CreateTable extends BaseHiveEvent { - private static final Logger LOG = LoggerFactory.getLogger(CreateTable.class); - - public CreateTable(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = context.isMetastoreHook() ? 
getHiveMetastoreEntities() : getHiveEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityCreateRequestV2(getUserName(), entities)); - } - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveMetastoreEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo(); - ListenerEvent event = context.getMetastoreEvent(); - HiveOperation oper = context.getHiveOperation(); - Table table; - - if (isAlterTable(oper)) { - table = toTable(((AlterTableEvent) event).getNewTable()); - } else { - table = toTable(((CreateTableEvent) event).getTable()); - } - - if (skipTemporaryTable(table)) { - table = null; - } - - processTable(table, ret); - - addProcessedEntities(ret); - - return ret; - } - - public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo(); - Table table = null; - - if (CollectionUtils.isNotEmpty(getOutputs())) { - for (Entity entity : getOutputs()) { - if (entity.getType() == Entity.Type.TABLE) { - table = entity.getTable(); - - if (table != null) { - table = getHive().getTable(table.getDbName(), table.getTableName()); - - if (table != null) { - if (skipTemporaryTable(table)) { - table = null; - } else { - break; - } - } - } - } - } - } - - processTable(table, ret); - - addProcessedEntities(ret); - - return ret; - } - - // create process entities for lineages from HBase/HDFS to hive table - private void processTable(Table table, AtlasEntitiesWithExtInfo ret) throws Exception { - if (table != null) { - AtlasEntity tblEntity = toTableEntity(table, ret); - - if (tblEntity != null) { - if (isHBaseStore(table)) { - if (context.isMetastoreHook()) { - //do nothing - } else { - // This create lineage to HBase table in case of Hive on HBase - AtlasEntity hbaseTableEntity = toReferencedHBaseTable(table, ret); - - //not a hive metastore hook - //it is running in the context of Hbase. 
- if (hbaseTableEntity != null) { - final AtlasEntity processEntity; - - if (EXTERNAL_TABLE.equals(table.getTableType())) { - processEntity = getHiveProcessEntity(Collections.singletonList(hbaseTableEntity), Collections.singletonList(tblEntity)); - } else { - processEntity = getHiveProcessEntity(Collections.singletonList(tblEntity), Collections.singletonList(hbaseTableEntity)); - } - ret.addEntity(processEntity); - - AtlasEntity processExecution = getHiveProcessExecutionEntity(processEntity); - ret.addEntity(processExecution); - } - } - - } else { - if (context.isMetastoreHook()) { - //it is running in the context of HiveMetastore - //not a hive metastore hook - if (isCreateExtTableOperation(table)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Creating a dummy process with lineage from hdfs path to hive table"); - } - AtlasEntity hdfsPathEntity = getPathEntity(table.getDataLocation(), ret); - AtlasEntity processEntity = getHiveProcessEntity(Collections.singletonList(hdfsPathEntity), Collections.singletonList(tblEntity)); - - ret.addEntity(processEntity); - ret.addReferredEntity(hdfsPathEntity); - } - } else { - //not a hive metastore hook - //it is running in the context of HiveServer2 - if (EXTERNAL_TABLE.equals(table.getTableType())) { - AtlasEntity hdfsPathEntity = getPathEntity(table.getDataLocation(), ret); - AtlasEntity processEntity = getHiveProcessEntity(Collections.singletonList(hdfsPathEntity), Collections.singletonList(tblEntity)); - - ret.addEntity(processEntity); - ret.addReferredEntity(hdfsPathEntity); - - AtlasEntity processExecution = getHiveProcessExecutionEntity(processEntity); - ret.addEntity(processExecution); - } - } - } - - if (!context.isMetastoreHook()) { - AtlasEntity tableDDLEntity = createHiveDDLEntity(tblEntity); - - if (tableDDLEntity != null) { - ret.addEntity(tableDDLEntity); - } - } - } - } - } - - private static boolean isAlterTable(HiveOperation oper) { - return (oper == ALTERTABLE_PROPERTIES || oper == ALTERTABLE_RENAME || oper == ALTERTABLE_RENAMECOL); - } - - private boolean skipTemporaryTable(Table table) { - // If its an external table, even though the temp table skip flag is on, we create the table since we need the HDFS path to temp table lineage. - return table != null && skipTempTables && table.isTemporary() && !EXTERNAL_TABLE.equals(table.getTableType()); - } - - private boolean isCreateExtTableOperation(Table table) { - HiveOperation oper = context.getHiveOperation(); - TableType tableType = table.getTableType(); - - return EXTERNAL_TABLE.equals(tableType) && (oper == CREATETABLE || oper == CREATETABLE_AS_SELECT); - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropDatabase.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropDatabase.java deleted file mode 100644 index 20019d2ef4..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropDatabase.java +++ /dev/null @@ -1,93 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent; -import org.apache.hadoop.hive.ql.hooks.Entity; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static org.apache.hadoop.hive.ql.hooks.Entity.Type.DATABASE; -import static org.apache.hadoop.hive.ql.hooks.Entity.Type.TABLE; - -public class DropDatabase extends BaseHiveEvent { - public DropDatabase(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() { - List ret = null; - List entities = context.isMetastoreHook() ? getHiveMetastoreEntities() : getHiveEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - ret = new ArrayList<>(entities.size()); - - for (AtlasObjectId entity : entities) { - ret.add(new EntityDeleteRequestV2(getUserName(), Collections.singletonList(entity))); - } - } - - return ret; - } - - private List getHiveMetastoreEntities() { - List ret = new ArrayList<>(); - DropDatabaseEvent dbEvent = (DropDatabaseEvent) context.getMetastoreEvent(); - String dbQName = getQualifiedName(dbEvent.getDatabase()); - AtlasObjectId dbId = new AtlasObjectId(HIVE_TYPE_DB, ATTRIBUTE_QUALIFIED_NAME, dbQName); - - context.removeFromKnownDatabase(dbQName); - - ret.add(dbId); - - return ret; - } - - private List getHiveEntities() { - List ret = new ArrayList<>(); - - for (Entity entity : getOutputs()) { - if (entity.getType() == DATABASE) { - String dbQName = getQualifiedName(entity.getDatabase()); - AtlasObjectId dbId = new AtlasObjectId(HIVE_TYPE_DB, ATTRIBUTE_QUALIFIED_NAME, dbQName); - - context.removeFromKnownDatabase(dbQName); - - ret.add(dbId); - } else if (entity.getType() == TABLE) { - String tblQName = getQualifiedName(entity.getTable()); - AtlasObjectId tblId = new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tblQName); - - context.removeFromKnownTable(tblQName); - - ret.add(tblId); - } - } - - return ret; - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropTable.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropTable.java deleted file mode 100644 index 440c08affa..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/DropTable.java +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook.events; - -import org.apache.atlas.hive.hook.AtlasHiveHookContext; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.hive.metastore.events.DropTableEvent; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.metadata.Table; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -public class DropTable extends BaseHiveEvent { - public DropTable(AtlasHiveHookContext context) { - super(context); - } - - @Override - public List getNotificationMessages() { - List ret = null; - List entities = context.isMetastoreHook() ? getHiveMetastoreEntities() : getHiveEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - ret = new ArrayList<>(entities.size()); - - for (AtlasObjectId entity : entities) { - ret.add(new EntityDeleteRequestV2(getUserName(), Collections.singletonList(entity))); - } - } - - return ret; - } - - public List getHiveMetastoreEntities() { - List ret = new ArrayList<>(); - DropTableEvent tblEvent = (DropTableEvent) context.getMetastoreEvent(); - Table table = new Table(tblEvent.getTable()); - String tblQName = getQualifiedName(table); - AtlasObjectId tblId = new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tblQName); - - context.removeFromKnownTable(tblQName); - - ret.add(tblId); - - return ret; - } - - public List getHiveEntities() { - List ret = new ArrayList<>(); - - for (Entity entity : getOutputs()) { - if (entity.getType() == Entity.Type.TABLE) { - String tblQName = getQualifiedName(entity.getTable()); - AtlasObjectId tblId = new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tblQName); - - context.removeFromKnownTable(tblQName); - - ret.add(tblId); - } - } - - return ret; - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilter.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilter.java deleted file mode 100644 index 0b0d4d6b26..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilter.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook.utils; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.hive.hook.HiveHook; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.commons.configuration.Configuration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.List; - -import static org.apache.atlas.hive.hook.HiveHook.HOOK_HIVE_IGNORE_DDL_OPERATIONS; - -public class ActiveEntityFilter { - private static final Logger LOG = LoggerFactory.getLogger(ActiveEntityFilter.class); - - private static EntityFilter entityFilter; - - public static void init(Configuration configuration) { - boolean skipDdlOperations = configuration.getBoolean(HOOK_HIVE_IGNORE_DDL_OPERATIONS, false); - List additionalTypesToRetain = getConfiguredTypesToRetainForDDLEntityFilter(configuration); - - init(skipDdlOperations, additionalTypesToRetain); - LOG.info("atlas.hook.hive.ignore.ddl.operations={} - {}", skipDdlOperations, entityFilter.getClass().getSimpleName()); - } - - @VisibleForTesting - static void init(boolean lineageOnlyFilter, List additionalTypesToRetain) { - entityFilter = lineageOnlyFilter ? new HiveDDLEntityFilter(additionalTypesToRetain) : new PassthroughFilter(); - } - - public static List apply(List incoming) { - return entityFilter.apply(incoming); - } - - private static List getConfiguredTypesToRetainForDDLEntityFilter(Configuration configuration) { - try { - if (configuration.containsKey(HiveHook.HOOK_HIVE_FILTER_ENTITY_ADDITIONAL_TYPES_TO_RETAIN)) { - String[] configuredTypes = configuration.getStringArray(HiveHook.HOOK_HIVE_FILTER_ENTITY_ADDITIONAL_TYPES_TO_RETAIN); - - return Arrays.asList(configuredTypes); - } - } catch (Exception e) { - LOG.error("Failed to load application properties", e); - } - - return null; - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/EntityFilter.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/EntityFilter.java deleted file mode 100644 index f02ee52dac..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/EntityFilter.java +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook.utils; - -import org.apache.atlas.model.notification.HookNotification; - -import java.util.List; - -public interface EntityFilter { - List apply(List incoming); -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/HiveDDLEntityFilter.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/HiveDDLEntityFilter.java deleted file mode 100644 index 0f9aa458cc..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/HiveDDLEntityFilter.java +++ /dev/null @@ -1,286 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook.utils; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.hive.hook.events.BaseHiveEvent; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.utils.AtlasPathExtractorUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class HiveDDLEntityFilter implements EntityFilter { - private static final Logger LOG = LoggerFactory.getLogger(HiveDDLEntityFilter.class); - - private static final Set defaultPathTypesToRetain = new HashSet() {{ - add(AtlasPathExtractorUtil.HDFS_TYPE_PATH); - add(AtlasPathExtractorUtil.ADLS_GEN2_ACCOUNT); - add(AtlasPathExtractorUtil.ADLS_GEN2_CONTAINER); - add(AtlasPathExtractorUtil.ADLS_GEN2_DIRECTORY); - add(AtlasPathExtractorUtil.GCS_VIRTUAL_DIR); - add(AtlasPathExtractorUtil.AWS_S3_BUCKET); - add(AtlasPathExtractorUtil.AWS_S3_V2_BUCKET); - add(AtlasPathExtractorUtil.AWS_S3_V2_PSEUDO_DIR); - add(AtlasPathExtractorUtil.AWS_S3_PSEUDO_DIR); - add(AtlasPathExtractorUtil.OZONE_KEY); - add(AtlasPathExtractorUtil.OZONE_BUCKET); - add(AtlasPathExtractorUtil.OZONE_VOLUME); - }}; - - private static final Set typesToRetain = new HashSet() {{ - add(BaseHiveEvent.HIVE_TYPE_PROCESS); - add(BaseHiveEvent.HIVE_TYPE_PROCESS_EXECUTION); - add(BaseHiveEvent.HIVE_TYPE_COLUMN_LINEAGE); - 
add(BaseHiveEvent.HIVE_DB_DDL); - add(BaseHiveEvent.HIVE_TABLE_DDL); - addAll(defaultPathTypesToRetain); - }}; - - public HiveDDLEntityFilter(List additionalTypesToRetain) { - if (CollectionUtils.isEmpty(additionalTypesToRetain)) { - return; - } - - typesToRetain.addAll(additionalTypesToRetain); - LOG.info("Types retained: {}", typesToRetain.toArray()); - } - - public List apply(List incoming) { - if (CollectionUtils.isEmpty(incoming)) { - return incoming; - } - - List ret = new ArrayList<>(); - for (HookNotification notification : incoming) { - HookNotification filteredNotification = apply(notification); - if (filteredNotification == null) { - continue; - } - - ret.add(filteredNotification); - } - - return ret; - } - - @VisibleForTesting - AtlasEntity.AtlasEntityWithExtInfo apply(AtlasEntity.AtlasEntityWithExtInfo incoming) { - AtlasEntity.AtlasEntityWithExtInfo ret = new AtlasEntity.AtlasEntityWithExtInfo(); - - AtlasEntity entity = filter(incoming.getEntity()); - if (entity == null) { - return null; - } - - ret.setEntity(entity); - - Map refEntities = filter(incoming.getReferredEntities()); - if (!MapUtils.isEmpty(refEntities)) { - ret.setReferredEntities(refEntities); - } - - return ret; - } - - @VisibleForTesting - public AtlasEntity.AtlasEntitiesWithExtInfo apply(AtlasEntity.AtlasEntitiesWithExtInfo incoming) { - if (incoming == null) { - return incoming; - } - - AtlasEntity.AtlasEntitiesWithExtInfo ret = new AtlasEntity.AtlasEntitiesWithExtInfo(); - - filterEntities(incoming, ret); - filterReferences(incoming, ret); - - return ret; - } - - @VisibleForTesting - List applyForObjectIds(List incoming) { - if (incoming == null || CollectionUtils.isEmpty(incoming)) { - return null; - } - - List ret = new ArrayList<>(); - for (AtlasObjectId o : incoming) { - if (filterObjectId(o) != null) { - ret.add(o); - } - } - - return ret; - } - - private AtlasObjectId filterObjectId(AtlasObjectId o) { - if (o != null && typesToRetain.contains(o.getTypeName())) { - return o; - } - - return null; - } - - private static void filterEntities(AtlasEntity.AtlasEntitiesWithExtInfo incoming, AtlasEntity.AtlasEntitiesWithExtInfo ret) { - ret.setEntities(filter(incoming.getEntities())); - } - - private static void filterReferences(AtlasEntity.AtlasEntitiesWithExtInfo incoming, AtlasEntity.AtlasEntitiesWithExtInfo ret) { - ret.setReferredEntities(filter(incoming.getReferredEntities())); - } - - private static Map filter(Map incoming) { - if (incoming == null || MapUtils.isEmpty(incoming)) { - return null; - } - - return incoming.values() - .stream() - .filter(x -> typesToRetain.contains(x.getTypeName())) - .collect(Collectors.toMap(AtlasEntity::getGuid, Function.identity())); - } - - private static List filter(List incoming) { - if (incoming == null) { - return null; - } - - List ret = incoming.stream() - .filter(x -> typesToRetain.contains(x.getTypeName())) - .collect(Collectors.toList()); - - for (AtlasEntity e : ret) { - for (Object o : e.getRelationshipAttributes().values()) { - if (o instanceof List) { - List list = (List) o; - for (Object ox : list) { - inferObjectTypeResetGuid(ox); - } - } else { - inferObjectTypeResetGuid(o); - } - } - } - - return ret; - } - - private static void inferObjectTypeResetGuid(Object o) { - if (o instanceof AtlasObjectId) { - AtlasObjectId oid = (AtlasObjectId) o; - String typeName = oid.getTypeName(); - - if (oid.getUniqueAttributes() != null && !typesToRetain.contains(typeName)) { - oid.setGuid(null); - } - } else { - LinkedHashMap hm = (LinkedHashMap) o; - if 
(!hm.containsKey(BaseHiveEvent.ATTRIBUTE_GUID)) { - return; - } - - String typeName = hm.containsKey(AtlasObjectId.KEY_TYPENAME) ? (String) hm.get(AtlasObjectId.KEY_TYPENAME) : null; - - if (hm.containsKey(BaseHiveEvent.ATTRIBUTE_UNIQUE_ATTRIBUTES) && !typesToRetain.contains(typeName)) { - hm.remove(BaseHiveEvent.ATTRIBUTE_GUID); - } - } - } - - private static AtlasEntity filter(AtlasEntity incoming) { - if (incoming == null) { - return null; - } - - return typesToRetain.contains(incoming.getTypeName()) ? incoming : null; - } - - private HookNotification apply(HookNotification notification) { - if (notification instanceof HookNotification.EntityCreateRequestV2) { - return apply((HookNotification.EntityCreateRequestV2) notification); - } - - if (notification instanceof HookNotification.EntityUpdateRequestV2) { - return apply((HookNotification.EntityUpdateRequestV2) notification); - } - - if (notification instanceof HookNotification.EntityPartialUpdateRequestV2) { - return apply((HookNotification.EntityPartialUpdateRequestV2) notification); - } - - if (notification instanceof HookNotification.EntityDeleteRequestV2) { - return apply((HookNotification.EntityDeleteRequestV2) notification); - } - - return null; - } - - private HookNotification.EntityCreateRequestV2 apply(HookNotification.EntityCreateRequestV2 notification) { - AtlasEntity.AtlasEntitiesWithExtInfo entities = apply(notification.getEntities()); - if (entities == null || CollectionUtils.isEmpty(entities.getEntities())) { - return null; - } - - return new HookNotification.EntityCreateRequestV2(notification.getUser(), entities); - } - - private HookNotification.EntityUpdateRequestV2 apply(HookNotification.EntityUpdateRequestV2 notification) { - AtlasEntity.AtlasEntitiesWithExtInfo entities = apply(notification.getEntities()); - if (entities == null || CollectionUtils.isEmpty(entities.getEntities())) { - return null; - } - - return new HookNotification.EntityUpdateRequestV2(notification.getUser(), entities); - } - - private HookNotification.EntityPartialUpdateRequestV2 apply(HookNotification.EntityPartialUpdateRequestV2 notification) { - AtlasObjectId objectId = filterObjectId(notification.getEntityId()); - if (objectId == null) { - return null; - } - - AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = apply(notification.getEntity()); - if (entityWithExtInfo == null) { - return null; - } - - return new HookNotification.EntityPartialUpdateRequestV2(notification.getUser(), objectId, entityWithExtInfo); - } - - private HookNotification.EntityDeleteRequestV2 apply(HookNotification.EntityDeleteRequestV2 notification) { - List objectIds = applyForObjectIds(notification.getEntities()); - if (CollectionUtils.isEmpty(objectIds)) { - return null; - } - - return new HookNotification.EntityDeleteRequestV2(notification.getUser(), objectIds); - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/PassthroughFilter.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/PassthroughFilter.java deleted file mode 100644 index f61c120ea2..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/utils/PassthroughFilter.java +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.hive.hook.utils; - -import org.apache.atlas.model.notification.HookNotification; - -import java.util.List; - -class PassthroughFilter implements EntityFilter { - @Override - public List apply(List incoming) { - return incoming; - } -} diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataTypes.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataTypes.java deleted file mode 100755 index f3691e6259..0000000000 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataTypes.java +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.model; - -/** - * Hive Data Types for model and bridge. 
- */ -public enum HiveDataTypes { - - // Enums - HIVE_OBJECT_TYPE, - HIVE_PRINCIPAL_TYPE, - HIVE_RESOURCE_TYPE, - - // Structs - HIVE_SERDE, - HIVE_ORDER, - HIVE_RESOURCEURI, - - // Classes - HIVE_DB, - HIVE_STORAGEDESC, - HIVE_TABLE, - HIVE_COLUMN, - HIVE_PARTITION, - HIVE_INDEX, - HIVE_ROLE, - HIVE_TYPE, - HIVE_PROCESS, - HIVE_COLUMN_LINEAGE, - HIVE_PROCESS_EXECUTION, - // HIVE_VIEW, - ; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/hive-bridge/src/main/resources/atlas-hive-import-log4j.xml b/addons/hive-bridge/src/main/resources/atlas-hive-import-log4j.xml deleted file mode 100644 index 22a8cc9d43..0000000000 --- a/addons/hive-bridge/src/main/resources/atlas-hive-import-log4j.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/hive-bridge/src/patches/001-hive_column_add_position.json b/addons/hive-bridge/src/patches/001-hive_column_add_position.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java deleted file mode 100644 index 0875afa392..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java +++ /dev/null @@ -1,796 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.hive.bridge.ColumnLineageUtils; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.hook.HiveHookIT; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.Driver; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.HookContext; -import org.apache.hadoop.hive.ql.hooks.LineageInfo; -import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.hooks.WriteEntity; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.security.UserGroupInformation; -import org.json.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.annotations.BeforeClass; - -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.SortedSet; - -import static com.sun.jersey.api.client.ClientResponse.Status.NOT_FOUND; -import static org.apache.atlas.hive.bridge.HiveMetaStoreBridge.HDFS_PATH; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.ATTRIBUTE_QUALIFIED_NAME; -import static org.apache.atlas.hive.model.HiveDataTypes.HIVE_DB; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -public class HiveITBase { - private static final Logger LOG = LoggerFactory.getLogger(HiveITBase.class); - - public static final String DEFAULT_DB = "default"; - public static final String SEP = ":".intern(); - public static final String IO_SEP = "->".intern(); - protected static final String DGI_URL = "http://localhost:21000/"; - protected static final String CLUSTER_NAME = "primary"; - protected static final String PART_FILE = "2015-01-01"; - protected static final String INPUTS = "inputs"; - protected static final String OUTPUTS = "outputs"; - - - protected Driver driver; - 
protected AtlasClient atlasClient; - protected AtlasClientV2 atlasClientV2; - protected HiveMetaStoreBridge hiveMetaStoreBridge; - protected SessionState ss; - protected HiveConf conf; - protected Driver driverWithoutContext; - - private static final String REFERENCEABLE_ATTRIBUTE_NAME = "qualifiedName"; - private static final String ATTR_NAME = "name"; - - - @BeforeClass - public void setUp() throws Exception { - //Set-up hive session - conf = new HiveConf(); - conf.setClassLoader(Thread.currentThread().getContextClassLoader()); - conf.set("hive.metastore.event.listeners", ""); - - // 'driver' using this configuration will be used for tests in HiveHookIT - // HiveHookIT will use this driver to test post-execution hooks in HiveServer2. - // initialize 'driver' with HMS hook disabled. - driver = new Driver(conf); - ss = new SessionState(conf); - ss = SessionState.start(ss); - - SessionState.setCurrentSessionState(ss); - - Configuration configuration = ApplicationProperties.get(); - - String[] atlasEndPoint = configuration.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT); - - if (atlasEndPoint == null || atlasEndPoint.length == 0) { - atlasEndPoint = new String[] { DGI_URL }; - } - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClientV2 = new AtlasClientV2(atlasEndPoint, new String[]{"admin", "admin"}); - atlasClient = new AtlasClient(atlasEndPoint, new String[]{"admin", "admin"}); - } else { - atlasClientV2 = new AtlasClientV2(atlasEndPoint); - atlasClient = new AtlasClient(atlasEndPoint); - } - - hiveMetaStoreBridge = new HiveMetaStoreBridge(configuration, conf, atlasClientV2); - - HiveConf conf = new HiveConf(); - - conf.set("hive.exec.post.hooks", ""); - - SessionState ss = new SessionState(conf); - ss = SessionState.start(ss); - SessionState.setCurrentSessionState(ss); - - // 'driverWithoutContext' using this configuration will be used for tests in HiveMetastoreHookIT - // HiveMetastoreHookIT will use this driver to test event listeners in HiveMetastore. - // initialize 'driverWithoutContext' with HiveServer2 post execution hook disabled. 
- driverWithoutContext = new Driver(conf); - } - - protected void runCommand(String cmd) throws Exception { - runCommandWithDelay(cmd, 0); - } - - protected void runCommand(Driver driver, String cmd) throws Exception { - runCommandWithDelay(driver, cmd, 0); - } - - protected void runCommandWithDelay(String cmd, int sleepMs) throws Exception { - runCommandWithDelay(driver, cmd, sleepMs); - } - - protected void runCommandWithDelay(Driver driver, String cmd, int sleepMs) throws Exception { - LOG.debug("Running command '{}'", cmd); - - CommandProcessorResponse response = driver.run(cmd); - - assertEquals(response.getResponseCode(), 0); - - if (sleepMs != 0) { - Thread.sleep(sleepMs); - } - } - - protected String createTestDFSPath(String path) throws Exception { - return "file://" + mkdir(path); - } - - protected String file(String tag) throws Exception { - String filename = System.getProperty("user.dir") + "/target/" + tag + "-data-" + random(); - File file = new File(filename); - file.createNewFile(); - return file.getAbsolutePath(); - } - - protected String mkdir(String tag) throws Exception { - String filename = "./target/" + tag + "-data-" + random(); - File file = new File(filename); - file.mkdirs(); - return file.getAbsolutePath(); - } - - public static String lower(String str) { - if (StringUtils.isEmpty(str)) { - return null; - } - return str.toLowerCase().trim(); - } - - protected String random() { - return RandomStringUtils.randomAlphanumeric(10).toLowerCase(); - } - - protected String tableName() { - return "table_" + random(); - } - - protected String dbName() { - return "db_" + random(); - } - - protected String assertTableIsRegistered(String dbName, String tableName) throws Exception { - return assertTableIsRegistered(dbName, tableName, null, false); - } - - protected String assertTableIsRegistered(String dbName, String tableName, HiveHookIT.AssertPredicate assertPredicate, boolean isTemporary) throws Exception { - LOG.debug("Searching for table {}.{}", dbName, tableName); - String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporary); - return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName, - assertPredicate); - } - - protected String assertEntityIsRegistered(final String typeName, final String property, final String value, - final HiveHookIT.AssertPredicate assertPredicate) throws Exception { - waitFor(100000, new HiveHookIT.Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - } - }); - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - return (String) entity.getGuid(); - } - - protected String assertEntityIsRegisteredViaGuid(String guid, - final HiveHookIT.AssertPredicate assertPredicate) throws Exception { - waitFor(100000, new HiveHookIT.Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByGuid(guid); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - 
assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - - } - }); - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByGuid(guid); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - return (String) entity.getGuid(); - } - - protected AtlasEntity assertEntityIsRegistedViaEntity(final String typeName, final String property, final String value, - final HiveHookIT.AssertPredicate assertPredicate) throws Exception { - waitFor(80000, new HiveHookIT.Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - } - }); - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - return entity; - } - - public interface AssertPredicate { - void assertOnEntity(AtlasEntity entity) throws Exception; - } - - public interface Predicate { - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. - */ - void evaluate() throws Exception; - } - - /** - * Wait for a condition, expressed via a {@link Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. - */ - protected void waitFor(int timeout, Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - predicate.evaluate(); - return; - } catch(Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - Thread.sleep(5000); - } - } - } - - protected String getTableProcessQualifiedName(String dbName, String tableName) throws Exception { - return HiveMetaStoreBridge.getTableProcessQualifiedName(CLUSTER_NAME, - hiveMetaStoreBridge.getHiveClient().getTable(dbName, tableName)); - } - - protected void validateHDFSPaths(AtlasEntity processEntity, String attributeName, String... 
testPaths) throws Exception { - List hdfsPathIds = toAtlasObjectIdList(processEntity.getAttribute(attributeName)); - - for (String testPath : testPaths) { - Path path = new Path(testPath); - String testPathNormed = lower(path.toString()); - String hdfsPathId = assertHDFSPathIsRegistered(testPathNormed); - - assertHDFSPathIdsContain(hdfsPathIds, hdfsPathId); - } - } - - private void assertHDFSPathIdsContain(List hdfsPathObjectIds, String hdfsPathId) { - Set hdfsPathGuids = new HashSet<>(); - - for (AtlasObjectId hdfsPathObjectId : hdfsPathObjectIds) { - hdfsPathGuids.add(hdfsPathObjectId.getGuid()); - } - - assertTrue(hdfsPathGuids.contains(hdfsPathId)); - } - - protected String assertHDFSPathIsRegistered(String path) throws Exception { - LOG.debug("Searching for hdfs path {}", path); - // ATLAS-2444 HDFS name node federation adds the cluster name to the qualifiedName - if (path.startsWith("hdfs://")) { - String pathWithCluster = path + "@" + CLUSTER_NAME; - return assertEntityIsRegistered(HDFS_PATH, REFERENCEABLE_ATTRIBUTE_NAME, pathWithCluster, null); - } else { - return assertEntityIsRegistered(HDFS_PATH, REFERENCEABLE_ATTRIBUTE_NAME, path, null); - } - } - - protected String assertDatabaseIsRegistered(String dbName) throws Exception { - return assertDatabaseIsRegistered(dbName, null); - } - - protected String assertDatabaseIsRegistered(String dbName, AssertPredicate assertPredicate) throws Exception { - LOG.debug("Searching for database: {}", dbName); - - String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(CLUSTER_NAME, dbName); - - return assertEntityIsRegistered(HIVE_DB.getName(), REFERENCEABLE_ATTRIBUTE_NAME, dbQualifiedName, assertPredicate); - } - - public void assertDatabaseIsNotRegistered(String dbName) throws Exception { - LOG.debug("Searching for database {}", dbName); - String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(CLUSTER_NAME, dbName); - assertEntityIsNotRegistered(HIVE_DB.getName(), ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName); - } - - protected void assertEntityIsNotRegistered(final String typeName, final String property, final String value) throws Exception { - // wait for sufficient time before checking if entity is not available. 
- long waitTime = 10000; - LOG.debug("Waiting for {} msecs, before asserting entity is not registered.", waitTime); - Thread.sleep(waitTime); - - try { - atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property, value)); - - fail(String.format("Entity was not supposed to exist for typeName = %s, attributeName = %s, attributeValue = %s", typeName, property, value)); - } catch (AtlasServiceException e) { - if (e.getStatus() == NOT_FOUND) { - return; - } - } - } - - protected AtlasEntity getAtlasEntityByType(String type, String id) throws Exception { - AtlasEntity atlasEntity = null; - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfoForProcess = atlasClientV2.getEntityByAttribute(type, - Collections.singletonMap(AtlasClient.GUID, id)); - atlasEntity = atlasEntityWithExtInfoForProcess.getEntity(); - return atlasEntity; - } - - - public static class HiveEventContext { - private Set inputs; - private Set outputs; - - private String user; - private UserGroupInformation ugi; - private HiveOperation operation; - private HookContext.HookType hookType; - private JSONObject jsonPlan; - private String queryId; - private String queryStr; - private Long queryStartTime; - - public Map> lineageInfo; - - private List messages = new ArrayList<>(); - - public void setInputs(Set inputs) { - this.inputs = inputs; - } - - public void setOutputs(Set outputs) { - this.outputs = outputs; - } - - public void setUser(String user) { - this.user = user; - } - - public void setUgi(UserGroupInformation ugi) { - this.ugi = ugi; - } - - public void setOperation(HiveOperation operation) { - this.operation = operation; - } - - public void setHookType(HookContext.HookType hookType) { - this.hookType = hookType; - } - - public void setQueryId(String queryId) { - this.queryId = queryId; - } - - public void setQueryStr(String queryStr) { - this.queryStr = queryStr; - } - - public void setQueryStartTime(Long queryStartTime) { - this.queryStartTime = queryStartTime; - } - - public void setLineageInfo(LineageInfo lineageInfo){ - try { - this.lineageInfo = ColumnLineageUtils.buildLineageMap(lineageInfo); - LOG.debug("Column Lineage Map => {} ", this.lineageInfo.entrySet()); - }catch (Throwable e){ - LOG.warn("Column Lineage Map build failed with exception {}", e); - } - } - - public Set getInputs() { - return inputs; - } - - public Set getOutputs() { - return outputs; - } - - public String getUser() { - return user; - } - - public UserGroupInformation getUgi() { - return ugi; - } - - public HiveOperation getOperation() { - return operation; - } - - public HookContext.HookType getHookType() { - return hookType; - } - - public String getQueryId() { - return queryId; - } - - public String getQueryStr() { - return queryStr; - } - - public Long getQueryStartTime() { - return queryStartTime; - } - - public void addMessage(HookNotification message) { - messages.add(message); - } - - public List getMessages() { - return messages; - } - } - - - @VisibleForTesting - protected static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext, - final SortedSet sortedHiveInputs, - final SortedSet sortedHiveOutputs, - SortedMap hiveInputsMap, - SortedMap hiveOutputsMap) throws HiveException { - HiveOperation op = eventContext.getOperation(); - if (isCreateOp(eventContext)) { - Entity entity = getEntityByType(sortedHiveOutputs, Entity.Type.TABLE); - - if (entity != null) { - Table outTable = entity.getTable(); - //refresh table - outTable = 
dgiBridge.getHiveClient().getTable(outTable.getDbName(), outTable.getTableName()); - return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getMetadataNamespace(), outTable); - } - } - - StringBuilder buffer = new StringBuilder(op.getOperationName()); - - boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs); - if ( ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) { - LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr()); - } - - addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName); - buffer.append(IO_SEP); - addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName); - LOG.info("Setting process qualified name to {}", buffer); - return buffer.toString(); - } - - protected static Entity getEntityByType(Set entities, Entity.Type entityType) { - for (Entity entity : entities) { - if (entity.getType() == entityType) { - return entity; - } - } - return null; - } - - - protected static boolean ignoreHDFSPathsinQFName(final HiveOperation op, final Set inputs, final Set outputs) { - switch (op) { - case LOAD: - case IMPORT: - return isPartitionBasedQuery(outputs); - case EXPORT: - return isPartitionBasedQuery(inputs); - case QUERY: - return true; - } - return false; - } - - protected static boolean isPartitionBasedQuery(Set entities) { - for (Entity entity : entities) { - if (Entity.Type.PARTITION.equals(entity.getType())) { - return true; - } - } - return false; - } - - protected static boolean isCreateOp(HiveEventContext hiveEvent) { - return HiveOperation.CREATETABLE.equals(hiveEvent.getOperation()) - || HiveOperation.CREATEVIEW.equals(hiveEvent.getOperation()) - || HiveOperation.ALTERVIEW_AS.equals(hiveEvent.getOperation()) - || HiveOperation.ALTERTABLE_LOCATION.equals(hiveEvent.getOperation()) - || HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation()); - } - - protected static void addInputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet sortedInputs, StringBuilder buffer, final Map refs, final boolean ignoreHDFSPathsInQFName) throws HiveException { - if (refs != null) { - if (sortedInputs != null) { - Set dataSetsProcessed = new LinkedHashSet<>(); - for (Entity input : sortedInputs) { - - if (!dataSetsProcessed.contains(input.getName().toLowerCase())) { - //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations - if (ignoreHDFSPathsInQFName && - (Entity.Type.DFS_DIR.equals(input.getType()) || Entity.Type.LOCAL_DIR.equals(input.getType()))) { - LOG.debug("Skipping dfs dir input addition to process qualified name {} ", input.getName()); - } else if (refs.containsKey(input)) { - if ( input.getType() == Entity.Type.PARTITION || input.getType() == Entity.Type.TABLE) { - Table inputTable = refreshTable(hiveBridge, input.getTable().getDbName(), input.getTable().getTableName()); - - if (inputTable != null) { - addDataset(buffer, refs.get(input), HiveMetaStoreBridge.getTableCreatedTime(inputTable)); - } - } else { - addDataset(buffer, refs.get(input)); - } - } - - dataSetsProcessed.add(input.getName().toLowerCase()); - } - } - - } - } - } - - protected static void addDataset(StringBuilder buffer, AtlasEntity ref, final long createTime) { - addDataset(buffer, ref); - buffer.append(SEP); - buffer.append(createTime); - } - - protected static void addDataset(StringBuilder buffer, AtlasEntity ref) { - buffer.append(SEP); - String dataSetQlfdName = (String) 
ref.getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME); - // '/' breaks query parsing on ATLAS - buffer.append(dataSetQlfdName.toLowerCase().replaceAll("/", "")); - } - - protected static void addOutputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet sortedOutputs, StringBuilder buffer, final Map refs, final boolean ignoreHDFSPathsInQFName) throws HiveException { - if (refs != null) { - Set dataSetsProcessed = new LinkedHashSet<>(); - if (sortedOutputs != null) { - for (WriteEntity output : sortedOutputs) { - final Entity entity = output; - if (!dataSetsProcessed.contains(output.getName().toLowerCase())) { - if (ignoreHDFSPathsInQFName && - (Entity.Type.DFS_DIR.equals(output.getType()) || Entity.Type.LOCAL_DIR.equals(output.getType()))) { - LOG.debug("Skipping dfs dir output addition to process qualified name {} ", output.getName()); - } else if (refs.containsKey(output)) { - //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations - if (addQueryType(op, (WriteEntity) entity)) { - buffer.append(SEP); - buffer.append(((WriteEntity) entity).getWriteType().name()); - } - - if ( output.getType() == Entity.Type.PARTITION || output.getType() == Entity.Type.TABLE) { - Table outputTable = refreshTable(hiveBridge, output.getTable().getDbName(), output.getTable().getTableName()); - - if (outputTable != null) { - addDataset(buffer, refs.get(output), HiveMetaStoreBridge.getTableCreatedTime(outputTable)); - } - } else { - addDataset(buffer, refs.get(output)); - } - } - - dataSetsProcessed.add(output.getName().toLowerCase()); - } - } - } - } - } - - protected static Table refreshTable(HiveMetaStoreBridge dgiBridge, String dbName, String tableName) { - try { - return dgiBridge.getHiveClient().getTable(dbName, tableName); - } catch (HiveException excp) { // this might be the case for temp tables - LOG.warn("failed to get details for table {}.{}. Ignoring. 
{}: {}", dbName, tableName, excp.getClass().getCanonicalName(), excp.getMessage()); - } - - return null; - } - - protected static boolean addQueryType(HiveOperation op, WriteEntity entity) { - if (entity.getWriteType() != null && HiveOperation.QUERY.equals(op)) { - switch (entity.getWriteType()) { - case INSERT: - case INSERT_OVERWRITE: - case UPDATE: - case DELETE: - return true; - case PATH_WRITE: - //Add query type only for DFS paths and ignore local paths since they are not added as outputs - if ( !Entity.Type.LOCAL_DIR.equals(entity.getType())) { - return true; - } - break; - default: - } - } - return false; - } - - - @VisibleForTesting - protected static final class EntityComparator implements Comparator { - @Override - public int compare(Entity o1, Entity o2) { - String s1 = o1.getName(); - String s2 = o2.getName(); - if (s1 == null || s2 == null){ - s1 = o1.getD().toString(); - s2 = o2.getD().toString(); - } - return s1.toLowerCase().compareTo(s2.toLowerCase()); - } - } - - @VisibleForTesting - protected static final Comparator entityComparator = new EntityComparator(); - - protected AtlasObjectId toAtlasObjectId(Object obj) { - final AtlasObjectId ret; - - if (obj instanceof AtlasObjectId) { - ret = (AtlasObjectId) obj; - } else if (obj instanceof Map) { - ret = new AtlasObjectId((Map) obj); - } else if (obj != null) { - ret = new AtlasObjectId(obj.toString()); // guid - } else { - ret = null; - } - - return ret; - } - - protected List toAtlasObjectIdList(Object obj) { - final List ret; - - if (obj instanceof Collection) { - Collection coll = (Collection) obj; - - ret = new ArrayList<>(coll.size()); - - for (Object item : coll) { - AtlasObjectId objId = toAtlasObjectId(item); - - if (objId != null) { - ret.add(objId); - } - } - } else { - AtlasObjectId objId = toAtlasObjectId(obj); - - if (objId != null) { - ret = new ArrayList<>(1); - - ret.add(objId); - } else { - ret = null; - } - } - - return ret; - } - - protected AtlasStruct toAtlasStruct(Object obj) { - final AtlasStruct ret; - - if (obj instanceof AtlasStruct) { - ret = (AtlasStruct) obj; - } else if (obj instanceof Map) { - ret = new AtlasStruct((Map) obj); - } else { - ret = null; - } - - return ret; - } - - protected List toAtlasStructList(Object obj) { - final List ret; - - if (obj instanceof Collection) { - Collection coll = (Collection) obj; - - ret = new ArrayList<>(coll.size()); - - for (Object item : coll) { - AtlasStruct struct = toAtlasStruct(item); - - if (struct != null) { - ret.add(struct); - } - } - } else { - AtlasStruct struct = toAtlasStruct(obj); - - if (struct != null) { - ret = new ArrayList<>(1); - - ret.add(struct); - } else { - ret = null; - } - } - - return ret; - } -} diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java deleted file mode 100644 index 22397f1e1a..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.bridge; - -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.hadoop.hive.ql.hooks.LineageInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.ATTRIBUTE_COLUMNS; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.ATTRIBUTE_QUALIFIED_NAME; - - -public class ColumnLineageUtils { - public static final Logger LOG = LoggerFactory.getLogger(ColumnLineageUtils.class); - public static class HiveColumnLineageInfo { - public final String depenendencyType; - public final String expr; - public final String inputColumn; - - HiveColumnLineageInfo(LineageInfo.Dependency d, String inputCol) { - depenendencyType = d.getType().name(); - expr = d.getExpr(); - inputColumn = inputCol; - } - - @Override - public String toString(){ - return inputColumn; - } - } - - public static String getQualifiedName(LineageInfo.DependencyKey key){ - String db = key.getDataContainer().getTable().getDbName(); - String table = key.getDataContainer().getTable().getTableName(); - String col = key.getFieldSchema().getName(); - return db + "." + table + "." + col; - } - - public static Map> buildLineageMap(LineageInfo lInfo) { - Map> m = new HashMap<>(); - - for (Map.Entry e : lInfo.entrySet()) { - List l = new ArrayList<>(); - String k = getQualifiedName(e.getKey()); - - if (LOG.isDebugEnabled()) { - LOG.debug("buildLineageMap(): key={}; value={}", e.getKey(), e.getValue()); - } - - Collection baseCols = getBaseCols(e.getValue()); - - if (baseCols != null) { - for (LineageInfo.BaseColumnInfo iCol : baseCols) { - String db = iCol.getTabAlias().getTable().getDbName(); - String table = iCol.getTabAlias().getTable().getTableName(); - String colQualifiedName = iCol.getColumn() == null ? db + "." + table : db + "." + table + "." 
+ iCol.getColumn().getName(); - l.add(new HiveColumnLineageInfo(e.getValue(), colQualifiedName)); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Setting lineage --> Input: {} ==> Output : {}", l, k); - } - m.put(k, l); - } - } - return m; - } - - static Collection getBaseCols(LineageInfo.Dependency lInfoDep) { - Collection ret = null; - - if (lInfoDep != null) { - try { - Method getBaseColsMethod = lInfoDep.getClass().getMethod("getBaseCols"); - - Object retGetBaseCols = getBaseColsMethod.invoke(lInfoDep); - - if (retGetBaseCols != null) { - if (retGetBaseCols instanceof Collection) { - ret = (Collection) retGetBaseCols; - } else { - LOG.warn("{}: unexpected return type from LineageInfo.Dependency.getBaseCols(), expected type {}", - retGetBaseCols.getClass().getName(), "Collection"); - } - } - } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException ex) { - LOG.warn("getBaseCols()", ex); - } - } - - return ret; - } - - static String[] extractComponents(String qualifiedName) { - String[] comps = qualifiedName.split("\\."); - int lastIdx = comps.length - 1; - int atLoc = comps[lastIdx].indexOf('@'); - if (atLoc > 0) { - comps[lastIdx] = comps[lastIdx].substring(0, atLoc); - } - return comps; - } - - static void populateColumnReferenceableMap(Map m, - Referenceable r) { - if (r.getTypeName().equals(HiveDataTypes.HIVE_TABLE.getName())) { - String qName = (String) r.get(ATTRIBUTE_QUALIFIED_NAME); - String[] qNameComps = extractComponents(qName); - for (Referenceable col : (List) r.get(ATTRIBUTE_COLUMNS)) { - String cName = (String) col.get(ATTRIBUTE_QUALIFIED_NAME); - String[] colQNameComps = extractComponents(cName); - String colQName = colQNameComps[0] + "." + colQNameComps[1] + "." + colQNameComps[2]; - m.put(colQName, col); - } - String tableQName = qNameComps[0] + "." + qNameComps[1]; - m.put(tableQName, r); - } - } - - - public static Map buildColumnReferenceableMap(List inputs, - List outputs) { - Map m = new HashMap<>(); - - for (Referenceable r : inputs) { - populateColumnReferenceableMap(m, r); - } - - for (Referenceable r : outputs) { - populateColumnReferenceableMap(m, r); - } - - return m; - } -} diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java deleted file mode 100644 index ae7ab1a220..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java +++ /dev/null @@ -1,327 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.hive.bridge; - -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Partition; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.mapred.TextInputFormat; -import org.mockito.ArgumentMatcher; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import java.util.Arrays; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.*; -import static org.mockito.Matchers.anyObject; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class HiveMetaStoreBridgeTest { - private static final String TEST_DB_NAME = "default"; - public static final String METADATA_NAMESPACE = "primary"; - public static final String TEST_TABLE_NAME = "test_table"; - - @Mock - private Hive hiveClient; - - @Mock - private AtlasClient atlasClient; - - @Mock - private AtlasClientV2 atlasClientV2; - - @Mock - private AtlasEntity atlasEntity; - - @Mock - private AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo; - - @Mock - EntityMutationResponse entityMutationResponse; - - @BeforeMethod - public void initializeMocks() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void testImportThatUpdatesRegisteredDatabase() throws Exception { - // setup database - when(hiveClient.getAllDatabases()).thenReturn(Arrays.asList(new String[]{TEST_DB_NAME})); - String description = "This is a default database"; - Database db = new Database(TEST_DB_NAME, description, "/user/hive/default", null); - when(hiveClient.getDatabase(TEST_DB_NAME)).thenReturn(db); - when(hiveClient.getAllTables(TEST_DB_NAME)).thenReturn(Arrays.asList(new String[]{})); - - returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE); - - when(atlasEntityWithExtInfo.getEntity("72e06b34-9151-4023-aa9d-b82103a50e76")) - .thenReturn((new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_DB.getName(), AtlasClient.GUID, "72e06b34-9151-4023-aa9d-b82103a50e76"))).getEntity()); - - HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2); - bridge.importHiveMetadata(null, null, true); - - // verify update is called - verify(atlasClientV2).updateEntity(anyObject()); - } - - @Test - public void testImportThatUpdatesRegisteredTable() throws Exception { - setupDB(hiveClient, TEST_DB_NAME); - - List
<Table>
hiveTables = setupTables(hiveClient, TEST_DB_NAME, TEST_TABLE_NAME); - - returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE); - - // return existing table - - when(atlasEntityWithExtInfo.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")) - .thenReturn((new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))).getEntity()); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_TABLE.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getTableQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME, TEST_TABLE_NAME)), true, true )) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - when(atlasEntityWithExtInfo.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")) - .thenReturn(createTableReference()); - - Table testTable = hiveTables.get(0); - String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(METADATA_NAMESPACE, testTable); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_PROCESS.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - processQualifiedName), true ,true)) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - - HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2); - bridge.importHiveMetadata(null, null, true); - - // verify update is called on table - verify(atlasClientV2, times(2)).updateEntity(anyObject()); - - } - - private void returnExistingDatabase(String databaseName, AtlasClientV2 atlasClientV2, String metadataNamespace) - throws AtlasServiceException { - //getEntity(HiveDataTypes.HIVE_DB.getName(), AtlasClient.GUID, "72e06b34-9151-4023-aa9d-b82103a50e76"); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_DB.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getDBQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME)), true, true)) - .thenReturn((new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_DB.getName(), AtlasClient.GUID, "72e06b34-9151-4023-aa9d-b82103a50e76")))); - - } - - private List
setupTables(Hive hiveClient, String databaseName, String... tableNames) throws HiveException { - List<Table>
tables = new ArrayList<>(); - when(hiveClient.getAllTables(databaseName)).thenReturn(Arrays.asList(tableNames)); - for(String tableName : tableNames) { - Table testTable = createTestTable(databaseName, tableName); - when(hiveClient.getTable(databaseName, tableName)).thenReturn(testTable); - tables.add(testTable); - } - return tables; - } - - private void setupDB(Hive hiveClient, String databaseName) throws HiveException { - when(hiveClient.getAllDatabases()).thenReturn(Arrays.asList(new String[]{databaseName})); - when(hiveClient.getDatabase(databaseName)).thenReturn( - new Database(databaseName, "Default database", "/user/hive/default", null)); - } - - @Test - public void testImportWhenPartitionKeysAreNull() throws Exception { - setupDB(hiveClient, TEST_DB_NAME); - List
hiveTables = setupTables(hiveClient, TEST_DB_NAME, TEST_TABLE_NAME); - Table hiveTable = hiveTables.get(0); - - returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE); - - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_TABLE.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getTableQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME, TEST_TABLE_NAME)), true, true)) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(METADATA_NAMESPACE, hiveTable); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_PROCESS.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - processQualifiedName), true, true)) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - when(atlasEntityWithExtInfo.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")) - .thenReturn(createTableReference()); - - Partition partition = mock(Partition.class); - when(partition.getTable()).thenReturn(hiveTable); - List partitionValues = Arrays.asList(new String[]{}); - when(partition.getValues()).thenReturn(partitionValues); - - when(hiveClient.getPartitions(hiveTable)).thenReturn(Arrays.asList(new Partition[]{partition})); - - HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2); - try { - bridge.importHiveMetadata(null, null, true); - } catch (Exception e) { - Assert.fail("Partition with null key caused import to fail with exception ", e); - } - } - - @Test - public void testImportContinuesWhenTableRegistrationFails() throws Exception { - setupDB(hiveClient, TEST_DB_NAME); - final String table2Name = TEST_TABLE_NAME + "_1"; - List
hiveTables = setupTables(hiveClient, TEST_DB_NAME, TEST_TABLE_NAME, table2Name); - - returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE); - when(hiveClient.getTable(TEST_DB_NAME, TEST_TABLE_NAME)).thenThrow(new RuntimeException("Timeout while reading data from hive metastore")); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_TABLE.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getTableQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME, TEST_TABLE_NAME)))) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - when(atlasEntityWithExtInfo.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")) - .thenReturn(createTableReference()); - - Table testTable = hiveTables.get(1); - String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(METADATA_NAMESPACE, testTable); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_PROCESS.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - processQualifiedName))) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2); - try { - bridge.importHiveMetadata(null, null, false); - } catch (Exception e) { - Assert.fail("Table registration failed with exception", e); - } - } - - @Test - public void testImportFailsWhenTableRegistrationFails() throws Exception { - setupDB(hiveClient, TEST_DB_NAME); - final String table2Name = TEST_TABLE_NAME + "_1"; - List
hiveTables = setupTables(hiveClient, TEST_DB_NAME, TEST_TABLE_NAME, table2Name); - - returnExistingDatabase(TEST_DB_NAME, atlasClientV2, METADATA_NAMESPACE); - when(hiveClient.getTable(TEST_DB_NAME, TEST_TABLE_NAME)).thenThrow(new RuntimeException("Timeout while reading data from hive metastore")); - - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_TABLE.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - HiveMetaStoreBridge.getTableQualifiedName(METADATA_NAMESPACE, TEST_DB_NAME, TEST_TABLE_NAME)))) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - - when(atlasEntityWithExtInfo.getEntity("82e06b34-9151-4023-aa9d-b82103a50e77")) - .thenReturn(createTableReference()); - - Table testTable = hiveTables.get(1); - String processQualifiedName = HiveMetaStoreBridge.getTableProcessQualifiedName(METADATA_NAMESPACE, testTable); - - when(atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_PROCESS.getName(), - Collections.singletonMap(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - processQualifiedName))) - .thenReturn(new AtlasEntity.AtlasEntityWithExtInfo( - getEntity(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.GUID, "82e06b34-9151-4023-aa9d-b82103a50e77"))); - - HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(METADATA_NAMESPACE, hiveClient, atlasClientV2); - try { - bridge.importHiveMetadata(null, null, true); - Assert.fail("Table registration is supposed to fail"); - } catch (Exception e) { - //Expected - } - } - - private AtlasEntity getEntity(String typeName, String attr, String value) { - return new AtlasEntity(typeName, attr, value); - } - - private AtlasEntity createTableReference() { - AtlasEntity tableEntity = new AtlasEntity(HiveDataTypes.HIVE_TABLE.getName()); - AtlasEntity sdEntity = new AtlasEntity(HiveDataTypes.HIVE_STORAGEDESC.getName()); - tableEntity.setAttribute(ATTRIBUTE_STORAGEDESC, AtlasTypeUtil.getObjectId(sdEntity)); - return tableEntity; - } - - private Table createTestTable(String databaseName, String tableName) throws HiveException { - Table table = new Table(databaseName, tableName); - table.setInputFormatClass(TextInputFormat.class); - table.setFields(new ArrayList() {{ - add(new FieldSchema("col1", "string", "comment1")); - } - }); - table.setTableType(TableType.EXTERNAL_TABLE); - table.setDataLocation(new Path("somehdfspath")); - return table; - } - - private class MatchesReferenceableProperty implements ArgumentMatcher { - private final String attrName; - private final Object attrValue; - - public MatchesReferenceableProperty(String attrName, Object attrValue) { - this.attrName = attrName; - this.attrValue = attrValue; - } - - @Override - public boolean matches(Object o) { - return attrValue.equals(((AtlasEntity) o).getAttribute(attrName)); - } - } -} diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java deleted file mode 100644 index 981600c4e1..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.bridge; - -import org.apache.atlas.AtlasClient; -import org.apache.atlas.hive.HiveITBase; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.testng.annotations.Test; - -import java.util.List; - -import static org.testng.Assert.assertEquals; - -public class HiveMetastoreBridgeIT extends HiveITBase { - - @Test - public void testCreateTableAndImport() throws Exception { - String tableName = tableName(); - String pFile = createTestDFSPath("parentPath"); - String query = String.format("create EXTERNAL table %s(id string, cnt int) location '%s'", tableName, pFile); - - runCommand(query); - - String dbId = assertDatabaseIsRegistered(DEFAULT_DB); - String tableId = assertTableIsRegistered(DEFAULT_DB, tableName); - - //verify lineage is created - String processId = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getTableProcessQualifiedName(DEFAULT_DB, tableName), null); - AtlasEntity processsEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - validateHDFSPaths(processsEntity, INPUTS, pFile); - - List outputs = toAtlasObjectIdList(processsEntity.getAttribute(OUTPUTS)); - - assertEquals(outputs.size(), 1); - assertEquals(outputs.get(0).getGuid(), tableId); - - int tableCount = atlasClient.listEntities(HiveDataTypes.HIVE_TABLE.getName()).size(); - - //Now import using import tool - should be no-op. 
This also tests update since table exists - AtlasEntity dbEntity = atlasClientV2.getEntityByGuid(dbId).getEntity(); - - hiveMetaStoreBridge.importTable(dbEntity, DEFAULT_DB, tableName, true); - - String tableId2 = assertTableIsRegistered(DEFAULT_DB, tableName); - assertEquals(tableId2, tableId); - - String processId2 = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getTableProcessQualifiedName(DEFAULT_DB, tableName), null); - assertEquals(processId2, processId); - - //assert that table is de-duped and no new entity is created - int newTableCount = atlasClient.listEntities(HiveDataTypes.HIVE_TABLE.getName()).size(); - assertEquals(newTableCount, tableCount); - } - - @Test - public void testImportCreatedTable() throws Exception { - String tableName = tableName(); - String pFile = createTestDFSPath("parentPath"); - - runCommandWithDelay(driverWithoutContext, String.format("create EXTERNAL table %s(id string) location '%s'", tableName, pFile), 3000); - - String dbId = assertDatabaseIsRegistered(DEFAULT_DB); - - AtlasEntity dbEntity = atlasClientV2.getEntityByGuid(dbId).getEntity(); - - hiveMetaStoreBridge.importTable(dbEntity, DEFAULT_DB, tableName, true); - - String tableId = assertTableIsRegistered(DEFAULT_DB, tableName); - - String processId = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getTableProcessQualifiedName(DEFAULT_DB, tableName), null); - AtlasEntity processEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - List outputs = toAtlasObjectIdList(processEntity.getAttribute(OUTPUTS)); - - assertEquals(outputs.size(), 1); - assertEquals(outputs.get(0).getGuid(), tableId); - } - - //TODO enable this test - //@Test - public void testCreateTableHiveProcessNameAttribute() throws Exception { - //test if \n is trimmed from name attribute of the process entity - String tableName = tableName(); - String processNameQuery = String.format("create table %s (id string)", tableName); - //add \n at the beginning of the query - String query = String.format("%n%n%s", processNameQuery); - - runCommand(query); - - String dbId = assertDatabaseIsRegistered(DEFAULT_DB); - String tableId = assertTableIsRegistered(DEFAULT_DB, tableName); - - //verify lineage is created and the name attribute is the query without \n - String processId = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getTableProcessQualifiedName(DEFAULT_DB, tableName), null); - AtlasEntity processsEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - assertEquals(processsEntity.getAttribute("name"), processNameQuery); - } -} diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java deleted file mode 100755 index 1db73e5a7c..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java +++ /dev/null @@ -1,2554 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.base.Joiner; -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.hive.HiveITBase; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.hook.events.BaseHiveEvent; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.model.instance.*; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.lineage.AtlasLineageInfo; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; -import org.apache.hadoop.hive.ql.Driver; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.hooks.WriteEntity; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.text.ParseException; -import java.util.*; - -import static org.apache.atlas.AtlasClient.NAME; -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.*; -import static org.testng.Assert.*; -import static org.testng.AssertJUnit.assertEquals; - -public class HiveHookIT extends HiveITBase { - private static final Logger LOG = LoggerFactory.getLogger(HiveHookIT.class); - - private static final String PART_FILE = "2015-01-01"; - private static final String PATH_TYPE_NAME = "Path"; - - private Driver driverWithNoHook; - - @BeforeClass - public void setUp() throws Exception { - // initialize 'driverWithNoHook' with HiveServer2 hook and HiveMetastore hook disabled - HiveConf conf = new HiveConf(); - conf.set("hive.exec.post.hooks", ""); - conf.set("hive.metastore.event.listeners", ""); - - SessionState ss = new SessionState(conf); - ss = SessionState.start(ss); - SessionState.setCurrentSessionState(ss); - - // Initialize 'driverWithNoHook' with HS2 hook disabled and HMS hook disabled. 
- driverWithNoHook = new Driver(conf); - - super.setUp(); - } - - @Test - public void testCreateDatabase() throws Exception { - String dbName = "db" + random(); - - runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')"); - - String dbId = assertDatabaseIsRegistered(dbName); - AtlasEntity dbEntity = atlasClientV2.getEntityByGuid(dbId).getEntity(); - Map params = (Map) dbEntity.getAttribute(ATTRIBUTE_PARAMETERS); - List ddlQueries = (List) dbEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(),1); - - Assert.assertNotNull(params); - Assert.assertEquals(params.size(), 2); - Assert.assertEquals(params.get("p1"), "v1"); - - //There should be just one entity per dbname - runCommandWithDelay("drop database " + dbName, 3000); - assertDatabaseIsNotRegistered(dbName); - - runCommandWithDelay("create database " + dbName, 3000); - dbId = assertDatabaseIsRegistered(dbName); - - //assert on qualified name - dbEntity = atlasClientV2.getEntityByGuid(dbId).getEntity(); - - Assert.assertEquals(dbEntity.getAttribute(ATTRIBUTE_QUALIFIED_NAME) , dbName.toLowerCase() + "@" + CLUSTER_NAME); - } - - @Test - public void testPathEntityDefAvailable() throws Exception { - //Check if Path entity definition created or not - AtlasEntityDef pathEntityDef = atlasClientV2.getEntityDefByName("Path"); - assertNotNull(pathEntityDef); - } - - @Test - public void testCreateDatabaseWithLocation() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - - //HDFS Location - String hdfsLocation = "hdfs://localhost:8020/warehouse/tablespace/external/hive/reports.db"; - alterDatabaseLocation(dbName, hdfsLocation); - assertDatabaseLocationRelationship(dbId); - } - - //alter database location - public void alterDatabaseLocation(String dbName, String location) throws Exception { - int timeDelay = 5000; - String query = String.format("ALTER DATABASE %s SET LOCATION \"%s\"", dbName, location); - runCommandWithDelay(query, timeDelay); - } - - public void assertDatabaseLocationRelationship(String dbId) throws Exception { - AtlasEntity dbEntity = atlasClientV2.getEntityByGuid(dbId).getEntity(); - AtlasEntityDef pathEntityDef = getPathEntityDefWithAllSubTypes(); - - assertTrue(dbEntity.hasAttribute(ATTRIBUTE_LOCATION)); - - assertNotNull(dbEntity.getAttribute(ATTRIBUTE_LOCATION)); - - assertNotNull(dbEntity.getRelationshipAttribute(ATTRIBUTE_LOCATION_PATH)); - - AtlasObjectId locationEntityObject = toAtlasObjectId(dbEntity.getRelationshipAttribute(ATTRIBUTE_LOCATION_PATH)); - assertTrue(pathEntityDef.getSubTypes().contains(locationEntityObject.getTypeName())); - } - - public AtlasEntityDef getPathEntityDefWithAllSubTypes() throws Exception { - Set possiblePathSubTypes = new HashSet<>(Arrays.asList("fs_path", "hdfs_path", "aws_s3_pseudo_dir", "aws_s3_v2_directory", "adls_gen2_directory")); - AtlasEntityDef pathEntityDef = atlasClientV2.getEntityDefByName(PATH_TYPE_NAME); - - if(pathEntityDef == null) { - pathEntityDef = new AtlasEntityDef(PATH_TYPE_NAME); - } - - pathEntityDef.setSubTypes(possiblePathSubTypes); - - return pathEntityDef; - } - - @Test - public void testCreateTable() throws Exception { - String tableName = tableName(); - String dbName = createDatabase(); - String colName = columnName(); - - runCommand("create table " + dbName + "." 
+ tableName + "(" + colName + " int, name string)"); - - String tableId = assertTableIsRegistered(dbName, tableName); - String colId = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName), colName)); //there is only one instance of column registered - AtlasEntity colEntity = atlasClientV2.getEntityByGuid(colId).getEntity(); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tableId).getEntity(); - - Assert.assertEquals(colEntity.getAttribute(ATTRIBUTE_QUALIFIED_NAME), String.format("%s.%s.%s@%s", dbName.toLowerCase(), tableName.toLowerCase(), colName.toLowerCase(), CLUSTER_NAME)); - Assert.assertNotNull(colEntity.getAttribute(ATTRIBUTE_TABLE)); - - Assert.assertNotNull(tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)); - Assert.assertEquals(((List)tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 1); - - AtlasObjectId tblObjId = toAtlasObjectId(colEntity.getAttribute(ATTRIBUTE_TABLE)); - - Assert.assertEquals(tblObjId.getGuid(), tableId); - - //assert that column.owner = table.owner - AtlasEntity tblEntity1 = atlasClientV2.getEntityByGuid(tableId).getEntity(); - AtlasEntity colEntity1 = atlasClientV2.getEntityByGuid(colId).getEntity(); - - assertEquals(tblEntity1.getAttribute(ATTRIBUTE_OWNER), colEntity1.getAttribute(ATTRIBUTE_OWNER)); - - //create table where db is not registered - tableName = createTable(); - tableId = assertTableIsRegistered(DEFAULT_DB, tableName); - - AtlasEntity tblEntity2 = atlasClientV2.getEntityByGuid(tableId).getEntity(); - - Assert.assertEquals(tblEntity2.getAttribute(ATTRIBUTE_TABLE_TYPE), TableType.MANAGED_TABLE.name()); - Assert.assertEquals(tblEntity2.getAttribute(ATTRIBUTE_COMMENT), "table comment"); - - String entityName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName); - - Assert.assertEquals(tblEntity2.getAttribute(AtlasClient.NAME), tableName.toLowerCase()); - Assert.assertEquals(tblEntity2.getAttribute(ATTRIBUTE_QUALIFIED_NAME), entityName); - - Table t = hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, tableName); - long createTime = Long.parseLong(t.getMetadata().getProperty(hive_metastoreConstants.DDL_TIME)) * MILLIS_CONVERT_FACTOR; - - verifyTimestamps(tblEntity2, ATTRIBUTE_CREATE_TIME, createTime); - verifyTimestamps(tblEntity2, ATTRIBUTE_LAST_ACCESS_TIME, createTime); - - final AtlasObjectId sdEntity = toAtlasObjectId(tblEntity2.getAttribute(ATTRIBUTE_STORAGEDESC)); - - Assert.assertNotNull(sdEntity); - - // Assert.assertEquals(((Id) sdRef.getAttribute(HiveMetaStoreBridge.TABLE))._getId(), tableId); - - //Create table where database doesn't exist, will create database instance as well - assertDatabaseIsRegistered(DEFAULT_DB); - } - - - private void verifyTimestamps(AtlasEntity ref, String property, long expectedTime) throws ParseException { - //Verify timestamps. - Object createTime = ref.getAttribute(property); - - Assert.assertNotNull(createTime); - - if (expectedTime > 0) { - Assert.assertEquals(expectedTime, createTime); - } - } - - private void verifyTimestamps(AtlasEntity ref, String property) throws ParseException { - verifyTimestamps(ref, property, 0); - } - - //ATLAS-1321: Disable problematic tests. 
Need to revisit and fix them later - @Test(enabled = false) - public void testCreateExternalTable() throws Exception { - String tableName = tableName(); - String colName = columnName(); - String pFile = createTestDFSPath("parentPath"); - String query = String.format("create EXTERNAL table %s.%s(%s, %s) location '%s'", DEFAULT_DB , tableName , colName + " int", "name string", pFile); - - runCommand(query); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName, null, true); - AtlasEntity tblEnity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlList = (List) tblEnity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlList); - assertEquals(ddlList.size(), 1); - - String processId = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, getTableProcessQualifiedName(DEFAULT_DB, tableName), null); - - AtlasEntity processsEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - assertEquals(processsEntity.getAttribute("userName"), UserGroupInformation.getCurrentUser().getShortUserName()); - - verifyTimestamps(processsEntity, "startTime"); - verifyTimestamps(processsEntity, "endTime"); - - validateHDFSPaths(processsEntity, INPUTS, pFile); - } - - private Set getInputs(String inputName, Entity.Type entityType) throws HiveException { - final ReadEntity entity; - - if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) { - entity = new TestReadEntity(lower(new Path(inputName).toString()), entityType); - } else { - entity = new TestReadEntity(getQualifiedTblName(inputName), entityType); - } - - if (entityType == Entity.Type.TABLE) { - entity.setT(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, inputName)); - } - - return new LinkedHashSet() {{ add(entity); }}; - } - - private Set getOutputs(String inputName, Entity.Type entityType) throws HiveException { - final WriteEntity entity; - - if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) { - entity = new TestWriteEntity(lower(new Path(inputName).toString()), entityType); - } else { - entity = new TestWriteEntity(getQualifiedTblName(inputName), entityType); - } - - if (entityType == Entity.Type.TABLE) { - entity.setT(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, inputName)); - } - - return new LinkedHashSet() {{ add(entity); }}; - } - - private void validateOutputTables(AtlasEntity processEntity, Set expectedTables) throws Exception { - validateTables(toAtlasObjectIdList(processEntity.getAttribute(ATTRIBUTE_OUTPUTS)), expectedTables); - } - - private void validateInputTables(AtlasEntity processEntity, Set expectedTables) throws Exception { - validateTables(toAtlasObjectIdList(processEntity.getAttribute(ATTRIBUTE_INPUTS)), expectedTables); - } - - private void validateTables(List tableIds, Set expectedTables) throws Exception { - if (tableIds == null) { - Assert.assertTrue(CollectionUtils.isEmpty(expectedTables)); - } else if (expectedTables == null) { - Assert.assertTrue(CollectionUtils.isEmpty(tableIds)); - } else { - Assert.assertEquals(tableIds.size(), expectedTables.size()); - - List entityQualifiedNames = new ArrayList<>(tableIds.size()); - List expectedTableNames = new ArrayList<>(expectedTables.size()); - - for (AtlasObjectId tableId : tableIds) { - AtlasEntity atlasEntity = atlasClientV2.getEntityByGuid(tableId.getGuid()).getEntity(); - - entityQualifiedNames.add((String) atlasEntity.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } - - for (Iterator iterator = 
expectedTables.iterator(); iterator.hasNext(); ) { - Entity hiveEntity = iterator.next(); - - expectedTableNames.add(hiveEntity.getName()); - } - - for (String entityQualifiedName : entityQualifiedNames) { - boolean found = false; - - for (String expectedTableName : expectedTableNames) { - if (entityQualifiedName.startsWith(expectedTableName)) { - found = true; - - break; - } - } - - assertTrue(found, "Table name '" + entityQualifiedName + "' does not start with any name in the expected list " + expectedTableNames); - } - } - } - - private String assertColumnIsRegistered(String colName) throws Exception { - return assertColumnIsRegistered(colName, null); - } - - private String assertColumnIsRegistered(String colName, AssertPredicate assertPredicate) throws Exception { - LOG.debug("Searching for column {}", colName); - - return assertEntityIsRegistered(HiveDataTypes.HIVE_COLUMN.getName(), ATTRIBUTE_QUALIFIED_NAME, colName, assertPredicate); - } - - private String assertSDIsRegistered(String sdQFName, AssertPredicate assertPredicate) throws Exception { - LOG.debug("Searching for sd {}", sdQFName.toLowerCase()); - - return assertEntityIsRegistered(HiveDataTypes.HIVE_STORAGEDESC.getName(), ATTRIBUTE_QUALIFIED_NAME, sdQFName.toLowerCase(), assertPredicate); - } - - private void assertColumnIsNotRegistered(String colName) throws Exception { - LOG.debug("Searching for column {}", colName); - - assertEntityIsNotRegistered(HiveDataTypes.HIVE_COLUMN.getName(), ATTRIBUTE_QUALIFIED_NAME, colName); - } - - @Test - public void testCTAS() throws Exception { - String tableName = createTable(); - String ctasTableName = "table" + random(); - String query = "create table " + ctasTableName + " as select * from " + tableName; - - runCommand(query); - - final Set readEntities = getInputs(tableName, Entity.Type.TABLE); - final Set writeEntities = getOutputs(ctasTableName, Entity.Type.TABLE); - - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATETABLE_AS_SELECT, readEntities, - writeEntities); - AtlasEntity processEntity1 = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext); - AtlasObjectId process = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity1.getGuid()); - - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - assertTableIsRegistered(DEFAULT_DB, ctasTableName); - } - - private HiveEventContext constructEvent(String query, HiveOperation op, Set inputs, Set outputs) { - HiveEventContext event = new HiveEventContext(); - - event.setQueryStr(query); - event.setOperation(op); - event.setInputs(inputs); - event.setOutputs(outputs); - - return event; - } - - @Test - public void testEmptyStringAsValue() throws Exception{ - String tableName = tableName(); - String command = "create table " + tableName + "(id int, name string) row format delimited lines terminated by '\n' null defined as ''"; - - runCommandWithDelay(command, 3000); - - assertTableIsRegistered(DEFAULT_DB, tableName); - } - - @Test - public void testDropAndRecreateCTASOutput() throws Exception { - String tableName = createTable(); - String ctasTableName = "table" + random(); - String query = "create table " + ctasTableName + " as select * from " + tableName; - - runCommand(query); - - assertTableIsRegistered(DEFAULT_DB, ctasTableName); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = 
getOutputs(ctasTableName, Entity.Type.TABLE); - - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATETABLE_AS_SELECT, inputs, outputs); - AtlasEntity processEntity1 = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext); - AtlasObjectId process = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity1.getGuid()); - - String dropQuery = String.format("drop table %s ", ctasTableName); - - runCommandWithDelay(dropQuery, 5000); - - assertTableIsNotRegistered(DEFAULT_DB, ctasTableName); - - runCommand(query); - - String tblId = assertTableIsRegistered(DEFAULT_DB, ctasTableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlList = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlList); - assertEquals(ddlList.size(), 1); - - outputs = getOutputs(ctasTableName, Entity.Type.TABLE); - - AtlasEntity processEntity2 = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity2 = validateProcessExecution(processEntity2, hiveEventContext); - AtlasObjectId process2 = toAtlasObjectId(processExecutionEntity2.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process2.getGuid(), processEntity2.getGuid()); - - assertNotEquals(processEntity1.getGuid(), processEntity2.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - Assert.assertEquals(numberOfProcessExecutions(processEntity2), 1); - - validateOutputTables(processEntity1, outputs); - } - - @Test - public void testCreateView() throws Exception { - String tableName = createTable(); - String viewName = tableName(); - String query = "create view " + viewName + " as select * from " + tableName; - - runCommand(query); - - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATEVIEW, getInputs(tableName, - Entity.Type.TABLE), getOutputs(viewName, Entity.Type.TABLE)); - AtlasEntity processEntity1 = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - assertTableIsRegistered(DEFAULT_DB, viewName); - - String viewId = assertTableIsRegistered(DEFAULT_DB, viewName); - AtlasEntity viewEntity = atlasClientV2.getEntityByGuid(viewId).getEntity(); - List ddlQueries = (List) viewEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - } - - @Test - public void testAlterViewAsSelect() throws Exception { - //Create the view from table1 - String table1Name = createTable(); - String viewName = tableName(); - String query = "create view " + viewName + " as select * from " + table1Name; - - runCommand(query); - - String table1Id = assertTableIsRegistered(DEFAULT_DB, table1Name); - - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATEVIEW, getInputs(table1Name, - Entity.Type.TABLE), getOutputs(viewName, Entity.Type.TABLE)); - String processId1 = assertProcessIsRegistered(hiveEventContext); - AtlasEntity processEntity1 = 
atlasClientV2.getEntityByGuid(processId1).getEntity(); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String viewId = assertTableIsRegistered(DEFAULT_DB, viewName); - - //Check lineage which includes table1 - String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName); - String tableId = assertTableIsRegistered(DEFAULT_DB, viewName); - AtlasLineageInfo inputLineageInfo = atlasClientV2.getLineageInfo(tableId, AtlasLineageInfo.LineageDirection.INPUT, 0); - Map entityMap = inputLineageInfo.getGuidEntityMap(); - - assertTrue(entityMap.containsKey(viewId)); - assertTrue(entityMap.containsKey(table1Id)); - - //Alter the view from table2 - String table2Name = createTable(); - - query = "alter view " + viewName + " as select * from " + table2Name; - - runCommand(query); - - HiveEventContext hiveEventContext2 = constructEvent(query, HiveOperation.CREATEVIEW, getInputs(table2Name, - Entity.Type.TABLE), getOutputs(viewName, Entity.Type.TABLE)); - String processId2 = assertProcessIsRegistered(hiveEventContext2); - AtlasEntity processEntity2 = atlasClientV2.getEntityByGuid(processId2).getEntity(); - AtlasEntity processExecutionEntity2 = validateProcessExecution(processEntity2, hiveEventContext2); - AtlasObjectId process2 = toAtlasObjectId(processExecutionEntity2.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process2.getGuid(), processEntity2.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity2), 2); - Assert.assertEquals(processEntity1.getGuid(), processEntity2.getGuid()); - - String table2Id = assertTableIsRegistered(DEFAULT_DB, table2Name); - String viewId2 = assertTableIsRegistered(DEFAULT_DB, viewName); - - Assert.assertEquals(viewId2, viewId); - - AtlasEntity viewEntity = atlasClientV2.getEntityByGuid(viewId2).getEntity(); - List ddlQueries = (List) viewEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - - datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName); - - String tableId1 = assertTableIsRegistered(DEFAULT_DB, viewName); - AtlasLineageInfo inputLineageInfo1 = atlasClientV2.getLineageInfo(tableId1, AtlasLineageInfo.LineageDirection.INPUT, 0); - Map entityMap1 = inputLineageInfo1.getGuidEntityMap(); - - assertTrue(entityMap1.containsKey(viewId)); - - //This is through the alter view process - assertTrue(entityMap1.containsKey(table2Id)); - - //This is through the Create view process - assertTrue(entityMap1.containsKey(table1Id)); - - //Outputs dont exist - AtlasLineageInfo outputLineageInfo = atlasClientV2.getLineageInfo(tableId1, AtlasLineageInfo.LineageDirection.OUTPUT, 0); - Map entityMap2 = outputLineageInfo.getGuidEntityMap(); - - assertEquals(entityMap2.size(),0); - } - - private String createTestDFSFile(String path) throws Exception { - return "pfile://" + file(path); - } - - @Test - public void testLoadLocalPath() throws Exception { - String tableName = createTable(false); - String loadFile = file("load"); - String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName; - - String tblId = 
assertTableIsRegistered(DEFAULT_DB, tableName); - - runCommand(query); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, getInputs("file://" + loadFile, Entity.Type.LOCAL_DIR), getOutputs(tableName, Entity.Type.TABLE))); - } - - @Test - public void testLoadLocalPathIntoPartition() throws Exception { - String tableName = createTable(true); - String loadFile = file("load"); - String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName + " partition(dt = '"+ PART_FILE + "')"; - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - - runCommand(query); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null, getOutputs(tableName, Entity.Type.TABLE))); - } - - @Test - public void testLoadDFSPathPartitioned() throws Exception { - String tableName = createTable(true, true, false); - - assertTableIsRegistered(DEFAULT_DB, tableName); - - String loadFile = createTestDFSFile("loadDFSFile"); - String query = "load data inpath '" + loadFile + "' into table " + tableName + " partition(dt = '"+ PART_FILE + "')"; - - runCommand(query); - - Set outputs = getOutputs(tableName, Entity.Type.TABLE); - Set inputs = getInputs(loadFile, Entity.Type.DFS_DIR); - Set partitionOps = new LinkedHashSet<>(outputs); - - partitionOps.addAll(getOutputs(DEFAULT_DB + "@" + tableName + "@dt=" + PART_FILE, Entity.Type.PARTITION)); - - AtlasEntity processReference = validateProcess(constructEvent(query, HiveOperation.LOAD, inputs, partitionOps), inputs, outputs); - - validateHDFSPaths(processReference, INPUTS, loadFile); - validateOutputTables(processReference, outputs); - - String loadFile2 = createTestDFSFile("loadDFSFile1"); - - query = "load data inpath '" + loadFile2 + "' into table " + tableName + " partition(dt = '"+ PART_FILE + "')"; - - runCommand(query); - - Set process2Inputs = getInputs(loadFile2, Entity.Type.DFS_DIR); - Set expectedInputs = new LinkedHashSet<>(); - - expectedInputs.addAll(process2Inputs); - expectedInputs.addAll(inputs); - - validateProcess(constructEvent(query, HiveOperation.LOAD, expectedInputs, partitionOps), expectedInputs, outputs); - } - - private String getQualifiedTblName(String inputTable) { - String inputtblQlfdName = inputTable; - - if (inputTable != null && !inputTable.contains("@")) { - inputtblQlfdName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, inputTable); - } - return inputtblQlfdName; - } - - private AtlasEntity validateProcess(HiveEventContext event, Set inputTables, Set outputTables) throws Exception { - String processId = assertProcessIsRegistered(event, inputTables, outputTables); - AtlasEntity processEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - validateInputTables(processEntity, inputTables); - validateOutputTables(processEntity, outputTables); - - return processEntity; - } - - private AtlasEntity validateProcess(HiveEventContext event) throws Exception { - return validateProcess(event, event.getInputs(), event.getOutputs()); - } - - private AtlasEntity 
validateProcessExecution(AtlasEntity hiveProcess, HiveEventContext event) throws Exception { - String processExecutionId = assertProcessExecutionIsRegistered(hiveProcess, event); - AtlasEntity processExecutionEntity = atlasClientV2.getEntityByGuid(processExecutionId).getEntity(); - return processExecutionEntity; - } - - @Test - public void testInsertIntoTable() throws Exception { - String inputTable1Name = createTable(); - String inputTable2Name = createTable(); - String insertTableName = createTable(); - - assertTableIsRegistered(DEFAULT_DB, inputTable1Name); - assertTableIsRegistered(DEFAULT_DB, insertTableName); - - String query = "insert into " + insertTableName + " select t1.id, t1.name from " + inputTable2Name + " as t2, " + inputTable1Name + " as t1 where t1.id=t2.id"; - - runCommand(query); - - Set inputs = getInputs(inputTable1Name, Entity.Type.TABLE); - - inputs.addAll(getInputs(inputTable2Name, Entity.Type.TABLE)); - - Set outputs = getOutputs(insertTableName, Entity.Type.TABLE); - - (outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT); - - HiveEventContext event = constructEvent(query, HiveOperation.QUERY, inputs, outputs); - - Set expectedInputs = new TreeSet(entityComparator) {{ - addAll(inputs); - }}; - - String tblId = assertTableIsRegistered(DEFAULT_DB, insertTableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - AtlasEntity processEntity1 = validateProcess(event, expectedInputs, outputs); - - //Test sorting of tbl names - SortedSet sortedTblNames = new TreeSet<>(); - - sortedTblNames.add(inputTable1Name.toLowerCase()); - sortedTblNames.add(inputTable2Name.toLowerCase()); - - //Verify sorted order of inputs in qualified name - Assert.assertEquals(processEntity1.getAttribute(ATTRIBUTE_QUALIFIED_NAME), - Joiner.on(SEP).join("QUERY", - getQualifiedTblName(sortedTblNames.first()), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, sortedTblNames.first())), - getQualifiedTblName(sortedTblNames.last()), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, sortedTblNames.last()))) - + IO_SEP + SEP - + Joiner.on(SEP). - join(WriteEntity.WriteType.INSERT.name(), - getQualifiedTblName(insertTableName), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, insertTableName))) - ); - - //Rerun same query. 
Should result in same process - runCommandWithDelay(query, 3000); - - AtlasEntity processEntity2 = validateProcess(event, expectedInputs, outputs); - Assert.assertEquals(numberOfProcessExecutions(processEntity2), 2); - Assert.assertEquals(processEntity1.getGuid(), processEntity2.getGuid()); - } - - @Test - public void testInsertIntoTableProcessExecution() throws Exception { - String inputTable1Name = createTable(); - String inputTable2Name = createTable(); - String insertTableName = createTable(); - - assertTableIsRegistered(DEFAULT_DB, inputTable1Name); - assertTableIsRegistered(DEFAULT_DB, insertTableName); - - String query = "insert into " + insertTableName + " select t1.id, t1.name from " + inputTable2Name + " as t2, " + inputTable1Name + " as t1 where t1.id=t2.id"; - - runCommand(query); - - Set inputs = getInputs(inputTable1Name, Entity.Type.TABLE); - - inputs.addAll(getInputs(inputTable2Name, Entity.Type.TABLE)); - - Set outputs = getOutputs(insertTableName, Entity.Type.TABLE); - - (outputs.iterator().next()).setWriteType(WriteEntity.WriteType.INSERT); - - HiveEventContext event = constructEvent(query, HiveOperation.QUERY, inputs, outputs); - - Set expectedInputs = new TreeSet(entityComparator) {{ - addAll(inputs); - }}; - - assertTableIsRegistered(DEFAULT_DB, insertTableName); - - AtlasEntity processEntity1 = validateProcess(event, expectedInputs, outputs); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, event); - AtlasObjectId process = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity1.getGuid()); - - //Test sorting of tbl names - SortedSet sortedTblNames = new TreeSet<>(); - - sortedTblNames.add(inputTable1Name.toLowerCase()); - sortedTblNames.add(inputTable2Name.toLowerCase()); - - //Verify sorted order of inputs in qualified name - Assert.assertEquals(processEntity1.getAttribute(ATTRIBUTE_QUALIFIED_NAME), - Joiner.on(SEP).join("QUERY", - getQualifiedTblName(sortedTblNames.first()), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, sortedTblNames.first())), - getQualifiedTblName(sortedTblNames.last()), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, sortedTblNames.last()))) - + IO_SEP + SEP - + Joiner.on(SEP). - join(WriteEntity.WriteType.INSERT.name(), - getQualifiedTblName(insertTableName), - HiveMetaStoreBridge.getTableCreatedTime(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, insertTableName))) - ); - - //Rerun same query. 
Should result in same process - runCommandWithDelay(query, 3000); - - AtlasEntity processEntity2 = validateProcess(event, expectedInputs, outputs); - AtlasEntity processExecutionEntity2 = validateProcessExecution(processEntity2, event); - process = toAtlasObjectId(processExecutionEntity2.getRelationshipAttribute(BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity2.getGuid()); - Assert.assertEquals(processEntity1.getGuid(), processEntity2.getGuid()); - - String queryWithDifferentPredicate = "insert into " + insertTableName + " select t1.id, t1.name from " + - inputTable2Name + " as t2, " + inputTable1Name + " as t1 where t1.id=100"; - runCommandWithDelay(queryWithDifferentPredicate, 1000); - - HiveEventContext event3 = constructEvent(queryWithDifferentPredicate, HiveOperation.QUERY, inputs, outputs); - AtlasEntity processEntity3 = validateProcess(event3, expectedInputs, outputs); - AtlasEntity processExecutionEntity3 = validateProcessExecution(processEntity3, event3); - process = toAtlasObjectId(processExecutionEntity3.getRelationshipAttribute(BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity3.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity3), 3); - Assert.assertEquals(processEntity2.getGuid(), processEntity3.getGuid()); - } - - @Test - public void testInsertIntoLocalDir() throws Exception { - String tableName = createTable(); - String randomLocalPath = mkdir("hiverandom.tmp"); - String query = "insert overwrite LOCAL DIRECTORY '" + randomLocalPath + "' select id, name from " + tableName; - - runCommand(query); - - HiveEventContext event = constructEvent(query, HiveOperation.QUERY, - getInputs(tableName, Entity.Type.TABLE), getOutputs(randomLocalPath, Entity.Type.LOCAL_DIR)); - AtlasEntity hiveProcess = validateProcess(event); - AtlasEntity hiveProcessExecution = validateProcessExecution(hiveProcess, event); - AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), hiveProcess.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(hiveProcess), 1); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - } - - @Test - public void testUpdateProcess() throws Exception { - String tableName = createTable(); - String pFile1 = createTestDFSPath("somedfspath1"); - String query = "insert overwrite DIRECTORY '" + pFile1 + "' select id, name from " + tableName; - - runCommand(query); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(pFile1, Entity.Type.DFS_DIR); - - outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE); - - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.QUERY, inputs, outputs); - AtlasEntity processEntity = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity, hiveEventContext); - AtlasObjectId process = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity.getGuid()); - - validateHDFSPaths(processEntity, OUTPUTS, pFile1); - - assertTableIsRegistered(DEFAULT_DB, tableName); - 
- validateInputTables(processEntity, inputs); - - //Rerun same query with same HDFS path - runCommandWithDelay(query, 3000); - - assertTableIsRegistered(DEFAULT_DB, tableName); - - AtlasEntity process2Entity = validateProcess(hiveEventContext); - AtlasEntity processExecutionEntity2 = validateProcessExecution(processEntity, hiveEventContext); - AtlasObjectId process2 = toAtlasObjectId(processExecutionEntity2.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process2.getGuid(), process2Entity.getGuid()); - - - validateHDFSPaths(process2Entity, OUTPUTS, pFile1); - - Assert.assertEquals(process2Entity.getGuid(), processEntity.getGuid()); - - //Rerun same query with a new HDFS path. Will result in same process since HDFS paths is not part of qualified name for QUERY operations - String pFile2 = createTestDFSPath("somedfspath2"); - - query = "insert overwrite DIRECTORY '" + pFile2 + "' select id, name from " + tableName; - - runCommandWithDelay(query, 3000); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - Set p3Outputs = new LinkedHashSet() {{ - addAll(getOutputs(pFile2, Entity.Type.DFS_DIR)); - addAll(outputs); - }}; - - AtlasEntity process3Entity = validateProcess(constructEvent(query, HiveOperation.QUERY, inputs, p3Outputs)); - AtlasEntity processExecutionEntity3 = validateProcessExecution(processEntity, hiveEventContext); - AtlasObjectId process3 = toAtlasObjectId(processExecutionEntity3.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process3.getGuid(), process3Entity.getGuid()); - validateHDFSPaths(process3Entity, OUTPUTS, pFile2); - - Assert.assertEquals(numberOfProcessExecutions(process3Entity), 3); - Assert.assertEquals(process3Entity.getGuid(), processEntity.getGuid()); - } - - @Test - public void testInsertIntoDFSDirPartitioned() throws Exception { - //Test with partitioned table - String tableName = createTable(true); - String pFile1 = createTestDFSPath("somedfspath1"); - String query = "insert overwrite DIRECTORY '" + pFile1 + "' select id, name from " + tableName + " where dt = '" + PART_FILE + "'"; - - runCommand(query); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(pFile1, Entity.Type.DFS_DIR); - - outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE); - - Set partitionIps = new LinkedHashSet<>(inputs); - - partitionIps.addAll(getInputs(DEFAULT_DB + "@" + tableName + "@dt='" + PART_FILE + "'", Entity.Type.PARTITION)); - - AtlasEntity processEntity = validateProcess(constructEvent(query, HiveOperation.QUERY, partitionIps, outputs), inputs, outputs); - - //Rerun same query with different HDFS path. Should not create another process and should update it. 
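For reference, a standalone sketch (not part of this patch) of the idea the comment above relies on: for QUERY operations the hive_process qualifiedName is derived from the operation name plus the sorted input/output tables, their create times and the write type, so writing the same query to a different DFS directory still resolves to the same process. The SEP/IO_SEP values, the db.table@cluster format, and every name below are assumptions made for illustration, not taken from the hook code.

import java.util.Map;
import java.util.TreeMap;

public class ProcessQualifiedNameSketch {
    private static final String SEP    = ":";   // assumed separator value
    private static final String IO_SEP = "->";  // assumed input/output separator value

    // inputs/outputs: tableName -> createTime; copying into a TreeMap keeps the key deterministic
    static String queryProcessQualifiedName(String cluster, String db,
                                            Map<String, Long> inputs,
                                            Map<String, Long> outputs,
                                            String writeType) {
        StringBuilder sb = new StringBuilder("QUERY");
        for (Map.Entry<String, Long> e : new TreeMap<>(inputs).entrySet()) {
            sb.append(SEP).append(qualifiedTblName(cluster, db, e.getKey())).append(SEP).append(e.getValue());
        }
        sb.append(IO_SEP).append(SEP).append(writeType);
        for (Map.Entry<String, Long> e : new TreeMap<>(outputs).entrySet()) {
            sb.append(SEP).append(qualifiedTblName(cluster, db, e.getKey())).append(SEP).append(e.getValue());
        }
        return sb.toString();
    }

    static String qualifiedTblName(String cluster, String db, String table) {
        return (db + "." + table + "@" + cluster).toLowerCase(); // assumed format
    }

    public static void main(String[] args) {
        Map<String, Long> in  = Map.of("t2", 200L, "t1", 100L);
        Map<String, Long> out = Map.of("t3", 300L);

        // The DFS target directory is simply not a parameter of this key, so two runs
        // of the same insert pointed at different directories produce the same value.
        String run1 = queryProcessQualifiedName("primary", "default", in, out, "INSERT");
        String run2 = queryProcessQualifiedName("primary", "default", in, out, "INSERT");
        System.out.println(run1);
        System.out.println(run1.equals(run2)); // true
    }
}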
- - String pFile2 = createTestDFSPath("somedfspath2"); - query = "insert overwrite DIRECTORY '" + pFile2 + "' select id, name from " + tableName + " where dt = '" + PART_FILE + "'"; - - runCommand(query); - - Set pFile2Outputs = getOutputs(pFile2, Entity.Type.DFS_DIR); - - pFile2Outputs.iterator().next().setWriteType(WriteEntity.WriteType.PATH_WRITE); - - //Now the process has 2 paths - one older with deleted reference to partition and another with the the latest partition - Set p2Outputs = new LinkedHashSet() {{ - addAll(pFile2Outputs); - addAll(outputs); - }}; - - AtlasEntity process2Entity = validateProcess(constructEvent(query, HiveOperation.QUERY, partitionIps, pFile2Outputs), inputs, p2Outputs); - - validateHDFSPaths(process2Entity, OUTPUTS, pFile2); - - Assert.assertEquals(process2Entity.getGuid(), processEntity.getGuid()); - } - - //Disabling test as temporary table is not captured by hiveHook(https://issues.apache.org/jira/browse/ATLAS-1274) - @Test(enabled = false) - public void testInsertIntoTempTable() throws Exception { - String tableName = createTable(); - String insertTableName = createTable(false, false, true); - - assertTableIsRegistered(DEFAULT_DB, tableName); - assertTableIsNotRegistered(DEFAULT_DB, insertTableName, true); - - String query = "insert into " + insertTableName + " select id, name from " + tableName; - - runCommand(query); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(insertTableName, Entity.Type.TABLE); - - outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT); - - HiveEventContext event = constructEvent(query, HiveOperation.QUERY, inputs, outputs); - AtlasEntity hiveProcess = validateProcess(event); - AtlasEntity hiveProcessExecution = validateProcessExecution(hiveProcess, event); - AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), hiveProcess.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(hiveProcess), 1); - - assertTableIsRegistered(DEFAULT_DB, tableName); - assertTableIsRegistered(DEFAULT_DB, insertTableName, null, true); - } - - @Test - public void testInsertIntoPartition() throws Exception { - boolean isPartitionedTable = true; - String tableName = createTable(isPartitionedTable); - String insertTableName = createTable(isPartitionedTable); - String query = "insert into " + insertTableName + " partition(dt = '"+ PART_FILE + "') select id, name from " + tableName + " where dt = '"+ PART_FILE + "'"; - - runCommand(query); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(insertTableName, Entity.Type.TABLE); - - outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT); - - Set partitionIps = new LinkedHashSet() { - { - addAll(inputs); - add(getPartitionInput()); - } - }; - - Set partitionOps = new LinkedHashSet() { - { - addAll(outputs); - add(getPartitionOutput()); - } - }; - - HiveEventContext event = constructEvent(query, HiveOperation.QUERY, partitionIps, partitionOps); - AtlasEntity hiveProcess = validateProcess(event, inputs, outputs); - AtlasEntity hiveProcessExecution = validateProcessExecution(hiveProcess, event); - AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), hiveProcess.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(hiveProcess), 1); - assertTableIsRegistered(DEFAULT_DB, tableName); - - 
String tblId = assertTableIsRegistered(DEFAULT_DB, insertTableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - //TODO -Add update test case - } - - @Test - public void testExportImportUnPartitionedTable() throws Exception { - String tableName = createTable(false); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - - String filename = "file://" + mkdir("exportUnPartitioned"); - String query = "export table " + tableName + " to \"" + filename + "\""; - - runCommand(query); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(filename, Entity.Type.DFS_DIR); - - HiveEventContext event = constructEvent(query, HiveOperation.EXPORT, inputs, outputs); - AtlasEntity processEntity = validateProcess(event); - AtlasEntity hiveProcessExecution = validateProcessExecution(processEntity, event); - AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process.getGuid(), processEntity.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity), 1); - validateHDFSPaths(processEntity, OUTPUTS, filename); - validateInputTables(processEntity, inputs); - - //Import - String importTableName = createTable(false); - - String importTblId = assertTableIsRegistered(DEFAULT_DB, importTableName); - - query = "import table " + importTableName + " from '" + filename + "'"; - - runCommand(query); - - AtlasEntity importTblEntity = atlasClientV2.getEntityByGuid(importTblId).getEntity(); - List importTblddlQueries = (List) importTblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(importTblddlQueries); - Assert.assertEquals(importTblddlQueries.size(), 1); - - outputs = getOutputs(importTableName, Entity.Type.TABLE); - - HiveEventContext event2 = constructEvent(query, HiveOperation.IMPORT, - getInputs(filename, Entity.Type.DFS_DIR), outputs); - AtlasEntity processEntity2 = validateProcess(event2); - AtlasEntity hiveProcessExecution2 = validateProcessExecution(processEntity2, event2); - AtlasObjectId process2 = toAtlasObjectId(hiveProcessExecution2.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process2.getGuid(), processEntity2.getGuid()); - - Assert.assertEquals(numberOfProcessExecutions(processEntity2), 1); - Assert.assertNotEquals(processEntity.getGuid(), processEntity2.getGuid()); - - //Should create another process - filename = "file://" + mkdir("export2UnPartitioned"); - query = "export table " + tableName + " to \"" + filename + "\""; - - runCommand(query); - - AtlasEntity tblEntity2 = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries2 = (List) tblEntity2.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries2); - Assert.assertEquals(ddlQueries2.size(), 1); - - inputs = getInputs(tableName, Entity.Type.TABLE); - outputs = getOutputs(filename, Entity.Type.DFS_DIR); - - HiveEventContext event3 = constructEvent(query, HiveOperation.EXPORT, inputs, outputs); - AtlasEntity processEntity3 = 
validateProcess(event3); - AtlasEntity hiveProcessExecution3 = validateProcessExecution(processEntity3, event3); - AtlasObjectId process3 = toAtlasObjectId(hiveProcessExecution3.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process3.getGuid(), processEntity3.getGuid()); - - Assert.assertEquals(numberOfProcessExecutions(processEntity3), 1); - - // Should be a different process compared to the previous ones - Assert.assertNotEquals(processEntity.getGuid(), processEntity3.getGuid()); - Assert.assertNotEquals(processEntity2.getGuid(), processEntity3.getGuid()); - - //import again shouyld create another process - query = "import table " + importTableName + " from '" + filename + "'"; - - runCommand(query); - - AtlasEntity tblEntity3 = atlasClientV2.getEntityByGuid(importTblId).getEntity(); - List ddlQueries3 = (List) tblEntity3.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries3); - Assert.assertEquals(ddlQueries3.size(), 1); - - outputs = getOutputs(importTableName, Entity.Type.TABLE); - - HiveEventContext event4 = constructEvent(query, HiveOperation.IMPORT, getInputs(filename, - Entity.Type.DFS_DIR), outputs); - AtlasEntity processEntity4 = validateProcess(event4); - AtlasEntity hiveProcessExecution4 = validateProcessExecution(processEntity4, event4); - AtlasObjectId process4 = toAtlasObjectId(hiveProcessExecution4.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process4.getGuid(), processEntity4.getGuid()); - - Assert.assertEquals(numberOfProcessExecutions(processEntity4), 1); - - // Should be a different process compared to the previous ones - Assert.assertNotEquals(processEntity.getGuid(), processEntity4.getGuid()); - Assert.assertNotEquals(processEntity2.getGuid(), processEntity4.getGuid()); - Assert.assertNotEquals(processEntity3.getGuid(), processEntity4.getGuid()); - } - - @Test - public void testExportImportPartitionedTable() throws Exception { - boolean isPartitionedTable = true; - String tableName = createTable(isPartitionedTable); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - - //Add a partition - String partFile = "file://" + mkdir("partition"); - String query = "alter table " + tableName + " add partition (dt='"+ PART_FILE + "') location '" + partFile + "'"; - - runCommand(query); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 1); - - String filename = "pfile://" + mkdir("export"); - - query = "export table " + tableName + " to \"" + filename + "\""; - - runCommand(query); - - AtlasEntity tblEntity2 = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries2 = (List) tblEntity2.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries2); - Assert.assertEquals(ddlQueries2.size(), 1); - - Set expectedExportInputs = getInputs(tableName, Entity.Type.TABLE); - Set outputs = getOutputs(filename, Entity.Type.DFS_DIR); - Set partitionIps = getInputs(DEFAULT_DB + "@" + tableName + "@dt=" + PART_FILE, Entity.Type.PARTITION); //Note that export has only partition as input in this case - - partitionIps.addAll(expectedExportInputs); - - HiveEventContext event1 = constructEvent(query, HiveOperation.EXPORT, partitionIps, outputs); - AtlasEntity processEntity1 = validateProcess(event1, expectedExportInputs, outputs); - AtlasEntity 
hiveProcessExecution1 = validateProcessExecution(processEntity1, event1); - AtlasObjectId process1 = toAtlasObjectId(hiveProcessExecution1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - validateHDFSPaths(processEntity1, OUTPUTS, filename); - - //Import - String importTableName = createTable(true); - - String tblId2 = assertTableIsRegistered(DEFAULT_DB, tableName); - - query = "import table " + importTableName + " from '" + filename + "'"; - - runCommand(query); - - AtlasEntity tblEntity3 = atlasClientV2.getEntityByGuid(tblId2).getEntity(); - List ddlQueries3 = (List) tblEntity3.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries3); - Assert.assertEquals(ddlQueries3.size(), 1); - - Set expectedImportInputs = getInputs(filename, Entity.Type.DFS_DIR); - Set importOutputs = getOutputs(importTableName, Entity.Type.TABLE); - Set partitionOps = getOutputs(DEFAULT_DB + "@" + importTableName + "@dt=" + PART_FILE, Entity.Type.PARTITION); - - partitionOps.addAll(importOutputs); - - HiveEventContext event2 = constructEvent(query, HiveOperation.IMPORT, expectedImportInputs , partitionOps); - AtlasEntity processEntity2 = validateProcess(event2, expectedImportInputs, importOutputs); - AtlasEntity hiveProcessExecution2 = validateProcessExecution(processEntity2, event2); - AtlasObjectId process2 = toAtlasObjectId(hiveProcessExecution2.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process2.getGuid(), processEntity2.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity2), 1); - Assert.assertNotEquals(processEntity1.getGuid(), processEntity2.getGuid()); - - //Export should update same process - filename = "pfile://" + mkdir("export2"); - query = "export table " + tableName + " to \"" + filename + "\""; - - runCommand(query); - - Set outputs2 = getOutputs(filename, Entity.Type.DFS_DIR); - Set p3Outputs = new LinkedHashSet() {{ - addAll(outputs2); - addAll(outputs); - }}; - - HiveEventContext event3 = constructEvent(query, HiveOperation.EXPORT, partitionIps, outputs2); - - // this process entity should return same as the processEntity1 since the inputs and outputs are the same, - // hence the qualifiedName will be the same - AtlasEntity processEntity3 = validateProcess(event3, expectedExportInputs, p3Outputs); - AtlasEntity hiveProcessExecution3 = validateProcessExecution(processEntity3, event3); - AtlasObjectId process3 = toAtlasObjectId(hiveProcessExecution3.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process3.getGuid(), processEntity3.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity3), 2); - Assert.assertEquals(processEntity1.getGuid(), processEntity3.getGuid()); - - query = "alter table " + importTableName + " drop partition (dt='"+ PART_FILE + "')"; - - runCommand(query); - - //Import should update same process - query = "import table " + importTableName + " from '" + filename + "'"; - - runCommandWithDelay(query, 3000); - - Set importInputs = getInputs(filename, Entity.Type.DFS_DIR); - Set expectedImport2Inputs = new LinkedHashSet() {{ - addAll(importInputs); - addAll(expectedImportInputs); - }}; - - HiveEventContext event4 = constructEvent(query, HiveOperation.IMPORT, importInputs, partitionOps); - - // This process is going to be same as processEntity2 - AtlasEntity processEntity4 = 
validateProcess(event4, expectedImport2Inputs, importOutputs); - AtlasEntity hiveProcessExecution4 = validateProcessExecution(processEntity4, event4); - AtlasObjectId process4 = toAtlasObjectId(hiveProcessExecution4.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process4.getGuid(), processEntity4.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity4), 2); - Assert.assertEquals(processEntity2.getGuid(), processEntity4.getGuid()); - Assert.assertNotEquals(processEntity1.getGuid(), processEntity4.getGuid()); - } - - @Test - public void testIgnoreSelect() throws Exception { - String tableName = createTable(); - String query = "select * from " + tableName; - - runCommand(query); - - Set inputs = getInputs(tableName, Entity.Type.TABLE); - HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.QUERY, inputs, null); - - assertProcessIsNotRegistered(hiveEventContext); - - //check with uppercase table name - query = "SELECT * from " + tableName.toUpperCase(); - - runCommand(query); - - assertProcessIsNotRegistered(hiveEventContext); - } - - @Test - public void testAlterTableRenameAliasRegistered() throws Exception{ - String tableName = createTable(false); - String tableGuid = assertTableIsRegistered(DEFAULT_DB, tableName); - String newTableName = tableName(); - String query = String.format("alter table %s rename to %s", tableName, newTableName); - - runCommand(query); - - String newTableGuid = assertTableIsRegistered(DEFAULT_DB, newTableName); - - assertEquals(tableGuid, newTableGuid); - - AtlasEntity atlasEntity = atlasClientV2.getEntityByGuid(newTableGuid).getEntity(); - Map valueMap = atlasEntity.getAttributes(); - Iterable aliasList = (Iterable) valueMap.get("aliases"); - String aliasTableName = aliasList.iterator().next(); - - assert tableName.toLowerCase().equals(aliasTableName); - } - - @Test - public void testAlterTableRename() throws Exception { - String tableName = createTable(true); - String newDBName = createDatabase(); - String tableId = assertTableIsRegistered(DEFAULT_DB, tableName); - AtlasEntity tableEntity = atlasClientV2.getEntityByGuid(tableId).getEntity(); - String createTime = String.valueOf(tableEntity.getAttribute(ATTRIBUTE_CREATE_TIME)); - - Assert.assertNotNull(createTime); - - String columnGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), NAME)); - String sdGuid = assertSDIsRegistered(HiveMetaStoreBridge.getStorageDescQFName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName)), null); - - assertDatabaseIsRegistered(newDBName); - - String colTraitDetails = createTrait(columnGuid); //Add trait to column - String sdTraitDetails = createTrait(sdGuid); //Add trait to sd - String partColumnGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt")); - String partColTraitDetails = createTrait(partColumnGuid); //Add trait to part col keys - String newTableName = tableName(); - String query = String.format("alter table %s rename to %s", DEFAULT_DB + "." + tableName, newDBName + "." 
+ newTableName); - - runCommandWithDelay(query, 3000); - - String newColGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, newTableName), NAME)); - - Assert.assertEquals(newColGuid, columnGuid); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, tableName), NAME)); - - assertTrait(columnGuid, colTraitDetails); - - String newSdGuid = assertSDIsRegistered(HiveMetaStoreBridge.getStorageDescQFName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, newTableName)), null); - - Assert.assertEquals(newSdGuid, sdGuid); - assertTrait(sdGuid, sdTraitDetails); - assertTrait(partColumnGuid, partColTraitDetails); - assertTableIsNotRegistered(DEFAULT_DB, tableName); - - String renamedTableId = assertTableIsRegistered(newDBName, newTableName, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - AtlasObjectId sd = toAtlasObjectId(entity.getAttribute(ATTRIBUTE_STORAGEDESC)); - - assertNotNull(sd); - } - }); - - AtlasEntity renamedTableEntity = atlasClientV2.getEntityByGuid(renamedTableId).getEntity(); - List ddlQueries = (List) renamedTableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - - } - - private List getColumns(String dbName, String tableName) throws Exception { - String tableId = assertTableIsRegistered(dbName, tableName); - AtlasEntityWithExtInfo tblEntityWithExtInfo = atlasClientV2.getEntityByGuid(tableId); - AtlasEntity tableEntity = tblEntityWithExtInfo.getEntity(); - - //with soft delete, the deleted columns are returned as well. 
So, filter the deleted ones - List columns = toAtlasObjectIdList(tableEntity.getAttribute(ATTRIBUTE_COLUMNS)); - List activeColumns = new ArrayList<>(); - - for (AtlasObjectId col : columns) { - AtlasEntity columnEntity = tblEntityWithExtInfo.getEntity(col.getGuid()); - - if (columnEntity.getStatus() == AtlasEntity.Status.ACTIVE) { - activeColumns.add(columnEntity); - } - } - - return activeColumns; - } - - private String createTrait(String guid) throws AtlasServiceException { - //add trait - //valid type names in v2 must consist of a letter followed by a sequence of letter, number, or _ characters - String traitName = "PII_Trait" + random(); - AtlasClassificationDef piiTrait = AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - - atlasClientV2.createAtlasTypeDefs(new AtlasTypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList())); - atlasClientV2.addClassifications(guid, Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - - return traitName; - } - - private void assertTrait(String guid, String traitName) throws AtlasServiceException { - AtlasClassification.AtlasClassifications classifications = atlasClientV2.getClassifications(guid); - - Assert.assertEquals(classifications.getList().get(0).getTypeName(), traitName); - } - - @Test - public void testAlterTableAddColumn() throws Exception { - String tableName = createTable(); - String column = columnName(); - String query = "alter table " + tableName + " add columns (" + column + " string)"; - - runCommand(query); - - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), column)); - - //Verify the number of columns present in the table - List columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 3); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - - } - - //ATLAS-1321: Disable problematic tests. 
Need to revisit and fix them later - @Test(enabled = false) - public void testAlterTableDropColumn() throws Exception { - String tableName = createTable(); - String colDropped = "id"; - String query = "alter table " + tableName + " replace columns (name string)"; - - runCommand(query); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), colDropped)); - - //Verify the number of columns present in the table - List columns = getColumns(DEFAULT_DB, tableName); - - assertEquals(columns.size(), 1); - assertEquals(columns.get(0).getAttribute(NAME), "name"); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - } - - @Test - public void testAlterTableChangeColumn() throws Exception { - //Change name - String oldColName = NAME; - String newColName = "name1"; - String tableName = createTable(); - String query = String.format("alter table %s change %s %s string", tableName, oldColName, newColName); - - runCommandWithDelay(query, 3000); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName)); - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName)); - - //Verify the number of columns present in the table - List columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 2); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName); - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - - //Change column type - oldColName = "name1"; - newColName = "name2"; - - String newColType = "int"; - - query = String.format("alter table %s change column %s %s %s", tableName, oldColName, newColName, newColType); - - runCommandWithDelay(query, 3000); - - columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 2); - - String newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName); - - assertColumnIsRegistered(newColQualifiedName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - assertEquals(entity.getAttribute("type"), "int"); - } - }); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName)); - - AtlasEntity tblEntity2 = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries2 = (List) tblEntity2.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries2); - Assert.assertEquals(ddlQueries2.size(), 3); - - //Change name and add comment - oldColName = "name2"; - newColName = "name3"; - - String comment = "added comment"; - - query = String.format("alter table %s change column %s %s %s COMMENT '%s' after id", tableName, oldColName, newColName, newColType, comment); - - runCommandWithDelay(query, 
3000); - - columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 2); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName)); - - newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName); - - assertColumnIsRegistered(newColQualifiedName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - assertEquals(entity.getAttribute(ATTRIBUTE_COMMENT), comment); - } - }); - - //Change column position - oldColName = "name3"; - newColName = "name4"; - query = String.format("alter table %s change column %s %s %s first", tableName, oldColName, newColName, newColType); - - runCommandWithDelay(query, 3000); - - columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 2); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName)); - - newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName); - - assertColumnIsRegistered(newColQualifiedName); - - String finalNewColName = newColName; - - String tblId3 = assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - List columns = toAtlasObjectIdList(entity.getAttribute(ATTRIBUTE_COLUMNS)); - - assertEquals(columns.size(), 2); - } - } - ); - - AtlasEntity tblEntity3 = atlasClientV2.getEntityByGuid(tblId3).getEntity(); - List ddlQueries3 = (List) tblEntity3.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries3); - Assert.assertEquals(ddlQueries3.size(), 5); - - //Change col position again - oldColName = "name4"; - newColName = "name5"; - query = String.format("alter table %s change column %s %s %s after id", tableName, oldColName, newColName, newColType); - - runCommandWithDelay(query, 3000); - - columns = getColumns(DEFAULT_DB, tableName); - - Assert.assertEquals(columns.size(), 2); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName)); - - newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName); - - assertColumnIsRegistered(newColQualifiedName); - - //Check col position - String finalNewColName2 = newColName; - - String tblId4 = assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - List columns = toAtlasObjectIdList(entity.getAttribute(ATTRIBUTE_COLUMNS)); - - assertEquals(columns.size(), 2); - } - } - ); - - AtlasEntity tblEntity4 = atlasClientV2.getEntityByGuid(tblId4).getEntity(); - List ddlQueries4 = (List) tblEntity4.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries4); - Assert.assertEquals(ddlQueries4.size(), 6); - } - - /** - * Reenabling this test since HIVE-14706 is fixed now and the hive version we are using now sends - * us the column lineage information - * @throws Exception - */ - @Test - public void testColumnLevelLineage() throws Exception { - String 
sourceTable = "table" + random(); - - runCommand("create table " + sourceTable + "(a int, b int)"); - - String sourceTableGUID = assertTableIsRegistered(DEFAULT_DB, sourceTable); - String a_guid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, sourceTable), "a")); - String b_guid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, sourceTable), "b")); - String ctasTableName = "table" + random(); - String query = "create table " + ctasTableName + " as " + "select sum(a+b) as a, count(*) as b from " + sourceTable; - - runCommand(query); - - String dest_a_guid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, ctasTableName), "a")); - String dest_b_guid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, ctasTableName), "b")); - - Set inputs = getInputs(sourceTable, Entity.Type.TABLE); - Set outputs = getOutputs(ctasTableName, Entity.Type.TABLE); - HiveEventContext event = constructEvent(query, HiveOperation.CREATETABLE_AS_SELECT, inputs, outputs); - AtlasEntity processEntity1 = validateProcess(event); - AtlasEntity hiveProcessExecution1 = validateProcessExecution(processEntity1, event); - AtlasObjectId process1 = toAtlasObjectId(hiveProcessExecution1.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - Assert.assertEquals(processEntity1.getGuid(), processEntity1.getGuid()); - - assertTableIsRegistered(DEFAULT_DB, ctasTableName); - - String processQName = sortEventsAndGetProcessQualifiedName(event); - List aLineageInputs = Arrays.asList(a_guid, b_guid); - String aLineageProcessName = processQName + ":" + "a"; - - LOG.debug("Searching for column lineage process {} ", aLineageProcessName); - String guid = assertEntityIsRegistered(HiveDataTypes.HIVE_COLUMN_LINEAGE.getName(), ATTRIBUTE_QUALIFIED_NAME, aLineageProcessName, null); - - AtlasEntity colLineageEntity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List processInputs = toAtlasObjectIdList(colLineageEntity.getAttribute("inputs")); - List processInputsAsString = new ArrayList<>(); - - for(AtlasObjectId input: processInputs){ - processInputsAsString.add(input.getGuid()); - } - - Collections.sort(processInputsAsString); - Collections.sort(aLineageInputs); - - Assert.assertEquals(processInputsAsString, aLineageInputs); - - List bLineageInputs = Arrays.asList(sourceTableGUID); - String bLineageProcessName = processQName + ":" + "b"; - - LOG.debug("Searching for column lineage process {} ", bLineageProcessName); - - String guid1 = assertEntityIsRegistered(HiveDataTypes.HIVE_COLUMN_LINEAGE.getName(), ATTRIBUTE_QUALIFIED_NAME, bLineageProcessName, null); - - - AtlasEntity colLineageEntity1 = atlasClientV2.getEntityByGuid(guid1).getEntity(); - List bProcessInputs = toAtlasObjectIdList(colLineageEntity1.getAttribute("inputs")); - List bProcessInputsAsString = new ArrayList<>(); - - for(AtlasObjectId input: bProcessInputs){ - bProcessInputsAsString.add(input.getGuid()); - } - - Collections.sort(bProcessInputsAsString); - Collections.sort(bLineageInputs); - - Assert.assertEquals(bProcessInputsAsString, bLineageInputs); - - //Test lineage API response - 
AtlasLineageInfo atlasLineageInfoInput = atlasClientV2.getLineageInfo(dest_a_guid, AtlasLineageInfo.LineageDirection.INPUT,0); - Map entityMap = atlasLineageInfoInput.getGuidEntityMap(); - - ObjectNode response = atlasClient.getInputGraphForEntity(dest_a_guid); - JsonNode vertices = response.get("values").get("vertices"); - JsonNode dest_a_val = vertices.get(dest_a_guid); - JsonNode src_a_val = vertices.get(a_guid); - JsonNode src_b_val = vertices.get(b_guid); - - Assert.assertNotNull(dest_a_val); - Assert.assertNotNull(src_a_val); - Assert.assertNotNull(src_b_val); - - ObjectNode b_response = atlasClient.getInputGraphForEntity(dest_b_guid); - JsonNode b_vertices = b_response.get("values").get("vertices"); - JsonNode b_val = b_vertices.get(dest_b_guid); - JsonNode src_tbl_val = b_vertices.get(sourceTableGUID); - - Assert.assertNotNull(b_val); - Assert.assertNotNull(src_tbl_val); - } - - @Test - public void testIgnoreTruncateTable() throws Exception { - String tableName = createTable(false); - String query = String.format("truncate table %s", tableName); - - runCommand(query); - - Set outputs = getOutputs(tableName, Entity.Type.TABLE); - HiveEventContext event = constructEvent(query, HiveOperation.TRUNCATETABLE, null, outputs); - - assertTableIsRegistered(DEFAULT_DB, tableName); - assertProcessIsNotRegistered(event); - } - - @Test - public void testAlterTablePartitionColumnType() throws Exception { - String tableName = createTable(true, true, false); - String newType = "int"; - String query = String.format("ALTER TABLE %s PARTITION COLUMN (dt %s)", tableName, newType); - - runCommand(query); - - String colQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt"); - String dtColId = assertColumnIsRegistered(colQualifiedName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity column) throws Exception { - Assert.assertEquals(column.getAttribute("type"), newType); - } - }); - - assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity table) throws Exception { - final List partitionKeys = toAtlasObjectIdList(table.getAttribute("partitionKeys")); - Assert.assertEquals(partitionKeys.size(), 1); - Assert.assertEquals(partitionKeys.get(0).getGuid(), dtColId); - - } - }); - } - - @Test - public void testAlterTableWithoutHookConf() throws Exception { - String tableName = tableName(); - String createCommand = "create table " + tableName + " (id int, name string)"; - - driverWithNoHook.run(createCommand); - - assertTableIsNotRegistered(DEFAULT_DB, tableName); - - String command = "alter table " + tableName + " change id id_new string"; - - runCommand(command); - - assertTableIsRegistered(DEFAULT_DB, tableName); - - String tbqn = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName); - - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(tbqn, "id_new")); - } - - @Test - public void testTraitsPreservedOnColumnRename() throws Exception { - String dbName = createDatabase(); - String tableName = tableName(); - String createQuery = String.format("create table %s.%s (id int, name string)", dbName, tableName); - - runCommand(createQuery); - - String tbqn = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName); - String guid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(tbqn, "id")); - String trait = createTrait(guid); - String oldColName = "id"; - 
String newColName = "id_new"; - String query = String.format("alter table %s.%s change %s %s string", dbName, tableName, oldColName, newColName); - - runCommand(query); - - String guid2 = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(tbqn, "id_new")); - - assertEquals(guid2, guid); - - assertTrue(atlasClient.getEntity(guid2).getTraitNames().contains(trait)); - } - - @Test - public void testAlterViewRename() throws Exception { - String tableName = createTable(); - String viewName = tableName(); - String newName = tableName(); - String query = "create view " + viewName + " as select * from " + tableName; - - runCommandWithDelay(query, 5000); - - query = "alter view " + viewName + " rename to " + newName; - - runCommandWithDelay(query, 5000); - - assertTableIsNotRegistered(DEFAULT_DB, viewName); - - String viewId = assertTableIsRegistered(DEFAULT_DB, newName); - AtlasEntity viewEntity = atlasClientV2.getEntityByGuid(viewId).getEntity(); - List ddlQueries = (List) viewEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - } - - @Test - public void testAlterTableLocation() throws Exception { - //Its an external table, so the HDFS location should also be registered as an entity - String tableName = createTable(true, true, false); - String testPath = createTestDFSPath("testBaseDir"); - String query = "alter table " + tableName + " set location '" + testPath + "'"; - - runCommandWithDelay(query, 8000); - - String tblId = assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity tableRef) throws Exception { - AtlasObjectId sd = toAtlasObjectId(tableRef.getAttribute(ATTRIBUTE_STORAGEDESC)); - - assertNotNull(sd); - } - }); - - AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity(); - List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - Assert.assertNotNull(ddlQueries); - Assert.assertEquals(ddlQueries.size(), 2); - - String processQualifiedName = getTableProcessQualifiedName(DEFAULT_DB, tableName); - String processId = assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, processQualifiedName, null); - AtlasEntity processEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - Assert.assertEquals(numberOfProcessExecutions(processEntity), 2); - //validateProcessExecution(processEntity, event); - validateHDFSPaths(processEntity, INPUTS, testPath); - } - - @Test - public void testAlterTableFileFormat() throws Exception { - String tableName = createTable(); - String testFormat = "orc"; - String query = "alter table " + tableName + " set FILEFORMAT " + testFormat; - - runCommand(query); - - assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity tableRef) throws Exception { - AtlasObjectId sdObjectId = toAtlasObjectId(tableRef.getAttribute(ATTRIBUTE_STORAGEDESC)); - AtlasEntity sdEntity = atlasClientV2.getEntityByGuid(sdObjectId.getGuid()).getEntity(); - - Assert.assertEquals(sdEntity.getAttribute(ATTRIBUTE_INPUT_FORMAT), "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"); - Assert.assertEquals(sdEntity.getAttribute(ATTRIBUTE_OUTPUT_FORMAT), "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"); - Assert.assertNotNull(sdEntity.getAttribute(ATTRIBUTE_SERDE_INFO)); - - AtlasStruct serdeInfo = toAtlasStruct(sdEntity.getAttribute(ATTRIBUTE_SERDE_INFO)); - - 
Assert.assertEquals(serdeInfo.getAttribute(ATTRIBUTE_SERIALIZATION_LIB), "org.apache.hadoop.hive.ql.io.orc.OrcSerde"); - Assert.assertNotNull(serdeInfo.getAttribute(ATTRIBUTE_PARAMETERS)); - Assert.assertEquals(((Map) serdeInfo.getAttribute(ATTRIBUTE_PARAMETERS)).get("serialization.format"), "1"); - } - }); - - - /** - * Hive 'alter table stored as' is not supported - See https://issues.apache.org/jira/browse/HIVE-9576 - * query = "alter table " + tableName + " STORED AS " + testFormat.toUpperCase(); - * runCommand(query); - - * tableRef = atlasClientV1.getEntity(tableId); - * sdRef = (AtlasEntity)tableRef.getAttribute(HiveMetaStoreBridge.STORAGE_DESC); - * Assert.assertEquals(sdRef.getAttribute(HiveMetaStoreBridge.STORAGE_DESC_INPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"); - * Assert.assertEquals(sdRef.getAttribute(HiveMetaStoreBridge.STORAGE_DESC_OUTPUT_FMT), "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"); - * Assert.assertEquals(((Map) sdRef.getAttribute(HiveMetaStoreBridge.PARAMETERS)).getAttribute("orc.compress"), "ZLIB"); - */ - } - - @Test - public void testAlterTableBucketingClusterSort() throws Exception { - String tableName = createTable(); - List cols = Collections.singletonList("id"); - - runBucketSortQuery(tableName, 5, cols, cols); - - cols = Arrays.asList("id", NAME); - - runBucketSortQuery(tableName, 2, cols, cols); - } - - private void runBucketSortQuery(String tableName, final int numBuckets, final List bucketCols, final List sortCols) throws Exception { - String fmtQuery = "alter table %s CLUSTERED BY (%s) SORTED BY (%s) INTO %s BUCKETS"; - String query = String.format(fmtQuery, tableName, stripListBrackets(bucketCols.toString()), stripListBrackets(sortCols.toString()), numBuckets); - - runCommand(query); - - assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - verifyBucketSortingProperties(entity, numBuckets, bucketCols, sortCols); - } - }); - } - - private String stripListBrackets(String listElements) { - return StringUtils.strip(StringUtils.strip(listElements, "["), "]"); - } - - private void verifyBucketSortingProperties(AtlasEntity tableRef, int numBuckets, List bucketColNames, List sortcolNames) throws Exception { - AtlasObjectId sdObjectId = toAtlasObjectId(tableRef.getAttribute(ATTRIBUTE_STORAGEDESC)); - AtlasEntity sdEntity = atlasClientV2.getEntityByGuid(sdObjectId.getGuid()).getEntity(); - - Assert.assertEquals((sdEntity.getAttribute(ATTRIBUTE_NUM_BUCKETS)), numBuckets); - Assert.assertEquals(sdEntity.getAttribute(ATTRIBUTE_BUCKET_COLS), bucketColNames); - - List hiveOrderStructList = toAtlasStructList(sdEntity.getAttribute(ATTRIBUTE_SORT_COLS)); - - Assert.assertNotNull(hiveOrderStructList); - Assert.assertEquals(hiveOrderStructList.size(), sortcolNames.size()); - - for (int i = 0; i < sortcolNames.size(); i++) { - AtlasStruct hiveOrderStruct = hiveOrderStructList.get(i); - - Assert.assertNotNull(hiveOrderStruct); - Assert.assertEquals(hiveOrderStruct.getAttribute("col"), sortcolNames.get(i)); - Assert.assertEquals(hiveOrderStruct.getAttribute("order"), 1); - } - } - - @Test - public void testAlterTableSerde() throws Exception { - //SERDE PROPERTIES - String tableName = createTable(); - Map expectedProps = new HashMap() {{ - put("key1", "value1"); - }}; - - runSerdePropsQuery(tableName, expectedProps); - - expectedProps.put("key2", "value2"); - - //Add another property - runSerdePropsQuery(tableName, expectedProps); - } - - @Test - public 
void testDropTable() throws Exception { - //Test Deletion of tables and its corrresponding columns - String tableName = createTable(true, true, false); - - assertTableIsRegistered(DEFAULT_DB, tableName); - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id")); - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), NAME)); - - String query = String.format("drop table %s ", tableName); - - runCommandWithDelay(query, 3000); - - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id")); - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), NAME)); - assertTableIsNotRegistered(DEFAULT_DB, tableName); - } - - private WriteEntity getPartitionOutput() { - TestWriteEntity partEntity = new TestWriteEntity(PART_FILE, Entity.Type.PARTITION); - - return partEntity; - } - - private ReadEntity getPartitionInput() { - ReadEntity partEntity = new TestReadEntity(PART_FILE, Entity.Type.PARTITION); - - return partEntity; - } - - @Test - public void testDropDatabaseWithCascade() throws Exception { - //Test Deletion of database and its corresponding tables - String dbName = "db" + random(); - - runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')"); - - int numTables = 10; - String[] tableNames = new String[numTables]; - - for(int i = 0; i < numTables; i++) { - tableNames[i] = createTable(true, true, false); - } - - String query = String.format("drop database %s cascade", dbName); - - runCommand(query); - - //Verify columns are not registered for one of the tables - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id")); - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), NAME)); - - for(int i = 0; i < numTables; i++) { - assertTableIsNotRegistered(dbName, tableNames[i]); - } - - assertDatabaseIsNotRegistered(dbName); - } - - @Test - public void testDropDatabaseWithoutCascade() throws Exception { - //Test Deletion of database and its corresponding tables - String dbName = "db" + random(); - - runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')"); - - int numTables = 5; - String[] tableNames = new String[numTables]; - - for(int i = 0; i < numTables; i++) { - tableNames[i] = createTable(true, true, false); - - String query = String.format("drop table %s", tableNames[i]); - - runCommand(query); - - assertTableIsNotRegistered(dbName, tableNames[i]); - } - - String query = String.format("drop database %s", dbName); - - runCommand(query); - - String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(CLUSTER_NAME, dbName); - - Thread.sleep(10000); - - try { - atlasClientV2.getEntityByAttribute(HiveDataTypes.HIVE_DB.getName(), Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName)); - } catch (AtlasServiceException e) { - if (e.getStatus() == ClientResponse.Status.NOT_FOUND) { - return; - } - } - - fail(String.format("Entity was not supposed to exist for typeName = %s, attributeName = %s, attributeValue = %s", HiveDataTypes.HIVE_DB.getName(), ATTRIBUTE_QUALIFIED_NAME, 
dbQualifiedName)); - } - - @Test - public void testDropNonExistingDB() throws Exception { - //Test Deletion of a non existing DB - String dbName = "nonexistingdb"; - - assertDatabaseIsNotRegistered(dbName); - - String query = String.format("drop database if exists %s cascade", dbName); - - runCommand(query); - - //Should have no effect - assertDatabaseIsNotRegistered(dbName); - } - - @Test - public void testDropNonExistingTable() throws Exception { - //Test Deletion of a non existing table - String tableName = "nonexistingtable"; - - assertTableIsNotRegistered(DEFAULT_DB, tableName); - - String query = String.format("drop table if exists %s", tableName); - - runCommand(query); - - //Should have no effect - assertTableIsNotRegistered(DEFAULT_DB, tableName); - } - - @Test - public void testDropView() throws Exception { - //Test Deletion of tables and its corrresponding columns - String tableName = createTable(true, true, false); - String viewName = tableName(); - String query = "create view " + viewName + " as select * from " + tableName; - - runCommandWithDelay(query, 3000); - - assertTableIsRegistered(DEFAULT_DB, viewName); - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id")); - assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), NAME)); - - query = String.format("drop view %s ", viewName); - - runCommandWithDelay(query, 3000); - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id")); - assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), NAME)); - assertTableIsNotRegistered(DEFAULT_DB, viewName); - } - - private void runSerdePropsQuery(String tableName, Map expectedProps) throws Exception { - String serdeLib = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; - String serializedProps = getSerializedProps(expectedProps); - String query = String.format("alter table %s set SERDE '%s' WITH SERDEPROPERTIES (%s)", tableName, serdeLib, serializedProps); - - runCommand(query); - - verifyTableSdProperties(tableName, serdeLib, expectedProps); - } - - private String getSerializedProps(Map expectedProps) { - StringBuilder sb = new StringBuilder(); - - for(String expectedPropKey : expectedProps.keySet()) { - if(sb.length() > 0) { - sb.append(","); - } - - sb.append("'").append(expectedPropKey).append("'"); - sb.append("="); - sb.append("'").append(expectedProps.get(expectedPropKey)).append("'"); - } - - return sb.toString(); - } - - @Test - public void testAlterDBOwner() throws Exception { - String dbName = createDatabase(); - - assertDatabaseIsRegistered(dbName); - - String owner = "testOwner"; - String fmtQuery = "alter database %s set OWNER %s %s"; - String query = String.format(fmtQuery, dbName, "USER", owner); - - runCommandWithDelay(query, 3000); - - assertDatabaseIsRegistered(dbName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) { - assertEquals(entity.getAttribute(AtlasClient.OWNER), owner); - } - }); - } - - @Test - public void testAlterDBProperties() throws Exception { - String dbName = createDatabase(); - String fmtQuery = "alter database %s %s DBPROPERTIES (%s)"; - - testAlterProperties(Entity.Type.DATABASE, dbName, fmtQuery); - } - - @Test - public void 
testAlterTableProperties() throws Exception { - String tableName = createTable(); - String fmtQuery = "alter table %s %s TBLPROPERTIES (%s)"; - - testAlterProperties(Entity.Type.TABLE, tableName, fmtQuery); - } - - private void testAlterProperties(Entity.Type entityType, String entityName, String fmtQuery) throws Exception { - String SET_OP = "set"; - String UNSET_OP = "unset"; - Map expectedProps = new HashMap() {{ - put("testPropKey1", "testPropValue1"); - put("comment", "test comment"); - }}; - - String query = String.format(fmtQuery, entityName, SET_OP, getSerializedProps(expectedProps)); - - runCommandWithDelay(query, 3000); - - verifyEntityProperties(entityType, entityName, expectedProps, false); - - expectedProps.put("testPropKey2", "testPropValue2"); - //Add another property - - query = String.format(fmtQuery, entityName, SET_OP, getSerializedProps(expectedProps)); - - runCommandWithDelay(query, 3000); - - verifyEntityProperties(entityType, entityName, expectedProps, false); - - if (entityType != Entity.Type.DATABASE) { - //Database unset properties doesnt work - alter database %s unset DBPROPERTIES doesnt work - //Unset all the props - StringBuilder sb = new StringBuilder("'"); - - query = String.format(fmtQuery, entityName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb, expectedProps.keySet()).append('\'')); - - runCommandWithDelay(query, 3000); - - verifyEntityProperties(entityType, entityName, expectedProps, true); - } - } - - @Test - public void testAlterViewProperties() throws Exception { - String tableName = createTable(); - String viewName = tableName(); - String query = "create view " + viewName + " as select * from " + tableName; - - runCommand(query); - - String fmtQuery = "alter view %s %s TBLPROPERTIES (%s)"; - - testAlterProperties(Entity.Type.TABLE, viewName, fmtQuery); - } - - private void verifyEntityProperties(Entity.Type type, String entityName, final Map expectedProps, final boolean checkIfNotExists) throws Exception { - switch(type) { - case TABLE: - assertTableIsRegistered(DEFAULT_DB, entityName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - verifyProperties(entity, expectedProps, checkIfNotExists); - } - }); - break; - case DATABASE: - assertDatabaseIsRegistered(entityName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity entity) throws Exception { - verifyProperties(entity, expectedProps, checkIfNotExists); - } - }); - break; - } - } - - private void verifyTableSdProperties(String tableName, final String serdeLib, final Map expectedProps) throws Exception { - assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() { - @Override - public void assertOnEntity(AtlasEntity tableRef) throws Exception { - AtlasObjectId sdEntity = toAtlasObjectId(tableRef.getAttribute(ATTRIBUTE_STORAGEDESC)); - - assertNotNull(sdEntity); - } - }); - } - - - private void verifyProperties(AtlasStruct referenceable, Map expectedProps, boolean checkIfNotExists) { - Map parameters = (Map) referenceable.getAttribute(ATTRIBUTE_PARAMETERS); - - if (!checkIfNotExists) { - //Check if properties exist - Assert.assertNotNull(parameters); - for (String propKey : expectedProps.keySet()) { - Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey)); - } - } else { - //Check if properties dont exist - if (expectedProps != null && parameters != null) { - for (String propKey : expectedProps.keySet()) { - Assert.assertFalse(parameters.containsKey(propKey)); - } - } - } - } - - 
private String sortEventsAndGetProcessQualifiedName(final HiveEventContext event) throws HiveException{ - SortedSet sortedHiveInputs = event.getInputs() == null ? null : new TreeSet(entityComparator); - SortedSet sortedHiveOutputs = event.getOutputs() == null ? null : new TreeSet(entityComparator); - - if (event.getInputs() != null) { - sortedHiveInputs.addAll(event.getInputs()); - } - - if (event.getOutputs() != null) { - sortedHiveOutputs.addAll(event.getOutputs()); - } - - return getProcessQualifiedName(hiveMetaStoreBridge, event, sortedHiveInputs, sortedHiveOutputs, getSortedProcessDataSets(event.getInputs()), getSortedProcessDataSets(event.getOutputs())); - } - - private String assertProcessIsRegistered(final HiveEventContext event) throws Exception { - try { - String processQFName = sortEventsAndGetProcessQualifiedName(event); - - LOG.debug("Searching for process with query {}", processQFName); - - return assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, processQFName, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - List recentQueries = (List) entity.getAttribute(ATTRIBUTE_RECENT_QUERIES); - Assert.assertEquals(recentQueries.get(0), lower(event.getQueryStr())); - } - }); - } catch (Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - private String assertProcessIsRegistered(final HiveEventContext event, final Set inputTbls, final Set outputTbls) throws Exception { - try { - SortedSet sortedHiveInputs = event.getInputs() == null ? null : new TreeSet(entityComparator); - SortedSet sortedHiveOutputs = event.getOutputs() == null ? null : new TreeSet(entityComparator); - - if (event.getInputs() != null) { - sortedHiveInputs.addAll(event.getInputs()); - } - - if (event.getOutputs() != null) { - sortedHiveOutputs.addAll(event.getOutputs()); - } - - String processQFName = getProcessQualifiedName(hiveMetaStoreBridge, event, sortedHiveInputs, sortedHiveOutputs, getSortedProcessDataSets(inputTbls), getSortedProcessDataSets(outputTbls)); - - LOG.debug("Searching for process with query {}", processQFName); - - return assertEntityIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, processQFName, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - List recentQueries = (List) entity.getAttribute(BaseHiveEvent.ATTRIBUTE_RECENT_QUERIES); - - Assert.assertEquals(recentQueries.get(0), lower(event.getQueryStr())); - } - }); - } catch(Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - private String assertProcessExecutionIsRegistered(AtlasEntity hiveProcess, final HiveEventContext event) throws Exception { - try { - String guid = ""; - List processExecutions = toAtlasObjectIdList(hiveProcess.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS_EXECUTIONS)); - for (AtlasObjectId processExecution : processExecutions) { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2. 
- getEntityByGuid(processExecution.getGuid()); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - if (String.valueOf(entity.getAttribute(ATTRIBUTE_QUERY_TEXT)).equals(event.getQueryStr().toLowerCase().trim())) { - guid = entity.getGuid(); - } - } - - return assertEntityIsRegisteredViaGuid(guid, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - String queryText = (String) entity.getAttribute(ATTRIBUTE_QUERY_TEXT); - Assert.assertEquals(queryText, event.getQueryStr().toLowerCase().trim()); - } - }); - } catch(Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - - private String getDSTypeName(Entity entity) { - return Entity.Type.TABLE.equals(entity.getType()) ? HiveDataTypes.HIVE_TABLE.name() : HiveMetaStoreBridge.HDFS_PATH; - } - - private SortedMap getSortedProcessDataSets(Set inputTbls) { - SortedMap inputs = new TreeMap<>(entityComparator); - - if (inputTbls != null) { - for (final T tbl : inputTbls) { - AtlasEntity inputTableRef = new AtlasEntity(getDSTypeName(tbl), new HashMap() {{ - put(ATTRIBUTE_QUALIFIED_NAME, tbl.getName()); - }}); - - inputs.put(tbl, inputTableRef); - } - } - return inputs; - } - - private void assertProcessIsNotRegistered(HiveEventContext event) throws Exception { - try { - SortedSet sortedHiveInputs = event.getInputs() == null ? null : new TreeSet(entityComparator); - SortedSet sortedHiveOutputs = event.getOutputs() == null ? null : new TreeSet(entityComparator); - - if (event.getInputs() != null) { - sortedHiveInputs.addAll(event.getInputs()); - } - - if (event.getOutputs() != null) { - sortedHiveOutputs.addAll(event.getOutputs()); - } - - String processQFName = getProcessQualifiedName(hiveMetaStoreBridge, event, sortedHiveInputs, sortedHiveOutputs, getSortedProcessDataSets(event.getInputs()), getSortedProcessDataSets(event.getOutputs())); - - LOG.debug("Searching for process with query {}", processQFName); - - assertEntityIsNotRegistered(HiveDataTypes.HIVE_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, processQFName); - } catch(Exception e) { - LOG.error("Exception : ", e); - } - } - - private void assertTableIsNotRegistered(String dbName, String tableName, boolean isTemporaryTable) throws Exception { - LOG.debug("Searching for table {}.{}", dbName, tableName); - - String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, isTemporaryTable); - - assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), ATTRIBUTE_QUALIFIED_NAME, tableQualifiedName); - } - - private void assertTableIsNotRegistered(String dbName, String tableName) throws Exception { - LOG.debug("Searching for table {}.{}", dbName, tableName); - - String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName, false); - - assertEntityIsNotRegistered(HiveDataTypes.HIVE_TABLE.getName(), ATTRIBUTE_QUALIFIED_NAME, tableQualifiedName); - } - - private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate) throws Exception { - return assertTableIsRegistered(dbName, tableName, assertPredicate, false); - } - - @Test - public void testLineage() throws Exception { - String table1 = createTable(false); - String db2 = createDatabase(); - String table2 = tableName(); - String query = String.format("create table %s.%s as select * from %s", db2, table2, table1); - - runCommand(query); - - String table1Id = assertTableIsRegistered(DEFAULT_DB, table1); - String table2Id = assertTableIsRegistered(db2, 
table2); - AtlasLineageInfo inputLineage = atlasClientV2.getLineageInfo(table2Id, AtlasLineageInfo.LineageDirection.INPUT, 0); - Map entityMap = inputLineage.getGuidEntityMap(); - - assertTrue(entityMap.containsKey(table1Id)); - assertTrue(entityMap.containsKey(table2Id)); - - AtlasLineageInfo inputLineage1 = atlasClientV2.getLineageInfo(table1Id, AtlasLineageInfo.LineageDirection.OUTPUT, 0); - Map entityMap1 = inputLineage1.getGuidEntityMap(); - - assertTrue(entityMap1.containsKey(table1Id)); - assertTrue(entityMap1.containsKey(table2Id)); - } - - //For ATLAS-448 - @Test - public void testNoopOperation() throws Exception { - runCommand("show compactions"); - runCommand("show transactions"); - } - - private String createDatabase() throws Exception { - String dbName = dbName(); - - runCommand("create database " + dbName); - - return dbName; - } - - private String columnName() { - return "col" + random(); - } - - private String createTable() throws Exception { - return createTable(false); - } - - private String createTable(boolean isPartitioned) throws Exception { - String tableName = tableName(); - - runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (isPartitioned ? " partitioned by(dt string)" : "")); - - return tableName; - } - - private String createTable(boolean isExternal, boolean isPartitioned, boolean isTemporary) throws Exception { - String tableName = tableName(); - - String location = ""; - if (isExternal) { - location = " location '" + createTestDFSPath("someTestPath") + "'"; - } - - runCommandWithDelay("create " + (isExternal ? " EXTERNAL " : "") + (isTemporary ? "TEMPORARY " : "") + "table " + tableName + "(id int, name string) comment 'table comment' " + (isPartitioned ? " partitioned by(dt string)" : "") + location, 3000); - - return tableName; - } - - // ReadEntity class doesn't offer a constructor that takes (name, type). A hack to get the tests going! - private static class TestReadEntity extends ReadEntity { - private final String name; - private final Entity.Type type; - - public TestReadEntity(String name, Entity.Type type) { - this.name = name; - this.type = type; - } - - @Override - public String getName() { return name; } - - @Override - public Entity.Type getType() { return type; } - } - - // WriteEntity class doesn't offer a constructor that takes (name, type). A hack to get the tests going! - private static class TestWriteEntity extends WriteEntity { - private final String name; - private final Entity.Type type; - - public TestWriteEntity(String name, Entity.Type type) { - this.name = name; - this.type = type; - } - - @Override - public String getName() { return name; } - - @Override - public Entity.Type getType() { return type; } - } - - private int numberOfProcessExecutions(AtlasEntity hiveProcess) { - return toAtlasObjectIdList(hiveProcess.getRelationshipAttribute( - BaseHiveEvent.ATTRIBUTE_PROCESS_EXECUTIONS)).size(); - } -} diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveMetastoreHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveMetastoreHookIT.java deleted file mode 100755 index 2bce1b2a0e..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveMetastoreHookIT.java +++ /dev/null @@ -1,384 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.hive.hook; - -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.hive.HiveITBase; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.annotations.Test; - -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.hive.hook.events.BaseHiveEvent.ATTRIBUTE_DDL_QUERIES; -import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; -import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; -import static org.testng.AssertJUnit.*; - -public class HiveMetastoreHookIT extends HiveITBase { - private static final Logger LOG = LoggerFactory.getLogger(HiveMetastoreHookIT.class); - - @Test (priority = 1) - public void testCreateDatabase() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - AtlasEntity dbEntity = getAtlasEntity(dbId); - - assertEquals(((List) dbEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - } - - @Test (priority = 2) - public void testAlterDatabase() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - - AtlasEntity dbEntity = getAtlasEntity(dbId); - assertNotNull(dbEntity); - - // SET DBPROPERTIES - query = "ALTER DATABASE " + dbName + " SET DBPROPERTIES (\"prop1\"=\"val1\", \"prop2\"=\"val2\")"; - runCommandWithDelay(query); - - dbEntity = getAtlasEntity(dbId); - Map parameters = (Map) dbEntity.getAttribute("parameters"); - - assertEquals(((List) dbEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - assertNotNull(parameters); - assertEquals(2, parameters.size()); - - // SET OWNER to 'hive' - query = "ALTER DATABASE " + dbName + " SET OWNER USER hive"; - runCommandWithDelay(query); - - dbEntity = getAtlasEntity(dbId); - - - assertEquals(((List) dbEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - assertEquals(dbEntity.getAttribute("owner"), "hive"); - assertEquals(dbEntity.getAttribute("ownerType"), "USER"); - - // SET LOCATION - String hdfsPath = "hdfs://localhost:8020/warehouse/tablespace/managed/dwx/new_db.db"; - - query = String.format("ALTER DATABASE %s SET LOCATION \"%s\"", dbName, hdfsPath); - runCommandWithDelay(query); - - dbEntity = getAtlasEntity(dbId); - - assertEquals(((List) dbEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - - String location = (String) dbEntity.getAttribute("location"); - assertEquals(location, hdfsPath); - } - - @Test (priority = 3) - public void testDropDatabase() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = 
assertDatabaseIsRegistered(dbName); - - AtlasEntity dbEntity = getAtlasEntity(dbId); - assertNotNull(dbEntity); - - query = "DROP DATABASE " + dbName; - runCommand(query); - assertDatabaseIsNotRegistered(dbName); - - dbEntity = getAtlasEntity(dbId); - assertEquals(dbEntity.getStatus(), DELETED); - } - - @Test (priority = 4) - public void testDropDatabaseWithTables() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommandWithDelay(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String table1 = tableName(); - runCommandWithDelay("CREATE TABLE " + dbName + "." + table1 + " (name string, age int, dob date)"); - String table1Id = assertTableIsRegistered(dbName, table1); - assertEquals(getAtlasEntity(table1Id).getStatus(), ACTIVE); - - String table2 = tableName(); - runCommandWithDelay("CREATE TABLE " + dbName + "." + table2 + " (name string, age int, dob date)"); - String table2Id = assertTableIsRegistered(dbName, table2); - assertEquals(getAtlasEntity(table2Id).getStatus(), ACTIVE); - - query = "DROP DATABASE " + dbName + " CASCADE"; - runCommandWithDelay(query); - assertDatabaseIsNotRegistered(dbName); - - assertEquals(getAtlasEntity(dbId).getStatus(), DELETED); - assertEquals(getAtlasEntity(table1Id).getStatus(), DELETED); - assertEquals(getAtlasEntity(table2Id).getStatus(), DELETED); - } - - @Test (priority = 5) - public void testCreateTable() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." + tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - AtlasEntity tblEntity = getAtlasEntity(tblId); - - assertEquals(((List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - } - - @Test (priority = 6) - public void testCreateView() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." + tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - - String viewName = tableName(); - - runCommand("CREATE VIEW " + dbName + "." + viewName + " AS SELECT * FROM " + dbName + "." + tableName); - - tblId = assertTableIsRegistered(dbName, viewName); - AtlasEntity tblEntity = getAtlasEntity(tblId); - - assertEquals(((List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - } - - @Test (priority = 7) - public void testAlterTableProperties() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - assertEquals(((List) getAtlasEntity(dbId).getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." 
+ tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - assertEquals(((List) getAtlasEntity(tblId).getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - - // SET TBLPROPERTIES - query = "ALTER TABLE " + dbName + "." + tableName + " SET TBLPROPERTIES (\"prop1\"=\"val1\", \"prop2\"=\"val2\", \"prop3\"=\"val3\")"; - runCommandWithDelay(query); - - query = "ALTER TABLE " + dbName + "." + tableName + " SET TBLPROPERTIES (\"comment\" = \"sample comment\")"; - runCommandWithDelay(query); - - // SET SERDE - query = "ALTER TABLE " + dbName + "." + tableName + " SET SERDE \"org.apache.hadoop.hive.ql.io.orc.OrcSerde\" WITH SERDEPROPERTIES (\"prop1\"=\"val1\", \"prop2\"=\"val2\")"; - runCommandWithDelay(query); - - // SET SERDEPROPERTIES - query = "ALTER TABLE " + dbName + "." + tableName + " SET SERDEPROPERTIES (\"prop1\"=\"val1\", \"prop2\"=\"val2\")"; - runCommandWithDelay(query); - - AtlasEntity tableEntity = getAtlasEntity(tblId); - Map tableParameters = (Map) tableEntity.getAttribute("parameters"); - - assertEquals(tableParameters.get("comment"), "sample comment"); - assertEquals(tableParameters.get("prop1"), "val1"); - assertEquals(tableParameters.get("prop2"), "val2"); - assertEquals(tableParameters.get("prop3"), "val3"); - - AtlasEntity sdEntity = getAtlasEntity((String) ((Map) tableEntity.getAttribute("sd")).get("guid")); - Map serdeInfo = (Map) sdEntity.getAttribute("serdeInfo"); - Map serdeAttrs = (Map) serdeInfo.get("attributes"); - - assertEquals(serdeAttrs.get("serializationLib"), "org.apache.hadoop.hive.ql.io.orc.OrcSerde"); - assertEquals(((Map) serdeAttrs.get("parameters")).get("prop1"), "val1"); - assertEquals(((Map) serdeAttrs.get("parameters")).get("prop2"), "val2"); - assertEquals(((List) tableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - } - - @Test (priority = 8) - public void testAlterTableRenameTableName() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." + tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - - // RENAME TABLE NAME - String newTableName = tableName + "_new"; - query = "ALTER TABLE " + dbName + "." + tableName + " RENAME TO " + dbName + "." 
+ newTableName; - runCommandWithDelay(query); - - AtlasEntityWithExtInfo tableEntityWithExtInfo = getAtlasEntityWithExtInfo(tblId); - AtlasEntity tableEntity = tableEntityWithExtInfo.getEntity(); - - assertEquals(((List) tableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - - // validate table rename in table entity - assertEquals(newTableName, tableEntity.getAttribute("name")); - assertTrue(((String) tableEntity.getAttribute("qualifiedName")).contains(newTableName)); - - // validate table rename in column and sd entity - for (AtlasEntity referredEntity : tableEntityWithExtInfo.getReferredEntities().values()) { - assertTrue(((String) referredEntity.getAttribute("qualifiedName")).contains(newTableName)); - } - } - - @Test (priority = 9) - public void testAlterTableRenameColumnName() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." + tableName + " (col1 int, col2 int, col3 int)"); - String tblId = assertTableIsRegistered(dbName, tableName); - AtlasEntityWithExtInfo tableEntityWithExtInfo = getAtlasEntityWithExtInfo(tblId); - - assertEquals(tableEntityWithExtInfo.getEntity().getStatus(), ACTIVE); - - String col1Id = getColumnId(tableEntityWithExtInfo, "col1"); - String col2Id = getColumnId(tableEntityWithExtInfo, "col2"); - - // RENAME COLUMN NAME - query = "ALTER TABLE " + dbName + "." + tableName + " CHANGE col1 col11 int"; - runCommandWithDelay(query); - - AtlasEntity col1Entity = getAtlasEntity(col1Id); - assertEquals(col1Entity.getAttribute("name"), "col11"); - assertTrue(((String) col1Entity.getAttribute("qualifiedName")).contains("col11")); - - // CHANGE COLUMN NAME and DATATYPE - query = "ALTER TABLE " + dbName + "." + tableName + " CHANGE col2 col22 string"; - runCommandWithDelay(query); - - AtlasEntity col2Entity = getAtlasEntity(col2Id); - assertEquals(col2Entity.getAttribute("name"), "col22"); - assertEquals(col2Entity.getAttribute("type"), "string"); - assertEquals(((List) getAtlasEntity(tblId).getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES)).size(), 0); - } - - @Test (priority = 10) - public void testDropTable() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." + tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - - query = "DROP TABLE " + dbName + "." + tableName; - runCommandWithDelay(query); - - assertEquals(getAtlasEntity(tblId).getStatus(), DELETED); - } - - @Test (priority = 11) - public void testDropView() throws Exception { - String dbName = dbName(); - String query = "CREATE DATABASE " + dbName; - - runCommand(query); - String dbId = assertDatabaseIsRegistered(dbName); - assertEquals(getAtlasEntity(dbId).getStatus(), ACTIVE); - - String tableName = tableName(); - runCommand("CREATE TABLE " + dbName + "." 
+ tableName + " (name string, age int, dob date)"); - String tblId = assertTableIsRegistered(dbName, tableName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - - String viewName = tableName(); - runCommand("CREATE VIEW " + dbName + "." + viewName + " AS SELECT * FROM " + dbName + "." + tableName); - tblId = assertTableIsRegistered(dbName, viewName); - assertEquals(getAtlasEntity(tblId).getStatus(), ACTIVE); - - query = "DROP VIEW " + dbName + "." + viewName; - runCommandWithDelay(query); - - assertEquals(getAtlasEntity(tblId).getStatus(), DELETED); - } - - private String getColumnId(AtlasEntityWithExtInfo entityWithExtInfo, String columnName) { - String ret = null; - - for (AtlasEntity entity : entityWithExtInfo.getReferredEntities().values()) { - - if (entity.getTypeName().equals("hive_column") && entity.getAttribute("name").equals(columnName)) { - ret = entity.getGuid(); - break; - } - } - - return ret; - } - - private AtlasEntity getAtlasEntity(String guid) throws AtlasServiceException { - return atlasClientV2.getEntityByGuid(guid).getEntity(); - } - - private AtlasEntityWithExtInfo getAtlasEntityWithExtInfo(String guid) throws AtlasServiceException { - return atlasClientV2.getEntityByGuid(guid); - } - - protected void runCommand(String cmd) throws Exception { - runCommandWithDelay(driverWithoutContext, cmd, 0); - } - - protected void runCommandWithDelay(String cmd) throws Exception { - int delayTimeInMs = 10000; - runCommandWithDelay(driverWithoutContext, cmd, delayTimeInMs); - } -} \ No newline at end of file diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilterTest.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilterTest.java deleted file mode 100644 index a20121496e..0000000000 --- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/utils/ActiveEntityFilterTest.java +++ /dev/null @@ -1,210 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.hive.hook.utils; - -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.utils.TestResourceFileUtils; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -public class ActiveEntityFilterTest { - private static String FILE_SUFFIX_ACTUAL_RESULTS = "-v2"; - private static String ADDITIONAL_TYPE_HDFS_PATH = "hdfs_path"; - - @BeforeClass - public void setup() { - ActiveEntityFilter.init(true, Arrays.asList(new String[]{ADDITIONAL_TYPE_HDFS_PATH})); - } - - @Test - public void verifyMessages() throws IOException { - assertAtlasEntitiesWithExtInfoFromFile("hs2-drop-db"); - assertAtlasEntitiesWithExtInfoFromFile("hs2-create-db"); - assertAtlasEntitiesWithExtInfoFromFile("hs2-create-table"); - assertMessageFromFile("hs2-table-rename"); - assertMessageFromFile("hs2-alter-view"); - assertMessageFromFile("hs2-drop-table"); - assertAtlasEntitiesWithExtInfoFromFile("hs2-create-process"); - assertMessageFromFile("hs2-load-inpath"); - } - - private void assertMessageFromFile(String msgFile) throws IOException { - List incoming = loadList(msgFile); - List expected = loadList(msgFile + FILE_SUFFIX_ACTUAL_RESULTS); - int expectedSize = expected.size(); - - List actual = ActiveEntityFilter.apply((List) incoming); - assertEquals(actual.size(), expected.size()); - for (int i = 0; i < expectedSize; i++) { - if (actual.get(i) instanceof HookNotification.EntityCreateRequestV2) { - HookNotification.EntityCreateRequestV2 actualN = (HookNotification.EntityCreateRequestV2) actual.get(i); - HookNotification.EntityCreateRequestV2 expectedN = (HookNotification.EntityCreateRequestV2) expected.get(i); - - assertAtlasEntitiesWithExtInfo(actualN.getEntities(), expectedN.getEntities()); - } - - if (actual.get(i) instanceof HookNotification.EntityUpdateRequestV2) { - HookNotification.EntityUpdateRequestV2 actualN = (HookNotification.EntityUpdateRequestV2) actual.get(i); - HookNotification.EntityUpdateRequestV2 expectedN = (HookNotification.EntityUpdateRequestV2) expected.get(i); - - assertAtlasEntitiesWithExtInfo(actualN.getEntities(), expectedN.getEntities()); - } - - if (actual.get(i) instanceof HookNotification.EntityPartialUpdateRequestV2) { - HookNotification.EntityPartialUpdateRequestV2 actualN = (HookNotification.EntityPartialUpdateRequestV2) actual.get(i); - HookNotification.EntityPartialUpdateRequestV2 expectedN = (HookNotification.EntityPartialUpdateRequestV2) expected.get(i); - - assertAtlasEntitiesWithExtInfo(actualN.getEntity(), expectedN.getEntity()); - } - } - } - - private List loadList(String msgFile) throws IOException { - List list = TestResourceFileUtils.readObjectFromJson("", msgFile, List.class); - List ret = new ArrayList<>(); - - for (int i = 0; i < list.size(); i++) { - HookNotification notification = createNotification((LinkedHashMap) list.get(i)); - if (notification == null) { - continue; - } - - ret.add(notification); - } - - return ret; - } - - private HookNotification 
createNotification(LinkedHashMap linkedHashMap) { - assertTrue(linkedHashMap.containsKey("type")); - - String type = (String) linkedHashMap.get("type"); - switch (type) { - case "ENTITY_CREATE_V2": - return AtlasType.fromLinkedHashMap(linkedHashMap, HookNotification.EntityCreateRequestV2.class); - - case "ENTITY_FULL_UPDATE_V2": - return AtlasType.fromLinkedHashMap(linkedHashMap, HookNotification.EntityUpdateRequestV2.class); - - case "ENTITY_PARTIAL_UPDATE_V2": - return AtlasType.fromLinkedHashMap(linkedHashMap, HookNotification.EntityPartialUpdateRequestV2.class); - - default: - return null; - } - } - - - private void assertAtlasEntitiesWithExtInfo(AtlasEntity.AtlasEntityWithExtInfo actual, AtlasEntity.AtlasEntityWithExtInfo expected) { - String actualJson = AtlasType.toJson(actual); - String expectedJson = AtlasType.toJson(expected); - - LinkedHashMap actualLHM = AtlasType.fromJson(actualJson, LinkedHashMap.class); - LinkedHashMap expectedLHM = AtlasType.fromJson(expectedJson, LinkedHashMap.class); - - AssertLinkedHashMap.assertEquals(actualLHM, expectedLHM); - } - - private void assertAtlasEntitiesWithExtInfoFromFile(String entityFile) throws IOException { - AtlasEntity.AtlasEntitiesWithExtInfo incoming = TestResourceFileUtils.readObjectFromJson("", entityFile, AtlasEntity.AtlasEntitiesWithExtInfo.class); - AtlasEntity.AtlasEntitiesWithExtInfo expected = TestResourceFileUtils.readObjectFromJson("", entityFile + FILE_SUFFIX_ACTUAL_RESULTS, AtlasEntity.AtlasEntitiesWithExtInfo.class); - - HiveDDLEntityFilter hiveLineageEntityFilter = new HiveDDLEntityFilter(null); - AtlasEntity.AtlasEntitiesWithExtInfo actual = hiveLineageEntityFilter.apply(incoming); - assertAtlasEntitiesWithExtInfo(actual, expected); - } - - private void assertAtlasEntitiesWithExtInfo(AtlasEntity.AtlasEntitiesWithExtInfo actual, AtlasEntity.AtlasEntitiesWithExtInfo expected) { - assertNotNull(actual); - assertNotNull(expected); - - assertEquals(actual.getEntities().size(), expected.getEntities().size()); - assertEntity(actual.getEntities(), expected.getEntities()); - - if (expected.getReferredEntities() == null && actual.getReferredEntities() != null) { - fail("expected.getReferredEntities() == null, but expected.getReferredEntities() != null"); - } - - if (expected.getReferredEntities() != null && actual.getReferredEntities() != null) { - assertEntity(actual.getReferredEntities(), expected.getReferredEntities()); - } - } - - private void assertEntity(Map actual, Map expected) { - assertEquals(actual.size(), expected.size()); - } - - private void assertEntity(List actual, List expected) { - AssertLinkedHashMap.assertEquals(actual, expected); - } - - private static class AssertLinkedHashMap { - private static final String MISMATCH_KEY_FORMAT = "Mismatch: Key: %s"; - private static final Set excludeKeys = new HashSet() {{ - add("guid"); - add("owner"); - }}; - - public static void assertEquals(LinkedHashMap actual, LinkedHashMap expected) { - for (String key : expected.keySet()) { - assertTrue(actual.containsKey(key), "Key: " + key + " Not found!"); - - if (excludeKeys.contains(key)) { - continue; - } - - if (actual.get(key) instanceof LinkedHashMap) { - assertEquals((LinkedHashMap) actual.get(key), (LinkedHashMap) expected.get(key)); - continue; - } - - Assert.assertEquals(actual.get(key), actual.get(key), String.format(MISMATCH_KEY_FORMAT, key)); - } - } - - public static void assertEquals(List actual, List expected) { - Assert.assertEquals(actual.size(), expected.size()); - for (int i = 0; i < actual.size(); i++) { - 
AtlasEntity actualEntity = actual.get(i); - AtlasEntity expectedEntity = expected.get(i); - - String actualJson = AtlasType.toJson(actualEntity); - String expectedJson = AtlasType.toJson(expectedEntity); - - Assert.assertEquals(actualJson, expectedJson, "Actual: " + actualJson); - } - } - } -} diff --git a/addons/hive-bridge/src/test/resources/atlas-application.properties b/addons/hive-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 5d24a3014c..0000000000 --- a/addons/hive-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,125 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. -atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS 
-atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties -atlas.hook.hive.hs2.ignore.ddl.operations=false \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/atlas-log4j.xml b/addons/hive-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 262a710f7a..0000000000 --- a/addons/hive-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/hive-bridge/src/test/resources/hive-site.xml b/addons/hive-bridge/src/test/resources/hive-site.xml deleted file mode 100644 index f9ec5773e8..0000000000 --- a/addons/hive-bridge/src/test/resources/hive-site.xml +++ /dev/null @@ -1,99 +0,0 @@ - - - - - - - hive.exec.submit.local.task.via.child - false - - - - mapreduce.framework.name - local - - - - fs.default.name - file:/// - - - - hive.exec.post.hooks - org.apache.atlas.hive.hook.HiveHook - - - - hive.metastore.event.listeners - org.apache.atlas.hive.hook.HiveMetastoreHookImpl - - - - hive.support.concurrency - false - - - - hive.metastore.warehouse.dir - ${project.basedir}/target/metastore - - - - javax.jdo.option.ConnectionURL - jdbc:derby:;databaseName=${project.basedir}/target/metastore_db;create=true - - - - atlas.hook.hive.synchronous - true - - - - fs.pfile.impl - org.apache.hadoop.fs.ProxyLocalFileSystem - - - - hive.in.test - true - - - - hive.zookeeper.quorum - localhost:19026 - - - - hive.metastore.schema.verification - false - - - - hive.metastore.disallow.incompatible.col.type.changes - false - - - - datanucleus.schema.autoCreateAll - true - - - - hive.exec.scratchdir - ${project.basedir}/target/scratchdir - - - \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-alter-view-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-alter-view-v2.json deleted file mode 100644 index ebe896ba9a..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-alter-view-v2.json +++ /dev/null @@ -1,35 +0,0 @@ -[ - 
{ - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6_renamed_view2@cm:1616450675937", - "execTime": 1616450675937, - "queryText": "ALTER VIEW hh6_renamed_view RENAME TO hh6_renamed_view2", - "name": "ALTER VIEW hh6_renamed_view RENAME TO hh6_renamed_view2", - "userName": "hive" - }, - "guid": "-14529329955589487", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] - } - } -] diff --git a/addons/hive-bridge/src/test/resources/json/hs2-alter-view.json b/addons/hive-bridge/src/test/resources/json/hs2-alter-view.json deleted file mode 100644 index cfe77847ca..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-alter-view.json +++ /dev/null @@ -1,268 +0,0 @@ -[ - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view.col1@cm" - } - }, - "entity": { - "entity": { - "typeName": "hive_column", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed_view2.col1@cm" - }, - "guid": "-14529329955589486", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view@cm_storage" - } - }, - "entity": { - "entity": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm_storage", - "storedAsSubDirectories": false, - "location": null, - "compressed": false, - "inputFormat": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": null, - "name": null, - "parameters": {} - } - }, - "numBuckets": -1 - }, - "guid": "-14529329955589484", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view@cm" - } - }, - "entity": { - "entity": { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "temporary": false, - "lastAccessTime": 1616450675000, - "aliases": [ - "hh6_renamed_view" - ], - "qualifiedName": "cadb02.hh6_renamed_view2@cm", - "viewExpandedText": "select `hh6_renamed`.`col1` from `cadb02`.`hh6_renamed`", - "tableType": "VIRTUAL_VIEW", - "createTime": 1616450675000, - "name": "hh6_renamed_view2", - "comment": null, - "parameters": { - "transient_lastDdlTime": "1616450676", - "bucketing_version": "2", - "last_modified_time": "1616450676", - "last_modified_by": "hive" - }, - "retention": 0, - "viewOriginalText": "select * from hh6_renamed" - }, - "guid": "-14529329955589483", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_FULL_UPDATE_V2", - "user": "hive", - "entities": { - "referredEntities": { - "-14529329955589485": { - "typeName": "hive_column", - "attributes": { - "owner": "hive", - "qualifiedName": 
"cadb02.hh6_renamed_view2.col1@cm", - "name": "col1", - "comment": null, - "position": 0, - "type": "int" - }, - "guid": "-14529329955589485", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589483", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm" - }, - "relationshipType": "hive_table_columns" - } - }, - "proxy": false - }, - "-14529329955589484": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm_storage", - "storedAsSubDirectories": false, - "location": null, - "compressed": false, - "inputFormat": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": null, - "name": null, - "parameters": {} - } - }, - "numBuckets": -1 - }, - "guid": "-14529329955589484", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589483", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm" - }, - "relationshipType": "hive_table_storagedesc" - } - }, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "temporary": false, - "lastAccessTime": 1616450675000, - "aliases": [ - "hh6_renamed_view" - ], - "qualifiedName": "cadb02.hh6_renamed_view2@cm", - "viewExpandedText": "select `hh6_renamed`.`col1` from `cadb02`.`hh6_renamed`", - "tableType": "VIRTUAL_VIEW", - "createTime": 1616450675000, - "name": "hh6_renamed_view2", - "comment": null, - "parameters": { - "transient_lastDdlTime": "1616450676", - "bucketing_version": "2", - "last_modified_time": "1616450676", - "last_modified_by": "hive" - }, - "retention": 0, - "viewOriginalText": "select * from hh6_renamed" - }, - "guid": "-14529329955589483", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "sd": { - "guid": "-14529329955589484", - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm_storage" - }, - "relationshipType": "hive_table_storagedesc" - }, - "columns": [ - { - "guid": "-14529329955589485", - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2.col1@cm" - }, - "relationshipType": "hive_table_columns" - } - ], - "partitionKeys": [], - "db": { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_table_db" - } - }, - "proxy": false - } - ] - } - }, - { - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6_renamed_view2@cm:1616450675937", - "execTime": 1616450675937, - "queryText": "ALTER VIEW hh6_renamed_view RENAME TO hh6_renamed_view2", - "name": "ALTER VIEW hh6_renamed_view RENAME TO hh6_renamed_view2", - "userName": "hive" - }, - "guid": "-14529329955589487", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] - } - } -] diff --git 
a/addons/hive-bridge/src/test/resources/json/hs2-create-db-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-create-db-v2.json deleted file mode 100644 index 28d3b6b24e..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-db-v2.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "referredEntities": {}, - "entities": [ - { - "typeName": "hive_db_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02@cm:1616450673617", - "execTime": 1616450673617, - "queryText": "create database cadb02", - "name": "create database cadb02", - "userName": "hive" - }, - "guid": "-14529329955589449", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "db": { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_db_ddl_queries" - } - }, - "proxy": false - }, - { - "typeName": "hdfs_path", - "attributes": { - "path": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db", - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db@cm", - "clusterName": "cm", - "name": "/warehouse/tablespace/external/hive/cadb02.db" - }, - "guid": "-14529329955589450", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "hiveDb": { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_db_location" - } - }, - "proxy": false - } - ] -} diff --git a/addons/hive-bridge/src/test/resources/json/hs2-create-db.json b/addons/hive-bridge/src/test/resources/json/hs2-create-db.json deleted file mode 100644 index a5b810f729..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-db.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "referredEntities": {}, - "entities": [ - { - "typeName": "hive_db", - "attributes": { - "owner": "hive", - "ownerType": "USER", - "managedLocation": null, - "qualifiedName": "cadb02@cm", - "clusterName": "cm", - "name": "cadb02", - "location": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db", - "parameters": {} - }, - "guid": "-14529329955589448", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - }, - { - "typeName": "hive_db_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02@cm:1616450673617", - "execTime": 1616450673617, - "queryText": "create database cadb02", - "name": "create database cadb02", - "userName": "hive" - }, - "guid": "-14529329955589449", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "db": { - "guid": "-14529329955589448", - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_db_ddl_queries" - } - }, - "proxy": false - }, - { - "typeName": "hdfs_path", - "attributes": { - "path": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db", - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db@cm", - "clusterName": "cm", - "name": "/warehouse/tablespace/external/hive/cadb02.db" - }, - "guid": "-14529329955589450", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "hiveDb": { - "guid": "-14529329955589448", - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_db_location" - } - }, - "proxy": false - } - ] -} 
diff --git a/addons/hive-bridge/src/test/resources/json/hs2-create-process-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-create-process-v2.json deleted file mode 100644 index 9291cde24f..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-process-v2.json +++ /dev/null @@ -1,141 +0,0 @@ -{ - "referredEntities": {}, - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb202.vw202@primary:1616604468798", - "execTime": 1616604468798, - "queryText": "create table vw202 as select * from hh202", - "name": "create table vw202 as select * from hh202", - "userName": "hive" - }, - "guid": "-44808597128613", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - }, - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "create table vw202 as select * from hh202" - ], - "qualifiedName": "cadb202.vw202@primary:1616604471000", - "clusterName": "primary", - "name": "cadb202.vw202@primary:1616604471000", - "queryText": "", - "operationType": "CREATETABLE_AS_SELECT", - "startTime": 1616604475518, - "queryPlan": "Not Supported", - "endTime": 1616604475518, - "userName": "", - "queryId": "" - }, - "guid": "-44808597128614", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.hh202@primary" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "21806.local", - "qualifiedName": "cadb202.vw202@primary:1616604471000:1616604468798:1616604475518", - "name": "cadb202.vw202@primary:1616604471000:1616604468798:1616604475518", - "queryText": "create table vw202 as select * from hh202", - "startTime": 1616604468798, - "queryPlan": "Not Supported", - "endTime": 1616604475518, - "userName": "hive", - "queryId": "hive_20210324094633_2144da6f-70ac-476e-aeec-0e758cdf1fa6" - }, - "guid": "-44808597128615", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-44808597128614", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - }, - { - "typeName": "hive_column_lineage", - "attributes": { - "expression": null, - "qualifiedName": "cadb202.vw202@primary:1616604471000:col202", - "name": "cadb202.vw202@primary:1616604471000:col202", - "depenendencyType": "SIMPLE" - }, - "guid": "-44808597128616", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202.col202@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb202.hh202.col202@primary" - }, - "relationshipType": "dataset_process_inputs" - } - ], - "query": { - "guid": "-44808597128614", - "typeName": "hive_process", - "uniqueAttributes": { - 
"qualifiedName": "cadb202.vw202@primary:1616604471000" - }, - "relationshipType": "hive_process_column_lineage" - } - }, - "proxy": false - } - ] -} \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-create-process.json b/addons/hive-bridge/src/test/resources/json/hs2-create-process.json deleted file mode 100644 index 1bbd95a995..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-process.json +++ /dev/null @@ -1,283 +0,0 @@ -{ - "referredEntities": { - "-44808597128606": { - "typeName": "hive_db", - "attributes": { - "owner": "hive", - "ownerType": "USER", - "qualifiedName": "cadb202@primary", - "clusterName": "primary", - "name": "cadb202", - "location": "file:/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/metastore/cadb202.db", - "parameters": {} - }, - "guid": "-44808597128606", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - }, - "-44808597128612": { - "typeName": "hive_column", - "attributes": { - "owner": "hive", - "qualifiedName": "cadb202.vw202.col202@primary", - "name": "col202", - "comment": null, - "position": 0, - "type": "string" - }, - "guid": "-44808597128612", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-44808597128610", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "hive_table_columns" - } - }, - "proxy": false - }, - "-44808597128611": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb202.vw202@primary_storage", - "storedAsSubDirectories": false, - "location": "file:/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/metastore/cadb202.db/vw202", - "compressed": false, - "inputFormat": "org.apache.hadoop.mapred.TextInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "name": null, - "parameters": { - "serialization.format": "1" - } - } - }, - "numBuckets": -1 - }, - "guid": "-44808597128611", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-44808597128610", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "hive_table_storagedesc" - } - }, - "proxy": false - }, - "-44808597128610": { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "tableType": "MANAGED_TABLE", - "temporary": false, - "lastAccessTime": 1616604471000, - "createTime": 1616604471000, - "qualifiedName": "cadb202.vw202@primary", - "name": "vw202", - "comment": null, - "parameters": { - "totalSize": "0", - "numRows": "0", - "rawDataSize": "0", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "numFiles": "0", - "transient_lastDdlTime": "1616604472", - "bucketing_version": "2" - }, - "retention": 0 - }, - "guid": "-44808597128610", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "sd": { - "guid": "-44808597128611", - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary_storage" - }, - "relationshipType": "hive_table_storagedesc" - }, - "columns": [ - { - "guid": "-44808597128612", - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202.col202@primary" 
- }, - "relationshipType": "hive_table_columns" - } - ], - "partitionKeys": [], - "db": { - "guid": "-44808597128606", - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb202@primary" - }, - "relationshipType": "hive_table_db" - } - }, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb202.vw202@primary:1616604468798", - "execTime": 1616604468798, - "queryText": "create table vw202 as select * from hh202", - "name": "create table vw202 as select * from hh202", - "userName": "hive" - }, - "guid": "-44808597128613", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-44808597128610", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - }, - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "create table vw202 as select * from hh202" - ], - "qualifiedName": "cadb202.vw202@primary:1616604471000", - "clusterName": "primary", - "name": "cadb202.vw202@primary:1616604471000", - "queryText": "", - "operationType": "CREATETABLE_AS_SELECT", - "startTime": 1616604475518, - "queryPlan": "Not Supported", - "endTime": 1616604475518, - "userName": "", - "queryId": "" - }, - "guid": "-44808597128614", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "guid": "-44808597128610", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb202.hh202@primary" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "21806.local", - "qualifiedName": "cadb202.vw202@primary:1616604471000:1616604468798:1616604475518", - "name": "cadb202.vw202@primary:1616604471000:1616604468798:1616604475518", - "queryText": "create table vw202 as select * from hh202", - "startTime": 1616604468798, - "queryPlan": "Not Supported", - "endTime": 1616604475518, - "userName": "hive", - "queryId": "hive_20210324094633_2144da6f-70ac-476e-aeec-0e758cdf1fa6" - }, - "guid": "-44808597128615", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-44808597128614", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - }, - { - "typeName": "hive_column_lineage", - "attributes": { - "expression": null, - "qualifiedName": "cadb202.vw202@primary:1616604471000:col202", - "name": "cadb202.vw202@primary:1616604471000:col202", - "depenendencyType": "SIMPLE" - }, - "guid": "-44808597128616", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "guid": "-44808597128612", - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb202.vw202.col202@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb202.hh202.col202@primary" - }, - "relationshipType": "dataset_process_inputs" - } - ], - "query": { - "guid": "-44808597128614", - "typeName": "hive_process", - 
"uniqueAttributes": { - "qualifiedName": "cadb202.vw202@primary:1616604471000" - }, - "relationshipType": "hive_process_column_lineage" - } - }, - "proxy": false - } - ] -} \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-create-table-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-create-table-v2.json deleted file mode 100644 index ebf9e51fd4..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-table-v2.json +++ /dev/null @@ -1,115 +0,0 @@ -{ - "referredEntities": { - "-14529329955589455": { - "typeName": "hdfs_path", - "attributes": { - "path": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv", - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv@cm", - "clusterName": "cm", - "name": "/tmp/external/hh6.csv" - }, - "guid": "-14529329955589455", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "create external table hh6(col1 int) location '/tmp/external/hh6.csv'" - ], - "qualifiedName": "cadb02.hh6@cm:1616450674000", - "clusterName": "cm", - "name": "cadb02.hh6@cm:1616450674000", - "queryText": "", - "operationType": "CREATETABLE", - "startTime": 1616450674217, - "queryPlan": "Not Supported", - "endTime": 1616450674217, - "userName": "", - "queryId": "" - }, - "guid": "-14529329955589456", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "guid": "-14529329955589455", - "typeName": "hdfs_path", - "uniqueAttributes": { - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv@cm" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "ve0126.halxg.cloudera.com", - "qualifiedName": "cadb02.hh6@cm:1616450674000:1616450673854:1616450674217", - "name": "cadb02.hh6@cm:1616450674000:1616450673854:1616450674217", - "queryText": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "startTime": 1616450673854, - "queryPlan": "Not Supported", - "endTime": 1616450674217, - "userName": "hive", - "queryId": "hive_20210322150433_52b16d58-6a44-49e2-afe4-4bb1a5bb7484" - }, - "guid": "-14529329955589457", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-14529329955589456", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - }, - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6@cm:1616450673854", - "execTime": 1616450673854, - "queryText": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "name": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "userName": "hive" - }, - "guid": "-14529329955589458", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] -} \ No newline at end of file diff --git 
a/addons/hive-bridge/src/test/resources/json/hs2-create-table.json b/addons/hive-bridge/src/test/resources/json/hs2-create-table.json deleted file mode 100644 index 4e13ed1526..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-create-table.json +++ /dev/null @@ -1,255 +0,0 @@ -{ - "referredEntities": { - "-14529329955589453": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb02.hh6@cm_storage", - "storedAsSubDirectories": false, - "location": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv", - "compressed": false, - "inputFormat": "org.apache.hadoop.mapred.TextInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "name": null, - "parameters": { - "serialization.format": "1" - } - } - }, - "numBuckets": -1 - }, - "guid": "-14529329955589453", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589452", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "hive_table_storagedesc" - } - }, - "proxy": false - }, - "-14529329955589451": { - "typeName": "hive_db", - "attributes": { - "owner": "hive", - "ownerType": "USER", - "managedLocation": null, - "qualifiedName": "cadb02@cm", - "clusterName": "cm", - "name": "cadb02", - "location": "hdfs://ve0126.halxg.cloudera.com:8020/warehouse/tablespace/external/hive/cadb02.db", - "parameters": {} - }, - "guid": "-14529329955589451", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - }, - "-14529329955589455": { - "typeName": "hdfs_path", - "attributes": { - "path": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv", - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv@cm", - "clusterName": "cm", - "name": "/tmp/external/hh6.csv" - }, - "guid": "-14529329955589455", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - }, - "-14529329955589454": { - "typeName": "hive_column", - "attributes": { - "owner": "hive", - "qualifiedName": "cadb02.hh6.col1@cm", - "name": "col1", - "comment": null, - "position": 0, - "type": "int" - }, - "guid": "-14529329955589454", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589452", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "hive_table_columns" - } - }, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "tableType": "EXTERNAL_TABLE", - "temporary": false, - "lastAccessTime": 1616450674000, - "createTime": 1616450674000, - "qualifiedName": "cadb02.hh6@cm", - "name": "hh6", - "comment": null, - "parameters": { - "totalSize": "0", - "EXTERNAL": "TRUE", - "numFiles": "0", - "transient_lastDdlTime": "1616450674", - "bucketing_version": "2", - "numFilesErasureCoded": "0" - }, - "retention": 0 - }, - "guid": "-14529329955589452", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "sd": { - "guid": "-14529329955589453", - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm_storage" - }, - "relationshipType": "hive_table_storagedesc" - }, - "columns": [ - { 
- "guid": "-14529329955589454", - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6.col1@cm" - }, - "relationshipType": "hive_table_columns" - } - ], - "partitionKeys": [], - "db": { - "guid": "-14529329955589451", - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_table_db" - } - }, - "proxy": false - }, - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "create external table hh6(col1 int) location '/tmp/external/hh6.csv'" - ], - "qualifiedName": "cadb02.hh6@cm:1616450674000", - "clusterName": "cm", - "name": "cadb02.hh6@cm:1616450674000", - "queryText": "", - "operationType": "CREATETABLE", - "startTime": 1616450674217, - "queryPlan": "Not Supported", - "endTime": 1616450674217, - "userName": "", - "queryId": "" - }, - "guid": "-14529329955589456", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "guid": "-14529329955589452", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "guid": "-14529329955589455", - "typeName": "hdfs_path", - "uniqueAttributes": { - "qualifiedName": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv@cm" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "ve0126.halxg.cloudera.com", - "qualifiedName": "cadb02.hh6@cm:1616450674000:1616450673854:1616450674217", - "name": "cadb02.hh6@cm:1616450674000:1616450673854:1616450674217", - "queryText": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "startTime": 1616450673854, - "queryPlan": "Not Supported", - "endTime": 1616450674217, - "userName": "hive", - "queryId": "hive_20210322150433_52b16d58-6a44-49e2-afe4-4bb1a5bb7484" - }, - "guid": "-14529329955589457", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-14529329955589456", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - }, - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6@cm:1616450673854", - "execTime": 1616450673854, - "queryText": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "name": "create external table hh6(col1 int) location '/tmp/external/hh6.csv'", - "userName": "hive" - }, - "guid": "-14529329955589458", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589452", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] -} \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-drop-db-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-drop-db-v2.json deleted file mode 100644 index 76860cea9a..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-drop-db-v2.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "entities": [ - ] -} diff --git a/addons/hive-bridge/src/test/resources/json/hs2-drop-db.json b/addons/hive-bridge/src/test/resources/json/hs2-drop-db.json deleted file mode 100644 index 429f95fb38..0000000000 --- 
a/addons/hive-bridge/src/test/resources/json/hs2-drop-db.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "entities": [ - { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - } - } - ] -} diff --git a/addons/hive-bridge/src/test/resources/json/hs2-drop-table-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-drop-table-v2.json deleted file mode 100644 index 0637a088a0..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-drop-table-v2.json +++ /dev/null @@ -1 +0,0 @@ -[] \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-drop-table.json b/addons/hive-bridge/src/test/resources/json/hs2-drop-table.json deleted file mode 100644 index ed62032493..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-drop-table.json +++ /dev/null @@ -1,50 +0,0 @@ -[ - { - "type": "ENTITY_DELETE_V2", - "user": "hive", - "entities": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm" - } - } - ] - }, - { - "type": "ENTITY_DELETE_V2", - "user": "hive", - "entities": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed_view2@cm" - } - } - ] - }, - { - "type": "ENTITY_DELETE_V2", - "user": "hive", - "entities": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.cc1@cm" - } - } - ] - }, - { - "type": "ENTITY_DELETE_V2", - "user": "hive", - "entities": [ - { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - } - } - ] - } -] diff --git a/addons/hive-bridge/src/test/resources/json/hs2-load-inpath-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-load-inpath-v2.json deleted file mode 100644 index 499a9c2e87..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-load-inpath-v2.json +++ /dev/null @@ -1,96 +0,0 @@ -[ - { - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "referredEntities": { - "-98504074851374": { - "typeName": "hdfs_path", - "attributes": { - "path": "file:/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj", - "qualifiedName": "file:/users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj", - "clusterName": "primary", - "name": "/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj" - }, - "guid": "-98504074851374", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "load data local inpath 'file:///users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj' into table table_nrx8uoggc0 partition(dt = '2015-01-01')" - ], - "qualifiedName": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000", - "clusterName": "primary", - "name": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000", - "queryText": "", - "operationType": "LOAD", - "startTime": 1622738659471, - "queryPlan": "Not Supported", - "endTime": 1622738659471, - "userName": "", - "queryId": "" - }, - "guid": "-98504074851381", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "default.table_nrx8uoggc0@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "guid": "-98504074851374", - "typeName": "hdfs_path", - "uniqueAttributes": { - "qualifiedName": 
"file:/users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "21806.local", - "qualifiedName": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000:1622738658982:1622738659471", - "name": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000:1622738658982:1622738659471", - "queryText": "load data local inpath 'file:///users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj' into table table_nrx8uoggc0 partition(dt = '2015-01-01')", - "startTime": 1622738658982, - "queryPlan": "Not Supported", - "endTime": 1622738659471, - "userName": "hive", - "queryId": "hive_20210603094308_ef789483-7de1-462b-ac74-bb0ebe7aeedf" - }, - "guid": "-98504074851382", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-98504074851381", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - } - ] - } - } -] \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-load-inpath.json b/addons/hive-bridge/src/test/resources/json/hs2-load-inpath.json deleted file mode 100644 index 499a9c2e87..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-load-inpath.json +++ /dev/null @@ -1,96 +0,0 @@ -[ - { - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "referredEntities": { - "-98504074851374": { - "typeName": "hdfs_path", - "attributes": { - "path": "file:/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj", - "qualifiedName": "file:/users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj", - "clusterName": "primary", - "name": "/Users/hive/Apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj" - }, - "guid": "-98504074851374", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_process", - "attributes": { - "recentQueries": [ - "load data local inpath 'file:///users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj' into table table_nrx8uoggc0 partition(dt = '2015-01-01')" - ], - "qualifiedName": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000", - "clusterName": "primary", - "name": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000", - "queryText": "", - "operationType": "LOAD", - "startTime": 1622738659471, - "queryPlan": "Not Supported", - "endTime": 1622738659471, - "userName": "", - "queryId": "" - }, - "guid": "-98504074851381", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "outputs": [ - { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "default.table_nrx8uoggc0@primary" - }, - "relationshipType": "process_dataset_outputs" - } - ], - "inputs": [ - { - "guid": "-98504074851374", - "typeName": "hdfs_path", - "uniqueAttributes": { - "qualifiedName": "file:/users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj" - }, - "relationshipType": "dataset_process_inputs" - } - ] - }, - "proxy": false - }, - { - "typeName": "hive_process_execution", - "attributes": { - "hostName": "21806.local", - "qualifiedName": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000:1622738658982:1622738659471", - "name": "LOAD->:default.table_nrx8uoggc0@primary:1622738598000:1622738658982:1622738659471", - 
"queryText": "load data local inpath 'file:///users/hive/apache/atlas-wip/addons/hive-bridge/target/load-data-thi5jt1lgj' into table table_nrx8uoggc0 partition(dt = '2015-01-01')", - "startTime": 1622738658982, - "queryPlan": "Not Supported", - "endTime": 1622738659471, - "userName": "hive", - "queryId": "hive_20210603094308_ef789483-7de1-462b-ac74-bb0ebe7aeedf" - }, - "guid": "-98504074851382", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "process": { - "guid": "-98504074851381", - "typeName": "hive_process", - "relationshipType": "hive_process_process_executions" - } - }, - "proxy": false - } - ] - } - } -] \ No newline at end of file diff --git a/addons/hive-bridge/src/test/resources/json/hs2-table-rename-v2.json b/addons/hive-bridge/src/test/resources/json/hs2-table-rename-v2.json deleted file mode 100644 index f133e7ffbf..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-table-rename-v2.json +++ /dev/null @@ -1,35 +0,0 @@ -[ - { - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6_renamed@cm:1616450674247", - "execTime": 1616450674247, - "queryText": "ALTER TABLE hh6 RENAME TO hh6_renamed", - "name": "ALTER TABLE hh6 RENAME TO hh6_renamed", - "userName": "hive" - }, - "guid": "-14529329955589467", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] - } - } -] diff --git a/addons/hive-bridge/src/test/resources/json/hs2-table-rename.json b/addons/hive-bridge/src/test/resources/json/hs2-table-rename.json deleted file mode 100644 index 6b8094dc83..0000000000 --- a/addons/hive-bridge/src/test/resources/json/hs2-table-rename.json +++ /dev/null @@ -1,276 +0,0 @@ -[ - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6.col1@cm" - } - }, - "entity": { - "entity": { - "typeName": "hive_column", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed.col1@cm" - }, - "guid": "-14529329955589466", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm_storage" - } - }, - "entity": { - "entity": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed@cm_storage", - "storedAsSubDirectories": false, - "location": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv", - "compressed": false, - "inputFormat": "org.apache.hadoop.mapred.TextInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "name": null, - "parameters": { - "serialization.format": "1" - } - } - }, - "numBuckets": -1 - }, - "guid": "-14529329955589464", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_PARTIAL_UPDATE_V2", - "user": "hive", - "entityId": { - "typeName": "hive_table", - 
"uniqueAttributes": { - "qualifiedName": "cadb02.hh6@cm" - } - }, - "entity": { - "entity": { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "tableType": "EXTERNAL_TABLE", - "temporary": false, - "lastAccessTime": 1616450674000, - "aliases": [ - "hh6" - ], - "createTime": 1616450674000, - "qualifiedName": "cadb02.hh6_renamed@cm", - "name": "hh6_renamed", - "comment": null, - "parameters": { - "last_modified_time": "1616450674", - "totalSize": "0", - "EXTERNAL": "TRUE", - "numFiles": "0", - "transient_lastDdlTime": "1616450674", - "bucketing_version": "2", - "last_modified_by": "hive", - "numFilesErasureCoded": "0" - }, - "retention": 0 - }, - "guid": "-14529329955589463", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "proxy": false - } - } - }, - { - "type": "ENTITY_FULL_UPDATE_V2", - "user": "hive", - "entities": { - "referredEntities": { - "-14529329955589464": { - "typeName": "hive_storagedesc", - "attributes": { - "qualifiedName": "cadb02.hh6_renamed@cm_storage", - "storedAsSubDirectories": false, - "location": "hdfs://ve0126.halxg.cloudera.com:8020/tmp/external/hh6.csv", - "compressed": false, - "inputFormat": "org.apache.hadoop.mapred.TextInputFormat", - "parameters": {}, - "outputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", - "serdeInfo": { - "typeName": "hive_serde", - "attributes": { - "serializationLib": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "name": null, - "parameters": { - "serialization.format": "1" - } - } - }, - "numBuckets": -1 - }, - "guid": "-14529329955589464", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589463", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm" - }, - "relationshipType": "hive_table_storagedesc" - } - }, - "proxy": false - }, - "-14529329955589465": { - "typeName": "hive_column", - "attributes": { - "owner": "hive", - "qualifiedName": "cadb02.hh6_renamed.col1@cm", - "name": "col1", - "comment": null, - "position": 0, - "type": "int" - }, - "guid": "-14529329955589465", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "guid": "-14529329955589463", - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm" - }, - "relationshipType": "hive_table_columns" - } - }, - "proxy": false - } - }, - "entities": [ - { - "typeName": "hive_table", - "attributes": { - "owner": "hive", - "tableType": "EXTERNAL_TABLE", - "temporary": false, - "lastAccessTime": 1616450674000, - "aliases": [ - "hh6" - ], - "createTime": 1616450674000, - "qualifiedName": "cadb02.hh6_renamed@cm", - "name": "hh6_renamed", - "comment": null, - "parameters": { - "last_modified_time": "1616450674", - "totalSize": "0", - "EXTERNAL": "TRUE", - "numFiles": "0", - "transient_lastDdlTime": "1616450674", - "bucketing_version": "2", - "last_modified_by": "hive", - "numFilesErasureCoded": "0" - }, - "retention": 0 - }, - "guid": "-14529329955589463", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "sd": { - "guid": "-14529329955589464", - "typeName": "hive_storagedesc", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm_storage" - }, - "relationshipType": "hive_table_storagedesc" - }, - "columns": [ - { - "guid": "-14529329955589465", - "typeName": "hive_column", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed.col1@cm" - 
}, - "relationshipType": "hive_table_columns" - } - ], - "partitionKeys": [], - "db": { - "typeName": "hive_db", - "uniqueAttributes": { - "qualifiedName": "cadb02@cm" - }, - "relationshipType": "hive_table_db" - } - }, - "proxy": false - } - ] - } - }, - { - "type": "ENTITY_CREATE_V2", - "user": "hive", - "entities": { - "entities": [ - { - "typeName": "hive_table_ddl", - "attributes": { - "serviceType": "hive", - "qualifiedName": "cadb02.hh6_renamed@cm:1616450674247", - "execTime": 1616450674247, - "queryText": "ALTER TABLE hh6 RENAME TO hh6_renamed", - "name": "ALTER TABLE hh6 RENAME TO hh6_renamed", - "userName": "hive" - }, - "guid": "-14529329955589467", - "isIncomplete": false, - "provenanceType": 0, - "version": 0, - "relationshipAttributes": { - "table": { - "typeName": "hive_table", - "uniqueAttributes": { - "qualifiedName": "cadb02.hh6_renamed@cm" - }, - "relationshipType": "hive_table_ddl_queries" - } - }, - "proxy": false - } - ] - } - } -] diff --git a/addons/hive-bridge/src/test/resources/users-credentials.properties b/addons/hive-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/hive-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/impala-bridge-shim/pom.xml b/addons/impala-bridge-shim/pom.xml deleted file mode 100644 index 079eb85006..0000000000 --- a/addons/impala-bridge-shim/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - impala-bridge-shim - Apache Atlas Impala Bridge Shim Module - Apache Atlas Impala Bridge Shim - jar - - - - org.apache.atlas - atlas-plugin-classloader - - - org.apache.atlas - impala-hook-api - - - \ No newline at end of file diff --git a/addons/impala-bridge-shim/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java b/addons/impala-bridge-shim/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java deleted file mode 100644 index 34e6dcf4ed..0000000000 --- a/addons/impala-bridge-shim/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.impala.hooks.QueryCompleteContext; -import org.apache.impala.hooks.QueryEventHook; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is used to convert lineage records from Impala to lineage notifications and - * send them to Atlas. - */ -public class ImpalaLineageHook implements QueryEventHook { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaLineageHook.class); - - private static final String ATLAS_PLUGIN_TYPE_IMPALA = "impala"; - private static final String ATLAS_IMPALA_LINEAGE_HOOK_IMPL_CLASSNAME = - "org.apache.atlas.impala.hook.ImpalaHook"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private QueryEventHook impalaLineageHookImpl; - - public ImpalaLineageHook() { - } - - /** - * Execute Impala hook - */ - public void onQueryComplete(QueryCompleteContext context) { - LOG.debug("==> ImpalaLineageHook.onQueryComplete()"); - - try { - activatePluginClassLoader(); - impalaLineageHookImpl.onQueryComplete(context); - } catch (Exception ex) { - String errorMessage = String.format("Error in processing impala lineage: {}", context.getLineageGraph()); - LOG.error(errorMessage, ex); - } finally { - deactivatePluginClassLoader(); - } - - LOG.debug("<== ImpalaLineageHook.onQueryComplete()"); - } - - /** - * Initialization of Impala hook - */ - public void onImpalaStartup() { - LOG.debug("==> ImpalaLineageHook.onImpalaStartup()"); - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE_IMPALA, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = (Class) Class - .forName(ATLAS_IMPALA_LINEAGE_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - impalaLineageHookImpl = cls.newInstance(); - impalaLineageHookImpl.onImpalaStartup(); - } catch (Exception excp) { - LOG.error("Error instantiating Atlas hook implementation for Impala lineage", excp); - } finally { - deactivatePluginClassLoader(); - } - - LOG.debug("<== ImpalaLineageHook.onImpalaStartup()"); - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} \ No newline at end of file diff --git a/addons/impala-bridge/pom.xml b/addons/impala-bridge/pom.xml deleted file mode 100644 index 186251abe2..0000000000 --- a/addons/impala-bridge/pom.xml +++ /dev/null @@ -1,549 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - impala-bridge - Apache Atlas Impala Bridge Module - Apache Atlas Impala Bridge - jar - - - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - - - org.apache.atlas - atlas-notification - - - - org.apache.atlas - impala-hook-api - - - - - org.apache.atlas - atlas-client-v2 - ${project.version} - test - - - org.apache.logging.log4j - log4j-core - ${log4j2.version} - test - - - org.apache.logging.log4j - log4j-api - ${log4j2.version} - test - - - - com.sun.jersey - 
jersey-server - ${jersey.version} - test - - - - org.apache.atlas - hive-bridge - test - - - - org.apache.atlas - atlas-webapp - war - test - - - - org.apache.hadoop - hadoop-client - - - javax.servlet - servlet-api - - - org.eclipse.jetty - * - - - - - - org.apache.hadoop - hadoop-annotations - - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - - commons-io - commons-io - ${commons-io.version} - - - - commons-cli - commons-cli - ${commons-cli.version} - - - - commons-lang - commons-lang - ${commons-lang.version} - - - - org.testng - testng - - - - org.mockito - mockito-all - - - - org.eclipse.jetty - jetty-server - test - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - org.apache.atlas - atlas-intg - tests - test - - - - org.apache.atlas - atlas-repository - tests - test - - - - org.apache.hive - hive-exec - ${hive.version} - test - - - javax.servlet - * - - - - - - org.apache.hive - hive-jdbc - ${hive.version} - test - - - javax.servlet - * - - - javax.ws.rs - * - - - org.eclipse.jetty - * - - - - - - org.apache.hive - hive-cli - ${hive.version} - test - - - javax.servlet - * - - - org.eclipse.jetty.aggregate - * - - - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/impala/atlas-impala-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey.contribs - jersey-multipart - ${jersey.version} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - commons-configuration - commons-configuration - ${commons-conf.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/impala - false - false - true - - - ${project.groupId} - impala-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - 
${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - ${debug.jetty.daemon} - ${project.build.testOutputDirectory} - true - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/0010-base_model.json - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - - diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/ImpalaLineageTool.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/ImpalaLineageTool.java deleted file mode 100644 index 6e6d6f1eed..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/ImpalaLineageTool.java +++ /dev/null @@ -1,216 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala; - -import org.apache.atlas.impala.hook.ImpalaLineageHook; - -import java.io.*; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOCase; -import org.apache.commons.io.comparator.LastModifiedFileComparator; -import org.apache.commons.io.filefilter.PrefixFileFilter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Entry point of actual implementation of Impala lineage tool. It reads the lineage records in - * lineage log. It then calls instance of ImpalaLineageHook to convert lineage records to - * lineage notifications and send them to Atlas. - */ -public class ImpalaLineageTool { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaLineageTool.class); - private static final String WAL_FILE_EXTENSION = ".wal"; - private static final String WAL_FILE_PREFIX = "WAL"; - private String directoryName; - private String prefix; - - public ImpalaLineageTool(String[] args) { - try { - Options options = new Options(); - options.addOption("d", "directory", true, "the lineage files' folder"); - options.addOption("p", "prefix", true, "the prefix of the lineage files"); - - CommandLine cmd = new DefaultParser().parse(options, args); - directoryName = cmd.getOptionValue("d"); - prefix = cmd.getOptionValue("p"); - } catch(ParseException e) { - LOG.warn("Failed to parse command arguments. Error: ", e.getMessage()); - printUsage(); - - throw new RuntimeException(e); - } - } - - public void run() { - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - File[] currentFiles = getCurrentFiles(); - int fileNum = currentFiles.length; - - for(int i = 0; i < fileNum; i++) { - String filename = currentFiles[i].getAbsolutePath(); - String walFilename = directoryName + WAL_FILE_PREFIX + currentFiles[i].getName() + WAL_FILE_EXTENSION; - - LOG.info("Importing: {}", filename); - importHImpalaEntities(impalaLineageHook, filename, walFilename); - - if(i != fileNum - 1) { - deleteLineageAndWal(currentFiles[i], walFilename); - } - } - LOG.info("Impala bridge processing: Done! "); - } - - public static void main(String[] args) { - if (args != null && args.length != 4) { - // The lineage file location and prefix should be input as the parameters - System.out.println("Impala bridge: wrong number of arguments. 
Please try again"); - printUsage(); - return; - } - - ImpalaLineageTool instance = new ImpalaLineageTool(args); - instance.run(); - } - - /** - * Delete the used lineage file and wal file - * @param currentFile The current file - * @param wal The wal file - */ - public static void deleteLineageAndWal(File currentFile, String wal) { - if(currentFile.exists() && currentFile.delete()) { - LOG.info("Lineage file {} is deleted successfully", currentFile.getPath()); - } else { - LOG.info("Failed to delete the lineage file {}", currentFile.getPath()); - } - - File file = new File(wal); - - if(file.exists() && file.delete()) { - LOG.info("Wal file {} deleted successfully", wal); - } else { - LOG.info("Failed to delete the wal file {}", wal); - } - } - - private static void printUsage() { - System.out.println(); - System.out.println(); - System.out.println("Usage: import-impala.sh [-d ] [-p ]" ); - System.out.println(" Imports specified lineage files by given directory and file prefix."); - System.out.println(); - } - - /** - * This function figures out the right lineage file path+name to process sorted by the last - * time they are modified. (old -> new) - * @return get the lineage files from given directory with given prefix. - */ - public File[] getCurrentFiles() { - try { - LOG.info("Scanning: " + directoryName); - File folder = new File(directoryName); - File[] listOfFiles = folder.listFiles((FileFilter) new PrefixFileFilter(prefix, IOCase.SENSITIVE)); - - if ((listOfFiles == null) || (listOfFiles.length == 0)) { - LOG.info("Found no lineage files."); - return new File[0]; - } - - if(listOfFiles.length > 1) { - Arrays.sort(listOfFiles, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR); - } - - LOG.info("Found {} lineage files" + listOfFiles.length); - return listOfFiles; - } catch(Exception e) { - LOG.error("Import lineage file failed.", e); - } - return new File[0]; - } - - private boolean processImpalaLineageHook(ImpalaLineageHook impalaLineageHook, List lineageList) { - boolean allSucceed = true; - - // returns true if successfully sent to Atlas - for (String lineageRecord : lineageList) { - try { - impalaLineageHook.process(lineageRecord); - } catch (Exception ex) { - String errorMessage = String.format("Exception at query {} \n", lineageRecord); - LOG.error(errorMessage, ex); - - allSucceed = false; - } - } - - return allSucceed; - } - - /** - * Create a list of lineage queries based on the lineage file and the wal file - * @param name - * @param walfile - * @return - */ - public void importHImpalaEntities(ImpalaLineageHook impalaLineageHook, String name, String walfile) { - List lineageList = new ArrayList<>(); - - try { - File lineageFile = new File(name); //use current file length to minus the offset - File walFile = new File(walfile); - // if the wal file does not exist, create one with 0 byte read, else, read the number - if(!walFile.exists()) { - BufferedWriter writer = new BufferedWriter(new FileWriter(walfile)); - writer.write("0, " + name); - writer.close(); - } - - LOG.debug("Reading: " + name); - String lineageRecord = FileUtils.readFileToString(lineageFile, "UTF-8"); - - lineageList.add(lineageRecord); - - // call instance of ImpalaLineageHook to process the list of Impala lineage record - if(processImpalaLineageHook(impalaLineageHook, lineageList)) { - // write how many bytes the current file is to the wal file - FileWriter newWalFile = new FileWriter(walfile, true); - BufferedWriter newWalFileBuf = new BufferedWriter(newWalFile); - newWalFileBuf.newLine(); - 
newWalFileBuf.write(String.valueOf(lineageFile.length()) + "," + name); - - newWalFileBuf.close(); - newWalFile.close(); - } else { - LOG.error("Error sending some of impala lineage records to ImpalaHook"); - } - } catch (Exception e) { - LOG.error("Error in processing lineage records. Exception: " + e.getMessage()); - } - } - -} \ No newline at end of file diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/AtlasImpalaHookContext.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/AtlasImpalaHookContext.java deleted file mode 100644 index 51b2f832e7..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/AtlasImpalaHookContext.java +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import org.apache.atlas.impala.model.ImpalaOperationType; -import org.apache.atlas.impala.model.ImpalaQuery; -import org.apache.atlas.impala.model.LineageVertex; -import org.apache.atlas.impala.model.LineageVertexMetadata; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.commons.lang.StringUtils; - - -/** - * Contain the info related to an linear record from Impala - */ -public class AtlasImpalaHookContext { - public static final char QNAME_SEP_METADATA_NAMESPACE = '@'; - public static final char QNAME_SEP_ENTITY_NAME = '.'; - public static final char QNAME_SEP_PROCESS = ':'; - - private final ImpalaLineageHook hook; - private final ImpalaOperationType impalaOperation; - private final ImpalaQuery lineageQuery; - private final Map qNameEntityMap = new HashMap<>(); - - public AtlasImpalaHookContext(ImpalaLineageHook hook, ImpalaOperationType operationType, - ImpalaQuery lineageQuery) throws Exception { - this.hook = hook; - this.impalaOperation = operationType; - this.lineageQuery = lineageQuery; - - } - - public ImpalaQuery getLineageQuery() { - return lineageQuery; - } - public String getQueryStr() { return lineageQuery.getQueryText(); } - - public ImpalaOperationType getImpalaOperationType() { - return impalaOperation; - } - - public void putEntity(String qualifiedName, AtlasEntity entity) { - qNameEntityMap.put(qualifiedName, entity); - } - - public AtlasEntity getEntity(String qualifiedName) { - return qNameEntityMap.get(qualifiedName); - } - - public Collection getEntities() { return qNameEntityMap.values(); } - - public String getMetadataNamespace() { - return hook.getMetadataNamespace(); - } - - public String getHostName() { - return hook.getHostName(); - } - - public boolean isConvertHdfsPathToLowerCase() { - return hook.isConvertHdfsPathToLowerCase(); - } - - public String getQualifiedNameForDb(String dbName) { - return (dbName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace(); - } - - public String getQualifiedNameForTable(String fullTableName) throws IllegalArgumentException { - if (fullTableName == null) { - throw new IllegalArgumentException("fullTableName is null"); - } - - int sepPos = fullTableName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - - if (!isSeparatorIndexValid(sepPos)) { - throw new IllegalArgumentException(fullTableName + " does not contain database name"); - } - - return getQualifiedNameForTable(fullTableName.substring(0, sepPos), fullTableName.substring(sepPos+1)); - } - - public String getQualifiedNameForTable(String dbName, String tableName) { - return (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace(); - } - - public String getQualifiedNameForColumn(LineageVertex vertex) { - // get database name and table name - LineageVertexMetadata metadata = vertex.getMetadata(); - - if (metadata == null) { - return getQualifiedNameForColumn(vertex.getVertexId()); - } - - String fullTableName = metadata.getTableName(); - - if (StringUtils.isEmpty(fullTableName)) { - throw new IllegalArgumentException("fullTableName in column 
metadata is null"); - } - - int sepPos = fullTableName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - - if (!isSeparatorIndexValid(sepPos)) { - throw new IllegalArgumentException(fullTableName + "in column metadata does not contain database name"); - } - - // get pure column name - String columnName = vertex.getVertexId(); - if (StringUtils.isEmpty(columnName)) { - throw new IllegalArgumentException("column name in vertexId is null"); - } - - int sepPosLast = columnName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - if (isSeparatorIndexValid(sepPosLast)) { - columnName = columnName.substring(sepPosLast+1); - } - - return getQualifiedNameForColumn( - fullTableName.substring(0, sepPos), - fullTableName.substring(sepPos+1), - columnName); - } - - public String getQualifiedNameForColumn(String fullColumnName) throws IllegalArgumentException { - if (fullColumnName == null) { - throw new IllegalArgumentException("fullColumnName is null"); - } - - int sepPosFirst = fullColumnName.indexOf(QNAME_SEP_ENTITY_NAME); - int sepPosLast = fullColumnName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - - if (!isSeparatorIndexValid(sepPosFirst) || !isSeparatorIndexValid(sepPosLast) || - sepPosFirst == sepPosLast) { - throw new IllegalArgumentException( - String.format("fullColumnName {} does not contain database name or table name", - fullColumnName)); - } - - return getQualifiedNameForColumn( - fullColumnName.substring(0, sepPosFirst), - fullColumnName.substring(sepPosFirst+1, sepPosLast), - fullColumnName.substring(sepPosLast+1)); - } - - public String getColumnNameOnly(String fullColumnName) throws IllegalArgumentException { - if (fullColumnName == null) { - throw new IllegalArgumentException("fullColumnName is null"); - } - - int sepPosLast = fullColumnName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - - if (!isSeparatorIndexValid(sepPosLast)) { - return fullColumnName; - } - - return fullColumnName.substring(sepPosLast+1); - } - - public String getQualifiedNameForColumn(String dbName, String tableName, String columnName) { - return - (dbName + QNAME_SEP_ENTITY_NAME + tableName + QNAME_SEP_ENTITY_NAME + - columnName + QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + getMetadataNamespace(); - } - - public String getUserName() { return lineageQuery.getUser(); } - - public String getDatabaseNameFromTable(String fullTableName) { - int sepPos = fullTableName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - if (isSeparatorIndexValid(sepPos)) { - return fullTableName.substring(0, sepPos); - } - - return null; - } - - public String getTableNameFromColumn(String columnName) { - int sepPos = columnName.lastIndexOf(QNAME_SEP_ENTITY_NAME); - if (!isSeparatorIndexValid(sepPos)) { - return null; - } - - String tableName = columnName.substring(0, sepPos); - if (!ImpalaIdentifierParser.isTableNameValid(tableName)) { - return null; - } - - return tableName; - } - - public boolean isSeparatorIndexValid(int index) { - return index > 0; - } - -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaHook.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaHook.java deleted file mode 100644 index fc047629a6..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaHook.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import org.apache.impala.hooks.QueryCompleteContext; -import org.apache.impala.hooks.QueryEventHook; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ImpalaHook implements QueryEventHook { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaHook.class); - - private ImpalaLineageHook lineageHook; - - /** - * Execute Impala hook - */ - public void onQueryComplete(QueryCompleteContext context) { - try { - lineageHook.process(context.getLineageGraph()); - } catch (Exception ex) { - String errorMessage = String.format("Error in processing impala lineage: {}", context.getLineageGraph()); - LOG.error(errorMessage, ex); - } - } - - /** - * Initialization of Impala hook - */ - public void onImpalaStartup() { - lineageHook = new ImpalaLineageHook(); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaIdentifierParser.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaIdentifierParser.java deleted file mode 100644 index 33e44f729e..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaIdentifierParser.java +++ /dev/null @@ -1,389 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import java.util.Arrays; -import java.util.HashSet; - -import java.util.Set; -import org.apache.commons.lang.StringUtils; - -/** - * Check if a string is a valid Impala table identifier. - * It could be . or - */ -public class ImpalaIdentifierParser { - // http://www.cloudera.com/content/www/en-us/documentation/enterprise/latest/topics/impala_identifiers.html - // https://github.com/apache/impala/blob/64e6719870db5602a6fa85014bc6c264080b9414/tests/common/patterns.py - // VALID_IMPALA_IDENTIFIER_REGEX = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]{,127}$') - // add "." to allow . - public static final String VALID_IMPALA_IDENTIFIER_REGEX = "^[a-zA-Z][a-zA-Z0-9_.]{0,127}$"; - - public static boolean isTableNameValid(String inTableName) { - if (StringUtils.isEmpty(inTableName)) { - return false; - } - - if (!inTableName.matches(VALID_IMPALA_IDENTIFIER_REGEX)) { - return false; - } - - String[] tokens = inTableName.split("."); - if (tokens.length > 2) { - // valid value should be . or - return false; - } - - for (String token : tokens) { - if (isReserved(token)) { - return false; - } - } - - return true; - } - - // The following is extracted from Impala code. - // Mainly from https://github.com/apache/impala/blob/master/fe/src/main/jflex/sql-scanner.flex - // Map from keyword string to token id. - // We use a linked hash map because the insertion order is important. - // for example, we want "and" to come after "&&" to make sure error reporting - // uses "and" as a display name and not "&&". - // Please keep the puts sorted alphabetically by keyword (where the order - // does not affect the desired error reporting) - static HashSet keywordMap; - // map from token id to token description - static HashSet tokenIdMap; - // Reserved words are words that cannot be used as identifiers. It is a superset of - // keywords. 
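For orientation, a minimal, self-contained sketch of the validation idea described above: a regex check on the identifier shape followed by a reserved-word lookup. The class name and the tiny keyword set are illustrative stand-ins, not the deleted ImpalaIdentifierParser itself:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class IdentifierCheckSketch {
        // Same shape as the regex above: a letter followed by up to 127 letters,
        // digits, underscores or dots (the dot allows the dbName.tableName form).
        private static final String VALID_IMPALA_IDENTIFIER_REGEX = "^[a-zA-Z][a-zA-Z0-9_.]{0,127}$";

        // Tiny illustrative subset; the real reserved-word list is far larger.
        private static final Set<String> RESERVED = new HashSet<>(Arrays.asList("select", "table", "from"));

        public static boolean isTableNameValid(String name) {
            if (name == null || !name.matches(VALID_IMPALA_IDENTIFIER_REGEX)) {
                return false;
            }
            String[] tokens = name.split("\\.");   // String.split takes a regex, so the dot is escaped
            if (tokens.length > 2) {
                return false;                      // only tableName or dbName.tableName are accepted
            }
            for (String token : tokens) {
                if (RESERVED.contains(token.toLowerCase())) {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(isTableNameValid("db1.customers")); // true
            System.out.println(isTableNameValid("select"));        // false: reserved word
        }
    }
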
- static Set reservedWords; - - - public static void init() { - // initilize keywords - keywordMap = new HashSet<>(); - keywordMap.add("&&"); - keywordMap.add("add"); - keywordMap.add("aggregate"); - keywordMap.add("all"); - keywordMap.add("alter"); - keywordMap.add("analytic"); - keywordMap.add("and"); - keywordMap.add("anti"); - keywordMap.add("api_version"); - keywordMap.add("array"); - keywordMap.add("as"); - keywordMap.add("asc"); - keywordMap.add("authorization"); - keywordMap.add("avro"); - keywordMap.add("between"); - keywordMap.add("bigint"); - keywordMap.add("binary"); - keywordMap.add("block_size"); - keywordMap.add("boolean"); - keywordMap.add("by"); - keywordMap.add("cached"); - keywordMap.add("case"); - keywordMap.add("cascade"); - keywordMap.add("cast"); - keywordMap.add("change"); - keywordMap.add("char"); - keywordMap.add("class"); - keywordMap.add("close_fn"); - keywordMap.add("column"); - keywordMap.add("columns"); - keywordMap.add("comment"); - keywordMap.add("compression"); - keywordMap.add("compute"); - keywordMap.add("copy"); - keywordMap.add("create"); - keywordMap.add("cross"); - keywordMap.add("current"); - keywordMap.add("data"); - keywordMap.add("database"); - keywordMap.add("databases"); - keywordMap.add("date"); - keywordMap.add("datetime"); - keywordMap.add("decimal"); - //keywordMap.add("default"); "default" can be database or table name - keywordMap.add("delete"); - keywordMap.add("delimited"); - keywordMap.add("desc"); - keywordMap.add("describe"); - keywordMap.add("distinct"); - keywordMap.add("div"); - keywordMap.add("double"); - keywordMap.add("drop"); - keywordMap.add("else"); - keywordMap.add("encoding"); - keywordMap.add("end"); - keywordMap.add("escaped"); - keywordMap.add("exists"); - keywordMap.add("explain"); - keywordMap.add("extended"); - keywordMap.add("external"); - keywordMap.add("false"); - keywordMap.add("fields"); - keywordMap.add("fileformat"); - keywordMap.add("files"); - keywordMap.add("finalize_fn"); - keywordMap.add("first"); - keywordMap.add("float"); - keywordMap.add("following"); - keywordMap.add("for"); - keywordMap.add("format"); - keywordMap.add("formatted"); - keywordMap.add("from"); - keywordMap.add("full"); - keywordMap.add("function"); - keywordMap.add("functions"); - keywordMap.add("grant"); - keywordMap.add("group"); - keywordMap.add("hash"); - keywordMap.add("having"); - keywordMap.add("if"); - keywordMap.add("ilike"); - keywordMap.add("ignore"); - keywordMap.add("in"); - keywordMap.add("incremental"); - keywordMap.add("init_fn"); - keywordMap.add("inner"); - keywordMap.add("inpath"); - keywordMap.add("insert"); - keywordMap.add("int"); - keywordMap.add("integer"); - keywordMap.add("intermediate"); - keywordMap.add("interval"); - keywordMap.add("into"); - keywordMap.add("invalidate"); - keywordMap.add("iregexp"); - keywordMap.add("is"); - keywordMap.add("join"); - keywordMap.add("kudu"); - keywordMap.add("last"); - keywordMap.add("left"); - keywordMap.add("like"); - keywordMap.add("limit"); - keywordMap.add("lines"); - keywordMap.add("load"); - keywordMap.add("location"); - keywordMap.add("map"); - keywordMap.add("merge_fn"); - keywordMap.add("metadata"); - keywordMap.add("not"); - keywordMap.add("null"); - keywordMap.add("nulls"); - keywordMap.add("offset"); - keywordMap.add("on"); - keywordMap.add("||"); - keywordMap.add("or"); - keywordMap.add("orc"); - keywordMap.add("order"); - keywordMap.add("outer"); - keywordMap.add("over"); - keywordMap.add("overwrite"); - keywordMap.add("parquet"); - 
keywordMap.add("parquetfile"); - keywordMap.add("partition"); - keywordMap.add("partitioned"); - keywordMap.add("partitions"); - keywordMap.add("preceding"); - keywordMap.add("prepare_fn"); - keywordMap.add("primary"); - keywordMap.add("produced"); - keywordMap.add("purge"); - keywordMap.add("range"); - keywordMap.add("rcfile"); - keywordMap.add("real"); - keywordMap.add("recover"); - keywordMap.add("refresh"); - keywordMap.add("regexp"); - keywordMap.add("rename"); - keywordMap.add("repeatable"); - keywordMap.add("replace"); - keywordMap.add("replication"); - keywordMap.add("restrict"); - keywordMap.add("returns"); - keywordMap.add("revoke"); - keywordMap.add("right"); - keywordMap.add("rlike"); - keywordMap.add("role"); - keywordMap.add("roles"); - keywordMap.add("row"); - keywordMap.add("rows"); - keywordMap.add("schema"); - keywordMap.add("schemas"); - keywordMap.add("select"); - keywordMap.add("semi"); - keywordMap.add("sequencefile"); - keywordMap.add("serdeproperties"); - keywordMap.add("serialize_fn"); - keywordMap.add("set"); - keywordMap.add("show"); - keywordMap.add("smallint"); - keywordMap.add("sort"); - keywordMap.add("stats"); - keywordMap.add("stored"); - keywordMap.add("straight_join"); - keywordMap.add("string"); - keywordMap.add("struct"); - keywordMap.add("symbol"); - keywordMap.add("table"); - keywordMap.add("tables"); - keywordMap.add("tablesample"); - keywordMap.add("tblproperties"); - keywordMap.add("terminated"); - keywordMap.add("textfile"); - keywordMap.add("then"); - keywordMap.add("timestamp"); - keywordMap.add("tinyint"); - keywordMap.add("to"); - keywordMap.add("true"); - keywordMap.add("truncate"); - keywordMap.add("unbounded"); - keywordMap.add("uncached"); - keywordMap.add("union"); - keywordMap.add("unknown"); - keywordMap.add("update"); - keywordMap.add("update_fn"); - keywordMap.add("upsert"); - keywordMap.add("use"); - keywordMap.add("using"); - keywordMap.add("values"); - keywordMap.add("varchar"); - keywordMap.add("view"); - keywordMap.add("when"); - keywordMap.add("where"); - keywordMap.add("with"); - - // Initilize tokenIdMap for error reporting - tokenIdMap = new HashSet<>(keywordMap); - - // add non-keyword tokens. Please keep this in the same order as they are used in this - // file. - tokenIdMap.add("EOF"); - tokenIdMap.add("..."); - tokenIdMap.add(":"); - tokenIdMap.add(";"); - tokenIdMap.add("COMMA"); - tokenIdMap.add("."); - tokenIdMap.add("*"); - tokenIdMap.add("("); - tokenIdMap.add(")"); - tokenIdMap.add("["); - tokenIdMap.add("]"); - tokenIdMap.add("/"); - tokenIdMap.add("%"); - tokenIdMap.add("+"); - tokenIdMap.add("-"); - tokenIdMap.add("&"); - tokenIdMap.add("|"); - tokenIdMap.add("^"); - tokenIdMap.add("~"); - tokenIdMap.add("="); - tokenIdMap.add("!"); - tokenIdMap.add("<"); - tokenIdMap.add(">"); - tokenIdMap.add("UNMATCHED STRING LITERAL"); - tokenIdMap.add("!="); - tokenIdMap.add("INTEGER LITERAL"); - tokenIdMap.add("NUMERIC OVERFLOW"); - tokenIdMap.add("DECIMAL LITERAL"); - tokenIdMap.add("EMPTY IDENTIFIER"); - tokenIdMap.add("IDENTIFIER"); - tokenIdMap.add("STRING LITERAL"); - tokenIdMap.add("COMMENTED_PLAN_HINT_START"); - tokenIdMap.add("COMMENTED_PLAN_HINT_END"); - tokenIdMap.add("Unexpected character"); - - - // For impala 3.0, reserved words = keywords + sql16ReservedWords - builtinFunctions - // - whitelist - // unused reserved words = reserved words - keywords. These words are reserved for - // forward compatibility purposes. 
- reservedWords = new HashSet<>(keywordMap); - // Add SQL:2016 reserved words - reservedWords.addAll(Arrays.asList(new String[] { - "abs", "acos", "allocate", "any", "are", "array_agg", "array_max_cardinality", - "asensitive", "asin", "asymmetric", "at", "atan", "atomic", "avg", "begin", - "begin_frame", "begin_partition", "blob", "both", "call", "called", "cardinality", - "cascaded", "ceil", "ceiling", "char_length", "character", "character_length", - "check", "classifier", "clob", "close", "coalesce", "collate", "collect", - "commit", "condition", "connect", "constraint", "contains", "convert", "copy", - "corr", "corresponding", "cos", "cosh", "count", "covar_pop", "covar_samp", - "cube", "cume_dist", "current_catalog", "current_date", - "current_default_transform_group", "current_path", "current_path", "current_role", - "current_role", "current_row", "current_schema", "current_time", - "current_timestamp", "current_transform_group_for_type", "current_user", "cursor", - "cycle", "day", "deallocate", "dec", "decfloat", "declare", "define", - "dense_rank", "deref", "deterministic", "disconnect", "dynamic", "each", - "element", "empty", "end-exec", "end_frame", "end_partition", "equals", "escape", - "every", "except", "exec", "execute", "exp", "extract", "fetch", "filter", - "first_value", "floor", "foreign", "frame_row", "free", "fusion", "get", "global", - "grouping", "groups", "hold", "hour", "identity", "indicator", "initial", "inout", - "insensitive", "integer", "intersect", "intersection", "json_array", - "json_arrayagg", "json_exists", "json_object", "json_objectagg", "json_query", - "json_table", "json_table_primitive", "json_value", "lag", "language", "large", - "last_value", "lateral", "lead", "leading", "like_regex", "listagg", "ln", - "local", "localtime", "localtimestamp", "log", "log10 ", "lower", "match", - "match_number", "match_recognize", "matches", "max", "member", "merge", "method", - "min", "minute", "mod", "modifies", "module", "month", "multiset", "national", - "natural", "nchar", "nclob", "new", "no", "none", "normalize", "nth_value", - "ntile", "nullif", "numeric", "occurrences_regex", "octet_length", "of", "old", - "omit", "one", "only", "open", "out", "overlaps", "overlay", "parameter", - "pattern", "per", "percent", "percent_rank", "percentile_cont", "percentile_disc", - "period", "portion", "position", "position_regex", "power", "precedes", - "precision", "prepare", "procedure", "ptf", "rank", "reads", "real", "recursive", - "ref", "references", "referencing", "regr_avgx", "regr_avgy", "regr_count", - "regr_intercept", "regr_r2", "regr_slope", "regr_sxx", "regr_sxy", "regr_syy", - "release", "result", "return", "rollback", "rollup", "row_number", "running", - "savepoint", "scope", "scroll", "search", "second", "seek", "sensitive", - "session_user", "similar", "sin", "sinh", "skip", "some", "specific", - "specifictype", "sql", "sqlexception", "sqlstate", "sqlwarning", "sqrt", "start", - "static", "stddev_pop", "stddev_samp", "submultiset", "subset", "substring", - "substring_regex", "succeeds", "sum", "symmetric", "system", "system_time", - "system_user", "tan", "tanh", "time", "timezone_hour", "timezone_minute", - "trailing", "translate", "translate_regex", "translation", "treat", "trigger", - "trim", "trim_array", "uescape", "unique", "unknown", "unnest", "update ", - "upper", "user", "value", "value_of", "var_pop", "var_samp", "varbinary", - "varying", "versioning", "whenever", "width_bucket", "window", "within", - "without", "year"})); - // TODO: Remove 
impala builtin function names. Need to find content of - // BuiltinsDb.getInstance().getAllFunctions() - //reservedWords.removeAll(BuiltinsDb.getInstance().getAllFunctions().keySet()); - - // Remove whitelist words. These words might be heavily used in production, and - // impala is unlikely to implement SQL features around these words in the near future. - reservedWords.removeAll(Arrays.asList(new String[] { - // time units - "year", "month", "day", "hour", "minute", "second", - "begin", "call", "check", "classifier", "close", "identity", "language", - "localtime", "member", "module", "new", "nullif", "old", "open", "parameter", - "period", "result", "return", "sql", "start", "system", "time", "user", "value" - })); - } - - static { - init(); - } - - static boolean isReserved(String token) { - return token != null && reservedWords.contains(token.toLowerCase()); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java deleted file mode 100644 index 907f24478c..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaLineageHook.java +++ /dev/null @@ -1,155 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import com.google.common.collect.Sets; -import java.io.IOException; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.impala.hook.events.BaseImpalaEvent; -import org.apache.atlas.impala.hook.events.CreateImpalaProcess; -import org.apache.atlas.impala.model.ImpalaOperationType; -import org.apache.atlas.impala.model.ImpalaQuery; -import org.apache.atlas.type.AtlasType; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import javax.security.auth.Subject; -import javax.security.auth.kerberos.KerberosPrincipal; -import java.util.HashSet; - -import static org.apache.atlas.repository.Constants.IMPALA_SOURCE; - -public class ImpalaLineageHook extends AtlasHook { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaLineageHook.class); - public static final String ATLAS_ENDPOINT = "atlas.rest.address"; - public static final String REALM_SEPARATOR = "@"; - public static final String CONF_PREFIX = "atlas.hook.impala."; - public static final String CONF_REALM_NAME = "atlas.realm.name"; - public static final String HDFS_PATH_CONVERT_TO_LOWER_CASE = CONF_PREFIX + "hdfs_path.convert_to_lowercase"; - public static final String DEFAULT_HOST_NAME = "localhost"; - - private static final String realm; - private static final boolean convertHdfsPathToLowerCase; - private static String hostName; - - static { - realm = atlasProperties.getString(CONF_REALM_NAME, DEFAULT_CLUSTER_NAME); // what should default be ?? - convertHdfsPathToLowerCase = atlasProperties.getBoolean(HDFS_PATH_CONVERT_TO_LOWER_CASE, false); - - try { - hostName = InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - LOG.warn("No hostname found. 
Setting the hostname to default value {}", DEFAULT_HOST_NAME, e); - hostName = DEFAULT_HOST_NAME; - } - } - - public ImpalaLineageHook() { - - } - - public String getMessageSource() { - return IMPALA_SOURCE; - } - - public void process(String impalaQueryString) throws Exception { - if (StringUtils.isEmpty(impalaQueryString)) { - LOG.warn("==> ImpalaLineageHook.process skips because the impalaQueryString is empty <=="); - return; - } - - ImpalaQuery lineageQuery = AtlasType.fromJson(impalaQueryString, ImpalaQuery.class); - process(lineageQuery); - } - - public void process(ImpalaQuery lineageQuery) throws Exception { - if (lineageQuery == null) { - LOG.warn("==> ImpalaLineageHook.process skips because the query object is null <=="); - return; - } - - if (StringUtils.isEmpty(lineageQuery.getQueryText())) { - LOG.warn("==> ImpalaLineageHook.process skips because the query text is empty <=="); - return; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("==> ImpalaLineageHook.process({})", lineageQuery.getQueryText()); - } - - try { - ImpalaOperationType operationType = ImpalaOperationParser.getImpalaOperationType(lineageQuery.getQueryText()); - AtlasImpalaHookContext context = - new AtlasImpalaHookContext(this, operationType, lineageQuery); - BaseImpalaEvent event = null; - - switch (operationType) { - case CREATEVIEW: - case CREATETABLE_AS_SELECT: - case ALTERVIEW_AS: - case QUERY: - event = new CreateImpalaProcess(context); - break; - default: - if (LOG.isDebugEnabled()) { - LOG.debug("HiveHook.run({}): operation ignored", lineageQuery.getQueryText()); - } - break; - } - - if (event != null) { - LOG.debug("Processing event: " + lineageQuery.getQueryText()); - - final UserGroupInformation ugi = getUgiFromUserName(lineageQuery.getUser()); - - super.notifyEntities(event.getNotificationMessages(), ugi); - } - } catch (Throwable t) { - - LOG.error("ImpalaLineageHook.process(): failed to process query {}", - AtlasType.toJson(lineageQuery), t); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== ImpalaLineageHook.process({})", lineageQuery.getQueryText()); - } - } - - public String getHostName() { - return hostName; - } - - private UserGroupInformation getUgiFromUserName(String userName) throws IOException { - String userPrincipal = userName.contains(REALM_SEPARATOR)? userName : userName + "@" + getRealm(); - Subject userSubject = new Subject(false, Sets.newHashSet( - new KerberosPrincipal(userPrincipal)), new HashSet(),new HashSet()); - return UserGroupInformation.getUGIFromSubject(userSubject); - } - - public String getRealm() { - return realm; - } - - public boolean isConvertHdfsPathToLowerCase() { - return convertHdfsPathToLowerCase; - } -} \ No newline at end of file diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaOperationParser.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaOperationParser.java deleted file mode 100644 index 98f3eed1b7..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/ImpalaOperationParser.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook; - -import org.apache.atlas.impala.model.ImpalaOperationType; -import org.apache.commons.lang.StringUtils; -import java.util.regex.Pattern; - -/** - * Parse an Impala query text and output the impala operation type - */ -public class ImpalaOperationParser { - - private static final Pattern COMMENT_PATTERN = Pattern.compile("/\\*.*?\\*/", Pattern.DOTALL); - - private static final Pattern CREATE_VIEW_PATTERN = - Pattern.compile("^[ ]*\\bcreate\\b.*\\bview\\b.*", Pattern.DOTALL | Pattern.CASE_INSENSITIVE); - - private static final Pattern CREATE_TABLE_AS_SELECT_PATTERN = - Pattern.compile("^[ ]*\\bcreate\\b.*\\btable\\b.*\\bas\\b.*\\bselect\\b.*", Pattern.DOTALL | Pattern.CASE_INSENSITIVE); - - private static final Pattern ALTER_VIEW_AS_SELECT_PATTERN = - Pattern.compile("^[ ]*\\balter\\b.*\\bview\\b.*\\bas.*\\bselect\\b.*", Pattern.DOTALL | Pattern.CASE_INSENSITIVE); - - private static final Pattern INSERT_SELECT_FROM_PATTERN = - Pattern.compile("^[ ]*\\binsert\\b.*\\b(into|overwrite)\\b.*\\bselect\\b.*\\bfrom\\b.*", Pattern.DOTALL | Pattern.CASE_INSENSITIVE); - - public ImpalaOperationParser() { - } - - public static ImpalaOperationType getImpalaOperationType(String queryText) { - // Impala does no generate lineage record for command "LOAD DATA IN PATH" - String queryTextWithNoComments = COMMENT_PATTERN.matcher(queryText).replaceAll(""); - if (doesMatch(queryTextWithNoComments, CREATE_VIEW_PATTERN)) { - return ImpalaOperationType.CREATEVIEW; - } else if (doesMatch(queryTextWithNoComments, CREATE_TABLE_AS_SELECT_PATTERN)) { - return ImpalaOperationType.CREATETABLE_AS_SELECT; - } else if (doesMatch(queryTextWithNoComments, ALTER_VIEW_AS_SELECT_PATTERN)) { - return ImpalaOperationType.ALTERVIEW_AS; - } else if (doesMatch(queryTextWithNoComments, INSERT_SELECT_FROM_PATTERN)) { - return ImpalaOperationType.QUERY; - } - - return ImpalaOperationType.UNKNOWN; - } - - public static ImpalaOperationType getImpalaOperationSubType(ImpalaOperationType operationType, String queryText) { - if (operationType == ImpalaOperationType.QUERY) { - if (StringUtils.containsIgnoreCase(queryText, "insert into")) { - return ImpalaOperationType.INSERT; - } else if (StringUtils.containsIgnoreCase(queryText, "insert overwrite")) { - return ImpalaOperationType.INSERT_OVERWRITE; - } - } - - return ImpalaOperationType.UNKNOWN; - } - - private static boolean doesMatch(final String queryText, final Pattern pattern) { - return pattern.matcher(queryText).matches(); - } - -} \ No newline at end of file diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/BaseImpalaEvent.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/BaseImpalaEvent.java deleted file mode 100644 index 32efb8321c..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/BaseImpalaEvent.java +++ /dev/null @@ -1,665 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook.events; - -import static org.apache.atlas.impala.hook.AtlasImpalaHookContext.QNAME_SEP_PROCESS; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.atlas.impala.hook.AtlasImpalaHookContext; -import org.apache.atlas.impala.hook.ImpalaOperationParser; -import org.apache.atlas.impala.model.ImpalaDataType; -import org.apache.atlas.impala.model.ImpalaNode; -import org.apache.atlas.impala.model.ImpalaOperationType; -import org.apache.atlas.impala.model.ImpalaVertexType; -import org.apache.atlas.impala.model.LineageVertex; -import org.apache.atlas.impala.model.LineageVertexMetadata; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.type.AtlasTypeUtil; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The base class for generating notification event to Atlas server - * Most code is copied from BaseHiveEvent to avoid depending on org.apache.atlas.hive.hook - */ -public abstract class BaseImpalaEvent { - private static final Logger LOG = LoggerFactory.getLogger(BaseImpalaEvent.class); - - // Impala should re-use the same entity type as hive. 
So Hive and Impala can operate on same - // database or table - public static final String HIVE_TYPE_DB = "hive_db"; - public static final String HIVE_TYPE_TABLE = "hive_table"; - public static final String HIVE_TYPE_COLUMN = "hive_column"; - - public static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName"; - public static final String ATTRIBUTE_NAME = "name"; - public static final String ATTRIBUTE_OWNER = "owner"; - public static final String ATTRIBUTE_CLUSTER_NAME = "clusterName"; - public static final String ATTRIBUTE_CREATE_TIME = "createTime"; - public static final String ATTRIBUTE_LAST_ACCESS_TIME = "lastAccessTime"; - public static final String ATTRIBUTE_DB = "db"; - public static final String ATTRIBUTE_COLUMNS = "columns"; - public static final String ATTRIBUTE_TABLE = "table"; - public static final String ATTRIBUTE_INPUTS = "inputs"; - public static final String ATTRIBUTE_OUTPUTS = "outputs"; - public static final String ATTRIBUTE_OPERATION_TYPE = "operationType"; - public static final String ATTRIBUTE_START_TIME = "startTime"; - public static final String ATTRIBUTE_USER_NAME = "userName"; - public static final String ATTRIBUTE_QUERY_TEXT = "queryText"; - public static final String ATTRIBUTE_PROCESS = "process"; - public static final String ATTRIBUTE_PROCESS_EXECUTIONS = "processExecutions"; - public static final String ATTRIBUTE_QUERY_ID = "queryId"; - public static final String ATTRIBUTE_QUERY_PLAN = "queryPlan"; - public static final String ATTRIBUTE_END_TIME = "endTime"; - public static final String ATTRIBUTE_RECENT_QUERIES = "recentQueries"; - public static final String ATTRIBUTE_QUERY = "query"; - public static final String ATTRIBUTE_DEPENDENCY_TYPE = "dependencyType"; - public static final String ATTRIBUTE_HOSTNAME = "hostName"; - public static final String EMPTY_ATTRIBUTE_VALUE = ""; - public static final String ATTRIBUTE_EXEC_TIME = "execTime"; - public static final String ATTRIBUTE_DDL_QUERIES = "ddlQueries"; - public static final String ATTRIBUTE_SERVICE_TYPE = "serviceType"; - public static final long MILLIS_CONVERT_FACTOR = 1000; - - protected final AtlasImpalaHookContext context; - protected final Map vertexNameMap; - protected final Map verticesMap; - - public BaseImpalaEvent(AtlasImpalaHookContext context) { - - this.context = context; - vertexNameMap = new HashMap<>(); - verticesMap = new HashMap<>(); - } - - public AtlasImpalaHookContext getContext() { - return context; - } - - public abstract List getNotificationMessages() throws Exception; - - public String getUserName() { return context.getUserName(); } - - public String getTableNameFromVertex(LineageVertex vertex) { - if (vertex.getVertexType() == ImpalaVertexType.COLUMN) { - LineageVertexMetadata metadata = vertex.getMetadata(); - - if (metadata != null) { - return metadata.getTableName(); - } - } - - return getTableNameFromColumn(vertex.getVertexId()); - } - - public String getTableNameFromColumn(String columnName) { - return context.getTableNameFromColumn(columnName); - } - - public String getQualifiedName(ImpalaNode node) throws IllegalArgumentException { - - return getQualifiedName(node.getOwnVertex()); - } - - public String getQualifiedName(LineageVertex node) throws IllegalArgumentException { - if (node == null) { - throw new IllegalArgumentException("node is null"); - } - - ImpalaVertexType nodeType = node.getVertexType(); - - if (nodeType == null) { - if (node.getVertexId() != null) { - LOG.warn("null qualified name for type: null and name: {}", node.getVertexId()); - } - return null; - } - - 
if (node.getVertexId() == null) { - LOG.warn("null qualified name for type: {} and name: null", nodeType); - return null; - } - - switch (nodeType) { - case DATABASE: - return context.getQualifiedNameForDb(node.getVertexId()); - - case TABLE: - return context.getQualifiedNameForTable(node.getVertexId()); - - case COLUMN: - return context.getQualifiedNameForColumn(node); - - default: - LOG.warn("null qualified name for type: {} and name: {}", nodeType, node.getVertexId()); - return null; - } - } - - static final class AtlasEntityComparator implements Comparator { - @Override - public int compare(AtlasEntity entity1, AtlasEntity entity2) { - String name1 = (String)entity1.getAttribute(ATTRIBUTE_QUALIFIED_NAME); - String name2 = (String)entity2.getAttribute(ATTRIBUTE_QUALIFIED_NAME); - - if (name1 == null) { - return -1; - } - - if (name2 == null) { - return 1; - } - - return name1.toLowerCase().compareTo(name2.toLowerCase()); - } - } - - static final Comparator entityComparator = new AtlasEntityComparator(); - - protected String getQualifiedName(List inputs, List outputs) throws Exception { - ImpalaOperationType operation = context.getImpalaOperationType(); - - if (operation == ImpalaOperationType.CREATEVIEW || - operation == ImpalaOperationType.CREATETABLE_AS_SELECT || - operation == ImpalaOperationType.ALTERVIEW_AS) { - List sortedEntities = new ArrayList<>(outputs); - - Collections.sort(sortedEntities, entityComparator); - - for (AtlasEntity entity : sortedEntities) { - if (entity.getTypeName().equalsIgnoreCase(HIVE_TYPE_TABLE)) { - Long createTime = (Long)entity.getAttribute(ATTRIBUTE_CREATE_TIME); - - return (String)entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME) + QNAME_SEP_PROCESS + createTime; - } - } - } - - if (operation != ImpalaOperationType.QUERY) { - String errorMessage = String.format("Expect operation to be QUERY, but get unexpected operation type {}", operation.name()); - LOG.error(errorMessage); - throw new IllegalArgumentException(errorMessage); - } - - // construct qualified name for QUERY - String qualifiedName = null; - String operationName = operation.toString(); - - if (operationName != null) { - StringBuilder sb = new StringBuilder(operationName); - - addToProcessQualifiedName(sb, inputs, false); - sb.append("->"); - addToProcessQualifiedName(sb, outputs, true); - - qualifiedName = sb.toString(); - } - - - return qualifiedName; - } - - protected void addToProcessQualifiedName(StringBuilder processQualifiedName, Collection entities, boolean isOutput) { - if (entities == null) { - return; - } - - ImpalaOperationType operation = context.getImpalaOperationType(); - String queryText = context.getQueryStr(); - List sortedEntities = new ArrayList<>(entities); - - Collections.sort(sortedEntities, entityComparator); - - Set dataSetsProcessed = new HashSet<>(); - - for (AtlasEntity entity : sortedEntities) { - String qualifiedName = null; - long createTime = 0; - - qualifiedName = (String)entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME); - - if (entity.getTypeName().equalsIgnoreCase(HIVE_TYPE_TABLE)) { - Long createTimeObj = (Long)entity.getAttribute(ATTRIBUTE_CREATE_TIME); - if (createTimeObj != null) { - createTime = createTimeObj; - } - } - - if (qualifiedName == null || !dataSetsProcessed.add(qualifiedName)) { - continue; - } - - if (isOutput) { - boolean addWriteType = false; - ImpalaOperationType subType = ImpalaOperationParser.getImpalaOperationSubType(operation, queryText); - - switch (subType) { - // Impala does not generate lineage for UPDATE and DELETE - case INSERT: - 
case INSERT_OVERWRITE: - addWriteType = true; - break; - } - - if (addWriteType) { - processQualifiedName.append(QNAME_SEP_PROCESS).append(subType.name()); - } - } - - processQualifiedName.append(QNAME_SEP_PROCESS).append(qualifiedName.toLowerCase().replaceAll("/", "")); - - if (createTime != 0) { - processQualifiedName.append(QNAME_SEP_PROCESS).append(createTime); - } - } - } - - protected AtlasEntity getInputOutputEntity(ImpalaNode node, AtlasEntityExtInfo entityExtInfo) throws Exception { - AtlasEntity ret = null; - - switch(node.getNodeType()) { - case TABLE: - case PARTITION: - case DFS_DIR: { - ret = toAtlasEntity(node, entityExtInfo); - } - break; - } - - return ret; - } - - protected AtlasEntity toAtlasEntity(ImpalaNode node, AtlasEntityExtInfo entityExtInfo) throws Exception { - AtlasEntity ret = null; - - switch (node.getNodeType()) { - case DATABASE: - ret = toDbEntity(node); - break; - - case TABLE: - case PARTITION: - ret = toTableEntity(node, entityExtInfo); - break; - - default: - break; - } - - return ret; - } - - protected AtlasEntity toDbEntity(ImpalaNode db) throws Exception { - return toDbEntity(db.getNodeName()); - } - - protected AtlasEntity toDbEntity(String dbName) throws Exception { - String dbQualifiedName = context.getQualifiedNameForDb(dbName); - AtlasEntity ret = context.getEntity(dbQualifiedName); - - if (ret == null) { - ret = new AtlasEntity(HIVE_TYPE_DB); - - // Impala hook should not send metadata entities. set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - ret.setGuid(null); - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName); - ret.setAttribute(ATTRIBUTE_NAME, dbName.toLowerCase()); - ret.setAttribute(ATTRIBUTE_CLUSTER_NAME, context.getMetadataNamespace()); - - context.putEntity(dbQualifiedName, ret); - } - - return ret; - } - - protected AtlasEntityWithExtInfo toTableEntity(ImpalaNode table) throws Exception { - AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(); - - AtlasEntity entity = toTableEntity(table, ret); - - if (entity != null) { - ret.setEntity(entity); - } else { - ret = null; - } - - return ret; - } - - protected AtlasEntity toTableEntity(ImpalaNode table, AtlasEntitiesWithExtInfo entities) throws Exception { - AtlasEntity ret = toTableEntity(table, (AtlasEntityExtInfo) entities); - - if (ret != null) { - entities.addEntity(ret); - } - - return ret; - } - - protected AtlasEntity toTableEntity(ImpalaNode table, AtlasEntityExtInfo entityExtInfo) throws Exception { - if ((table == null) || (table.getNodeName() == null)) { - throw new IllegalArgumentException("table is null or its name is null"); - } - - String dbName = context.getDatabaseNameFromTable(table.getNodeName()); - if (dbName == null) { - throw new IllegalArgumentException(String.format("db name is null for table: {}", table.getNodeName())); - } - - AtlasEntity dbEntity = toDbEntity(dbName); - - if (entityExtInfo != null) { - if (dbEntity != null) { - entityExtInfo.addReferredEntity(dbEntity); - } - } - - AtlasEntity ret = toTableEntity(getObjectId(dbEntity), table, entityExtInfo); - - return ret; - } - - protected AtlasEntity toTableEntity(AtlasObjectId dbId, ImpalaNode table, AtlasEntityExtInfo entityExtInfo) throws Exception { - String tblQualifiedName = getQualifiedName(table); - AtlasEntity ret = context.getEntity(tblQualifiedName); - - if (ret != null) { - return ret; - } - - // a table created in Impala still uses HIVE_TYPE_TABLE to 
allow both Impala and Hive operate - // on the same table - ret = new AtlasEntity(HIVE_TYPE_TABLE); - - // Impala hook should not send meta data entity to Atlas. set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - // TODO: enable this once HMS hook is in. Disable this before that. - ret.setGuid(null); - - long createTime = getTableCreateTime(table); - long lastAccessTime = createTime; - - ret.setAttribute(ATTRIBUTE_DB, dbId); - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, tblQualifiedName); - ret.setAttribute(ATTRIBUTE_NAME, table.getNodeName().toLowerCase()); - - // just fake it. It should not be sent to Atlas once HMS hook is in - ret.setAttribute(ATTRIBUTE_OWNER, getUserName()); - - ret.setAttribute(ATTRIBUTE_CREATE_TIME, createTime); - ret.setAttribute(ATTRIBUTE_LAST_ACCESS_TIME, lastAccessTime); - - AtlasObjectId tableId = getObjectId(ret); - List columns = getColumnEntities(tableId, table); - - if (entityExtInfo != null) { - if (columns != null) { - for (AtlasEntity column : columns) { - entityExtInfo.addReferredEntity(column); - } - } - } - - ret.setAttribute(ATTRIBUTE_COLUMNS, getObjectIds(columns)); - - - context.putEntity(tblQualifiedName, ret); - - return ret; - } - - public static AtlasObjectId getObjectId(AtlasEntity entity) { - String qualifiedName = (String) entity.getAttribute(ATTRIBUTE_QUALIFIED_NAME); - AtlasObjectId ret = new AtlasObjectId(entity.getGuid(), entity.getTypeName(), Collections - .singletonMap(ATTRIBUTE_QUALIFIED_NAME, qualifiedName)); - - return ret; - } - - public static List getObjectIds(List entities) { - final List ret; - - if (CollectionUtils.isNotEmpty(entities)) { - ret = new ArrayList<>(entities.size()); - - for (AtlasEntity entity : entities) { - ret.add(getObjectId(entity)); - } - } else { - ret = Collections.emptyList(); - } - - return ret; - } - - /** - * return the createTime of the table. - * @param table - * @return the createTime of the table. Its unit is in milliseconds. - */ - public static long getTableCreateTime(ImpalaNode table) { - return getTableCreateTime(table.getOwnVertex()); - } - - public static long getTableCreateTime(LineageVertex tableVertex) { - Long createTime = tableVertex.getCreateTime(); - if (createTime != null) { - // the time unit of vertex is in seconds. Convert to milliseconds before sending to Atlas. - return createTime.longValue() * MILLIS_CONVERT_FACTOR; - } else { - return System.currentTimeMillis(); - } - } - - protected List getColumnEntities(AtlasObjectId tableId, ImpalaNode table) { - List ret = new ArrayList<>(); - - for (ImpalaNode childNode : table.getChildren().values()) { - String colQualifiedName = getQualifiedName(childNode); - AtlasEntity column = context.getEntity(colQualifiedName); - - if (column == null) { - column = new AtlasEntity(HIVE_TYPE_COLUMN); - - // if column's table was sent in an earlier notification, set 'guid' to null - which will: - // - result in this entity to be not included in 'referredEntities' - // - cause Atlas server to resolve the entity by its qualifiedName - // TODO: enable this once HMS hook is in. Disable this before that. - column.setGuid(null); - - column.setAttribute(ATTRIBUTE_TABLE, tableId); - column.setAttribute(ATTRIBUTE_QUALIFIED_NAME, colQualifiedName); - column.setAttribute(ATTRIBUTE_NAME, context.getColumnNameOnly(childNode.getNodeName())); - - // just fake it. 
It should not be sent to Atlas once HMS hook is in - column.setAttribute(ATTRIBUTE_OWNER, getUserName()); - - context.putEntity(colQualifiedName, column); - } - - ret.add(column); - } - - return ret; - } - - protected AtlasEntity getImpalaProcessEntity(List inputs, List outputs) throws Exception { - AtlasEntity ret = new AtlasEntity(ImpalaDataType.IMPALA_PROCESS.getName()); - String queryStr = context.getQueryStr(); - - if (queryStr != null) { - queryStr = queryStr.toLowerCase().trim(); - } - - Long startTime = getQueryStartTime(); - Long endTime = getQueryEndTime(); - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, getQualifiedName(inputs, outputs)); - ret.setAttribute(ATTRIBUTE_INPUTS, getObjectIds(inputs)); - ret.setAttribute(ATTRIBUTE_OUTPUTS, getObjectIds(outputs)); - ret.setAttribute(ATTRIBUTE_NAME, queryStr); - ret.setAttribute(ATTRIBUTE_OPERATION_TYPE, context.getImpalaOperationType()); - - // We are setting an empty value to these attributes, since now we have a new entity type called impala process - // execution which captures these values. We have to set empty values here because these attributes are - // mandatory attributes for impala process entity type. - ret.setAttribute(ATTRIBUTE_START_TIME, startTime); - ret.setAttribute(ATTRIBUTE_END_TIME, endTime); - ret.setAttribute(ATTRIBUTE_USER_NAME, EMPTY_ATTRIBUTE_VALUE); - ret.setAttribute(ATTRIBUTE_QUERY_TEXT, EMPTY_ATTRIBUTE_VALUE); - ret.setAttribute(ATTRIBUTE_QUERY_ID, EMPTY_ATTRIBUTE_VALUE); - ret.setAttribute(ATTRIBUTE_QUERY_PLAN, "Not Supported"); - ret.setAttribute(ATTRIBUTE_RECENT_QUERIES, Collections.singletonList(queryStr)); - - return ret; - } - - protected AtlasEntity getImpalaProcessExecutionEntity(AtlasEntity impalaProcess) throws Exception { - AtlasEntity ret = new AtlasEntity(ImpalaDataType.IMPALA_PROCESS_EXECUTION.getName()); - String queryStr = context.getQueryStr(); - - if (queryStr != null) { - queryStr = queryStr.toLowerCase().trim(); - } - - Long startTime = getQueryStartTime(); - Long endTime = getQueryEndTime(); - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, impalaProcess.getAttribute(ATTRIBUTE_QUALIFIED_NAME).toString() + - QNAME_SEP_PROCESS + startTime.toString() + - QNAME_SEP_PROCESS + endTime.toString()); - ret.setAttribute(ATTRIBUTE_NAME, queryStr + QNAME_SEP_PROCESS + startTime); - ret.setAttribute(ATTRIBUTE_START_TIME, startTime); - ret.setAttribute(ATTRIBUTE_END_TIME, endTime); - ret.setAttribute(ATTRIBUTE_USER_NAME, getUserName()); - ret.setAttribute(ATTRIBUTE_QUERY_TEXT, queryStr); - ret.setAttribute(ATTRIBUTE_QUERY_ID, context.getLineageQuery().getQueryId()); - ret.setAttribute(ATTRIBUTE_QUERY_PLAN, "Not Supported"); - ret.setAttribute(ATTRIBUTE_HOSTNAME, context.getHostName()); - ret.setRelationshipAttribute(ATTRIBUTE_PROCESS, AtlasTypeUtil.toAtlasRelatedObjectId(impalaProcess)); - - return ret; - } - - protected Long getQueryStartTime() { - return context.getLineageQuery().getTimestamp() * BaseImpalaEvent.MILLIS_CONVERT_FACTOR; - } - - protected Long getQueryEndTime() { - return context.getLineageQuery().getEndTime() * BaseImpalaEvent.MILLIS_CONVERT_FACTOR; - } - - protected void addProcessedEntities(AtlasEntitiesWithExtInfo entitiesWithExtInfo) { - for (AtlasEntity entity : context.getEntities()) { - entitiesWithExtInfo.addReferredEntity(entity); - } - - entitiesWithExtInfo.compact(); - } - - // The unit of createTime in vertex is in seconds. So the returned value is - // time in seconds. 
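Because the surrounding methods repeatedly convert between these two units, here is a small illustrative helper assuming the factor of 1000 used by MILLIS_CONVERT_FACTOR; it is a sketch, not the deleted implementation:

    public class CreateTimeSketch {
        private static final long MILLIS_CONVERT_FACTOR = 1000L;

        // Impala lineage vertices report createTime in seconds; Atlas attributes expect milliseconds.
        static long toAtlasMillis(long vertexCreateTimeSecs) {
            return vertexCreateTimeSecs * MILLIS_CONVERT_FACTOR;
        }

        // Mirrors the fallback branch: when no createTime is present, use the current time in seconds.
        static long nowInSeconds() {
            return System.currentTimeMillis() / MILLIS_CONVERT_FACTOR;
        }
    }
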
- protected Long getCreateTimeInVertex(LineageVertex vertex) { - if (vertex == null) { - return System.currentTimeMillis() / MILLIS_CONVERT_FACTOR; - } - - Long createTime = vertex.getCreateTime(); - - if (createTime != null) { - return createTime; - } - - if (vertex.getVertexType() == ImpalaVertexType.COLUMN) { - LineageVertexMetadata metadata = vertex.getMetadata(); - - if (metadata != null) { - return metadata.getTableCreateTime(); - } - } - - return System.currentTimeMillis() / MILLIS_CONVERT_FACTOR; - } - - protected ImpalaNode createTableNode(String tableName, Long createTime) { - // the created table vertex does not have its Id set as it is not referred in edge - LineageVertex tableVertex = new LineageVertex(); - tableVertex.setVertexType(ImpalaVertexType.TABLE); - tableVertex.setVertexId(tableName); - tableVertex.setCreateTime(createTime); - return new ImpalaNode(tableVertex); - } - - protected AtlasEntity createHiveDDLEntity(AtlasEntity dbOrTable) { - return createHiveDDLEntity(dbOrTable, true); - } - - protected AtlasEntity createHiveDDLEntity(AtlasEntity dbOrTable, boolean excludeEntityGuid) { - AtlasObjectId objId = BaseImpalaEvent.getObjectId(dbOrTable); - AtlasEntity hiveDDL = null; - - if (excludeEntityGuid) { - objId.setGuid(null); - } - - if (StringUtils.equals(objId.getTypeName(), HIVE_TYPE_DB)) { - hiveDDL = new AtlasEntity(ImpalaDataType.HIVE_DB_DDL.getName(), ATTRIBUTE_DB, objId); - } else if (StringUtils.equals(objId.getTypeName(), HIVE_TYPE_TABLE)) { - hiveDDL = new AtlasEntity(ImpalaDataType.HIVE_TABLE_DDL.getName(), ATTRIBUTE_TABLE, objId); - } - - if (hiveDDL != null) { - hiveDDL.setAttribute(ATTRIBUTE_SERVICE_TYPE, "impala"); - hiveDDL.setAttribute(ATTRIBUTE_EXEC_TIME, getQueryStartTime()); - hiveDDL.setAttribute(ATTRIBUTE_QUERY_TEXT, context.getQueryStr()); - hiveDDL.setAttribute(ATTRIBUTE_USER_NAME, getUserName()); - hiveDDL.setAttribute(ATTRIBUTE_NAME, context.getQueryStr() + QNAME_SEP_PROCESS + getQueryStartTime().toString()); - hiveDDL.setAttribute(ATTRIBUTE_QUALIFIED_NAME, hiveDDL.getAttribute(ATTRIBUTE_NAME)); - } - - return hiveDDL; - } - - protected boolean isDdlOperation() { - return (context.getImpalaOperationType().equals(ImpalaOperationType.CREATEVIEW) - || context.getImpalaOperationType().equals(ImpalaOperationType.ALTERVIEW_AS) - || context.getImpalaOperationType().equals(ImpalaOperationType.CREATETABLE_AS_SELECT)); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/CreateImpalaProcess.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/CreateImpalaProcess.java deleted file mode 100644 index 5e6ea5a55f..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/hook/events/CreateImpalaProcess.java +++ /dev/null @@ -1,361 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.hook.events; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.atlas.impala.hook.AtlasImpalaHookContext; -import org.apache.atlas.impala.model.ImpalaDataType; -import org.apache.atlas.impala.model.ImpalaDependencyType; -import org.apache.atlas.impala.model.ImpalaNode; -import org.apache.atlas.impala.model.ImpalaVertexType; -import org.apache.atlas.impala.model.LineageEdge; -import org.apache.atlas.impala.model.ImpalaQuery; -import org.apache.atlas.impala.model.LineageVertex; -import org.apache.atlas.impala.model.LineageVertexMetadata; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.commons.collections.CollectionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CreateImpalaProcess extends BaseImpalaEvent { - private static final Logger LOG = LoggerFactory.getLogger(CreateImpalaProcess.class); - - public CreateImpalaProcess(AtlasImpalaHookContext context) { - super(context); - } - - public List getNotificationMessages() throws Exception { - List ret = null; - AtlasEntitiesWithExtInfo entities = getEntities(); - - if (entities != null && CollectionUtils.isNotEmpty(entities.getEntities())) { - ret = Collections.singletonList(new EntityCreateRequestV2(getUserName(), entities)); - } - - return ret; - } - - public AtlasEntitiesWithExtInfo getEntities() throws Exception { - AtlasEntitiesWithExtInfo ret = null; - List inputNodes = new ArrayList<>(); - List outputNodes = new ArrayList<>(); - List inputs = new ArrayList<>(); - List outputs = new ArrayList<>(); - Set processedNames = new HashSet<>(); - - getInputOutList(context.getLineageQuery(), inputNodes, outputNodes); - - if (skipProcess(inputNodes, outputNodes)) { - return ret; - } - - ret = new AtlasEntitiesWithExtInfo(); - - if (!inputNodes.isEmpty()) { - for (ImpalaNode input : inputNodes) { - String qualifiedName = getQualifiedName(input); - - if (qualifiedName == null || !processedNames.add(qualifiedName)) { - continue; - } - - AtlasEntity entity = getInputOutputEntity(input, ret); - - if (entity != null) { - inputs.add(entity); - } - } - } - - if (outputNodes != null) { - for (ImpalaNode output : outputNodes) { - String qualifiedName = getQualifiedName(output); - - if (qualifiedName == null || !processedNames.add(qualifiedName)) { - continue; - } - - AtlasEntity entity = getInputOutputEntity(output, ret); - - if (entity != null) { - outputs.add(entity); - - if (isDdlOperation()) { - AtlasEntity ddlEntity = createHiveDDLEntity(entity); - if (ddlEntity != null) { - ret.addEntity(ddlEntity); - } - } - } - } - } - - if (!inputs.isEmpty() || !outputs.isEmpty()) { - AtlasEntity process = getImpalaProcessEntity(inputs, outputs); - if (process!= null) { - if (LOG.isDebugEnabled()) { - LOG.debug("get process entity with qualifiedName: {}", - 
process.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } - - ret.addEntity(process); - - AtlasEntity processExecution = getImpalaProcessExecutionEntity(process); - if (processExecution != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("get process executition entity with qualifiedName: {}", - processExecution.getAttribute(ATTRIBUTE_QUALIFIED_NAME)); - } - - ret.addEntity(processExecution); - } - - processColumnLineage(process, ret); - - addProcessedEntities(ret); - } - } else { - ret = null; - } - - - return ret; - } - - private void processColumnLineage(AtlasEntity impalaProcess, AtlasEntitiesWithExtInfo entities) { - List edges = context.getLineageQuery().getEdges(); - - if (CollectionUtils.isEmpty(edges)) { - return; - } - - final List columnLineages = new ArrayList<>(); - final Set processedOutputCols = new HashSet<>(); - - for (LineageEdge edge : edges) { - - if (!edge.getEdgeType().equals(ImpalaDependencyType.PROJECTION)) { - // Impala dependency type can only be predicate or projection. - // Impala predicate dependency: This is a dependency between a set of target - // columns (or exprs) and a set of source columns (base table columns). It - // indicates that the source columns restrict the values of their targets (e.g. - // by participating in WHERE clause predicates). It should not be part of lineage - continue; - } - - List outputColumns = new ArrayList<>(); - for (Long targetId : edge.getTargets()) { - LineageVertex columnVertex = verticesMap.get(targetId); - String outputColName = getQualifiedName(columnVertex); - AtlasEntity outputColumn = context.getEntity(outputColName); - - if (LOG.isDebugEnabled()) { - LOG.debug("processColumnLineage(): target id = {}, target column name = {}", - targetId, outputColName); - } - - if (outputColumn == null) { - LOG.warn("column-lineage: non-existing output-column {}", outputColName); - continue; - } - - if (processedOutputCols.contains(outputColName)) { - LOG.warn("column-lineage: duplicate for output-column {}", outputColName); - continue; - } else { - processedOutputCols.add(outputColName); - } - - outputColumns.add(outputColumn); - } - - List inputColumns = new ArrayList<>(); - - for (Long sourceId : edge.getSources()) { - LineageVertex columnVertex = verticesMap.get(sourceId); - String inputColName = getQualifiedName(columnVertex); - AtlasEntity inputColumn = context.getEntity(inputColName); - - if (inputColumn == null) { - LOG.warn("column-lineage: non-existing input-column {} with id ={}", inputColName, sourceId); - continue; - } - - inputColumns.add(inputColumn); - } - - if (inputColumns.isEmpty()) { - continue; - } - - AtlasEntity columnLineageProcess = new AtlasEntity(ImpalaDataType.IMPALA_COLUMN_LINEAGE.getName()); - - String columnQualifiedName = (String)impalaProcess.getAttribute(ATTRIBUTE_QUALIFIED_NAME) + - AtlasImpalaHookContext.QNAME_SEP_PROCESS + outputColumns.get(0).getAttribute(ATTRIBUTE_NAME); - columnLineageProcess.setAttribute(ATTRIBUTE_NAME, columnQualifiedName); - columnLineageProcess.setAttribute(ATTRIBUTE_QUALIFIED_NAME, columnQualifiedName); - columnLineageProcess.setAttribute(ATTRIBUTE_INPUTS, getObjectIds(inputColumns)); - columnLineageProcess.setAttribute(ATTRIBUTE_OUTPUTS, getObjectIds(outputColumns)); - columnLineageProcess.setAttribute(ATTRIBUTE_QUERY, getObjectId(impalaProcess)); - - // based on https://github.com/apache/impala/blob/master/fe/src/main/java/org/apache/impala/analysis/ColumnLineageGraph.java#L267 - // There are two types of dependencies that are represented as edges in the column - // lineage 
graph: - // a) Projection dependency: This is a dependency between a set of source - // columns (base table columns) and a single target (result expr or table column). - // This dependency indicates that values of the target depend on the values of the source - // columns. - // b) Predicate dependency: This is a dependency between a set of target - // columns (or exprs) and a set of source columns (base table columns). It indicates that - // the source columns restrict the values of their targets (e.g. by participating in - // WHERE clause predicates). - columnLineageProcess.setAttribute(ATTRIBUTE_DEPENDENCY_TYPE, ImpalaDependencyType.PROJECTION.getName()); - - columnLineages.add(columnLineageProcess); - } - - for (AtlasEntity columnLineage : columnLineages) { - String columnQualifiedName = (String)columnLineage.getAttribute(ATTRIBUTE_QUALIFIED_NAME); - if (LOG.isDebugEnabled()) { - LOG.debug("get column lineage entity with qualifiedName: {}", columnQualifiedName); - } - - entities.addEntity(columnLineage); - } - } - - // Process the impala query, classify the vertices as input or output based on LineageEdge - // Then organize the vertices into hierarchical structure: put all column vertices of a table - // as children of a ImpalaNode representing that table. - private void getInputOutList(ImpalaQuery lineageQuery, List inputNodes, - List outputNodes) { - // get vertex map with key being its id and - // ImpalaNode map with its own vertex's vertexId as its key - for (LineageVertex vertex : lineageQuery.getVertices()) { - updateVertexMap(vertex); - } - - // get set of source ID and set of target Id - Set sourceIds = new HashSet<>(); - Set targetIds = new HashSet<>(); - for (LineageEdge edge : lineageQuery.getEdges()) { - if (ImpalaDependencyType.PROJECTION.equals(edge.getEdgeType())) { - sourceIds.addAll(edge.getSources()); - targetIds.addAll(edge.getTargets()); - } - } - - Map inputMap = buildInputOutputList(sourceIds, verticesMap, vertexNameMap); - Map outputMap = buildInputOutputList(targetIds, verticesMap, vertexNameMap); - - inputNodes.addAll(inputMap.values()); - outputNodes.addAll(outputMap.values()); - } - - // Update internal maps using this vertex. - private void updateVertexMap(LineageVertex vertex) { - verticesMap.put(vertex.getId(), vertex); - vertexNameMap.put(vertex.getVertexId(), new ImpalaNode(vertex)); - - if (vertex.getVertexType() == ImpalaVertexType.COLUMN) { - LineageVertexMetadata metadata = vertex.getMetadata(); - - if (metadata == null) { - return; - } - - // if the vertex is column and contains metadata, create a vertex for its table - String tableName = metadata.getTableName(); - ImpalaNode tableNode = vertexNameMap.get(tableName); - - if (tableNode == null) { - tableNode = createTableNode(tableName, metadata.getTableCreateTime()); - vertexNameMap.put(tableName, tableNode); - } - } - } - - /** - * From the list of Ids and Id to Vertices map, generate the Table name to ImpalaNode map. - * @param idSet the list of Ids. They are from lineage edges - * @param vertexMap the Id to Vertex map - * @param vertexNameMap the vertexId to ImpalaNode map. 
- * @return the table name to ImpalaNode map, whose table node contains its columns - */ - private Map buildInputOutputList(Set idSet, Map vertexMap, - Map vertexNameMap) { - Map returnTableMap = new HashMap<>(); - - for (Long id : idSet) { - LineageVertex vertex = vertexMap.get(id); - if (vertex == null) { - LOG.warn("cannot find vertex with id: {}", id); - continue; - } - - if (ImpalaVertexType.COLUMN.equals(vertex.getVertexType())) { - // add column to its table node - String tableName = getTableNameFromVertex(vertex); - if (tableName == null) { - LOG.warn("cannot find tableName for vertex with id: {}, column name : {}", - id, vertex.getVertexId() == null? "null" : vertex.getVertexId()); - - continue; - } - - ImpalaNode tableNode = returnTableMap.get(tableName); - - if (tableNode == null) { - tableNode = vertexNameMap.get(tableName); - - if (tableNode == null) { - LOG.warn("cannot find table node for vertex with id: {}, column name : {}", - id, vertex.getVertexId()); - - tableNode = createTableNode(tableName, getCreateTimeInVertex(null)); - vertexNameMap.put(tableName, tableNode); - } - - returnTableMap.put(tableName, tableNode); - } - - tableNode.addChild(vertex); - } - } - - return returnTableMap; - } - - private boolean skipProcess(List inputNodes, List ouputNodes) { - if (inputNodes.isEmpty() || ouputNodes.isEmpty()) { - return true; - } - - return false; - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDataType.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDataType.java deleted file mode 100644 index 4e0d478706..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDataType.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.impala.model; - -/** - * Data types used for Impala bridge - */ -public enum ImpalaDataType { - - IMPALA_PROCESS, - IMPALA_PROCESS_EXECUTION, - IMPALA_COLUMN_LINEAGE, - HIVE_DB_DDL, - HIVE_TABLE_DDL; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDependencyType.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDependencyType.java deleted file mode 100644 index 892ee9b2fb..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaDependencyType.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -public enum ImpalaDependencyType { - PROJECTION("PROJECTION"), - PREDICATE("PREDICATE"); - - private final String name; - - ImpalaDependencyType(String name) { - this.name = name; - } - - public String getName() { - return name.toUpperCase(); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaNode.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaNode.java deleted file mode 100644 index a3ddf53729..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaNode.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -import java.util.HashMap; -import java.util.Map; - -/** - * Contain vertex info of this node and its children. It is used only internally - */ -public class ImpalaNode { - LineageVertex ownVertex; - Map children; - - public ImpalaNode(LineageVertex ownVertex) { - this.ownVertex = ownVertex; - children = new HashMap<>(); - } - - public String getNodeName() { return ownVertex.getVertexId(); } - public ImpalaVertexType getNodeType() { return ownVertex.getVertexType(); } - public LineageVertex getOwnVertex() { return ownVertex; } - public Map getChildren() { return children; } - - /** - * Add child to this node - * @param child - * @return the node corresponding to the input child vertex - */ - public ImpalaNode addChild(LineageVertex child) { - ImpalaNode exitingChild = children.get(child.getId()); - if (exitingChild != null) { - return exitingChild; - } - - ImpalaNode newChild = new ImpalaNode(child); - return children.put(child.getId(), newChild); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaOperationType.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaOperationType.java deleted file mode 100644 index a893b8845c..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaOperationType.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.impala.model; - -public enum ImpalaOperationType{ - // main operation type - CREATEVIEW ("CREATEVIEW"), - CREATETABLE_AS_SELECT ("CREATETABLE_AS_SELECT"), - ALTERVIEW_AS ("ALTERVIEW_AS"), - QUERY ("QUERY"), - - // sub operation type, which is associated with output - INSERT ("INSERT"), - INSERT_OVERWRITE ("INSERT_OVERWRITE"), - - // default type - UNKNOWN ("UNKNOWN"); - - private final String name; - - ImpalaOperationType(String s) { - name = s; - } - - public boolean equalsName(String otherName) { - return name.equals(otherName); - } - - public String toString() { - return this.name; - } -} \ No newline at end of file diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaQuery.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaQuery.java deleted file mode 100644 index 27bdc72e9d..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaQuery.java +++ /dev/null @@ -1,110 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import java.util.List; - -/** - * Represent an Impala lineage record in lineage log. - */ -@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -public class ImpalaQuery { - private String queryText; - private String queryId; - private String hash; - private String user; - - // the time stamp is in seconds. It is Unix epoch, which is the number of seconds that have - // elapsed since January 1, 1970 (midnight UTC/GMT), not counting leap seconds - private Long timestamp; - private Long endTime; - private List edges; - private List vertices; - - public List getEdges() { - return edges; - } - - public List getVertices() { - return vertices; - } - - public Long getEndTime() { - return endTime; - } - - public String getHash() { - return hash; - } - - public String getQueryId() { - return queryId; - } - - public String getQueryText() { - return queryText; - } - - public Long getTimestamp() { - return timestamp; - } - - public String getUser() { - return user; - } - - public void setEdges(List edges) { - this.edges = edges; - } - - public void setEndTime(Long endTime) { - this.endTime = endTime; - } - - public void setHash(String hash) { - this.hash = hash; - } - - public void setQueryId(String queryId) { - this.queryId = queryId; - } - - public void setQueryText(String queryText) { - this.queryText = queryText; - } - - public void setTimestamp(Long timestamp) { this.timestamp = timestamp; } - - public void setUser(String user) { - this.user = user; - } - - public void setVertices(List vertices) { - this.vertices = vertices; - } - -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaVertexType.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaVertexType.java deleted file mode 100644 index 8ec3f857ad..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/ImpalaVertexType.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -public enum ImpalaVertexType { - DFS_DIR("DFS_DIR"), - PARTITION("PARTITION"), - COLUMN("COLUMN"), - TABLE("TABLE"), - DATABASE("DATABASE"); - - private final String name; - - ImpalaVertexType(String name) { - this.name = name; - } - - public String getName() { - return name.toUpperCase(); - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageEdge.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageEdge.java deleted file mode 100644 index 251507e9e1..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageEdge.java +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import java.util.List; - -/** - * This represents an edge in Impala's lineage record that connects two entities - */ -@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -public class LineageEdge { - private List sources; - private List targets; - private ImpalaDependencyType edgeType; - - public List getSources() { - return sources; - } - - public List getTargets() { - return targets; - } - - public ImpalaDependencyType getEdgeType() { - return edgeType; - } - - public void setSources(List sources) { - this.sources = sources; - } - - public void setTargets(List targets) { - this.targets = targets; - } - - public void setEdgeType(ImpalaDependencyType edgeType) { - this.edgeType = edgeType; - } -} diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertex.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertex.java deleted file mode 100644 index 0a664fc8b6..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertex.java +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * This represents an entity in Impala's lineage record. - */ -@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -public class LineageVertex { - // id is used to reference this entity. It is used in LineageEdge to specify source and target - // https://github.com/apache/impala/blob/master/be/src/util/lineage-util.h#L40 - // Impala id is int64. Therefore, define this field as Long - private Long id; - - // specify the type of the entity, it could be "TABLE", "COLUMN" etc. - private ImpalaVertexType vertexType; - - // specify the name of the entity - private String vertexId; - - // It is optional, and could be null. It is only set if the entity is a column, and this field contains metadata of its table. - private LineageVertexMetadata metadata; - - // It is optional. Its unit in seconds. - private Long createTime; - - public Long getId() { return id; } - - public ImpalaVertexType getVertexType() { - return vertexType; - } - - public String getVertexId() { - return vertexId; - } - - public LineageVertexMetadata getMetadata() { - return metadata; - } - - public Long getCreateTime() { return createTime; } - - public void setId(Long id) { - this.id = id; - } - - public void setVertexType(ImpalaVertexType vertexType) { - this.vertexType = vertexType; - } - - public void setVertexId(String vertexId) { - this.vertexId = vertexId; - } - - public void setMetadata(LineageVertexMetadata metadata) { this.metadata = metadata; } - - public void setCreateTime(Long createTime) { this.createTime = createTime; } -} \ No newline at end of file diff --git a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertexMetadata.java b/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertexMetadata.java deleted file mode 100644 index 2b3226c21d..0000000000 --- a/addons/impala-bridge/src/main/java/org/apache/atlas/impala/model/LineageVertexMetadata.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.impala.model; - -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * This represents optional metadata in Impala's lineage vertex entity. - */ -@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -public class LineageVertexMetadata { - // specify the name of the table - private String tableName; - - // the create time of the table. Its unit is in seconds. - private Long tableCreateTime; - - public String getTableName() { return tableName; } - - public Long getTableCreateTime() { return tableCreateTime; } - - public void setTableName(String tableName) { this.tableName = tableName; } - - public void setTableCreateTime(Long createTime) { this.tableCreateTime = createTime; } -} diff --git a/addons/impala-bridge/src/main/resources/atlas-log4j.xml b/addons/impala-bridge/src/main/resources/atlas-log4j.xml deleted file mode 100644 index 97317a8754..0000000000 --- a/addons/impala-bridge/src/main/resources/atlas-log4j.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/addons/impala-bridge/src/main/resources/import-impala.sh b/addons/impala-bridge/src/main/resources/import-impala.sh deleted file mode 100644 index b440f2d056..0000000000 --- a/addons/impala-bridge/src/main/resources/import-impala.sh +++ /dev/null @@ -1,114 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. See accompanying LICENSE file. -# -# resolve links - $0 may be a softlink -PRG="${0}" - -[[ `uname -s` == *"CYGWIN"* ]] && CYGWIN=true - -while [ -h "${PRG}" ]; do - ls=`ls -ld "${PRG}"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "${PRG}"`/"$link" - fi -done - -BASEDIR=`dirname ${PRG}` - -if test -z "${JAVA_HOME}" -then - JAVA_BIN=`which java` - JAR_BIN=`which jar` -else - JAVA_BIN="${JAVA_HOME}/bin/java" - JAR_BIN="${JAVA_HOME}/bin/jar" -fi -export JAVA_BIN - -if [ ! -e "${JAVA_BIN}" ] || [ ! -e "${JAR_BIN}" ]; then - echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available." 
- exit 1 -fi - -# Construct ATLAS_CONF where atlas-properties reside -# assume the hive-server2 is installed and contains Atlas configuration -# Otherwise, need to setup Atlas required properties and libraries before running this tool -if [ ! -z "$HIVE_CONF_DIR" ]; then - HIVE_CONF=$HIVE_CONF_DIR -elif [ ! -z "$HIVE_HOME" ]; then - HIVE_CONF="$HIVE_HOME/conf" -elif [ -e /etc/hive/conf ]; then - HIVE_CONF="/etc/hive/conf" -else - echo "Could not find a valid HIVE configuration for ATLAS" - exit 1 -fi -if [ -z "$ATLAS_CONF" ]; then - export ATLAS_CONF=$HIVE_CONF -fi - -# log dir for applications -ATLAS_LOG_DIR="/var/log/atlas" -ATLAS_LOG_FILE="impala-bridge.log" -LOG_CONFIG="${BASEDIR}/atlas-log4j.xml" - -# Construct Atlas classpath. -DIR=$PWD -PARENT="$(dirname "$DIR")" -GRANDPARENT="$(dirname "$PARENT")" -LIB_PATH="$GRANDPARENT/server/webapp/atlas/WEB-INF/lib" -echo "$LIB_PATH" -# Construct Atlas classpath. -for i in "$LIB_PATH/"*.jar; do - ATLASCPPATH="${ATLASCPPATH}:$i" -done - -for i in "${BASEDIR}/"*.jar; do - ATLASCPPATH="${ATLASCPPATH}:$i" -done - -if [ -z "${ATLAS_CONF_DIR}" ] && [ -e /etc/atlas/conf ];then - ATLAS_CONF_DIR=/etc/atlas/conf -fi -ATLASCPPATH=${ATLASCPPATH}:${ATLAS_CONF_DIR} - -echo "Logging: ${ATLAS_LOG_DIR}/${ATLAS_LOG_FILE}" -echo "Log config: ${LOG_CONFIG}" - -TIME=`date %Y%m%d%H%M%s` -CP="${ATLASCPPATH}:${ATLAS_CONF}" - -# If running in cygwin, convert pathnames and classpath to Windows format. -if [ "${CYGWIN}" == "true" ] -then - ATLAS_LOG_DIR=`cygpath -w ${ATLAS_LOG_DIR}` - ATLAS_LOG_FILE=`cygpath -w ${ATLAS_LOG_FILE}` - CP=`cygpath -w -p ${CP}` -fi - -JAVA_PROPERTIES="$ATLAS_OPTS -Datlas.log.dir=$ATLAS_LOG_DIR -Datlas.log.file=$ATLAS_LOG_FILE -Dlog4j.configuration=file://$LOG_CONFIG" - -IMPORT_ARGS=$@ -JVM_ARGS= - -JAVA_PROPERTIES="${JAVA_PROPERTIES} ${JVM_ARGS}" -"${JAVA_BIN}" ${JAVA_PROPERTIES} -cp "${CP}" org.apache.atlas.impala.ImpalaLineageTool $IMPORT_ARGS - -RETVAL=$? -[ $RETVAL -eq 0 ] && echo Done! -[ $RETVAL -ne 0 ] && echo Failed! -exit $RETVAL \ No newline at end of file diff --git a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageITBase.java b/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageITBase.java deleted file mode 100644 index ef23a26d10..0000000000 --- a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageITBase.java +++ /dev/null @@ -1,495 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.impala; - -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_QUALIFIED_NAME; -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_QUERY_TEXT; -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_RECENT_QUERIES; -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.HIVE_TYPE_DB; -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.HIVE_TYPE_TABLE; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.impala.hook.AtlasImpalaHookContext; -import org.apache.atlas.impala.hook.ImpalaLineageHook; -import org.apache.atlas.impala.hook.events.BaseImpalaEvent; -import org.apache.atlas.impala.model.ImpalaDataType; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.Driver; -import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.testng.annotations.BeforeClass; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; - -public class ImpalaLineageITBase { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaLineageITBase.class); - - public static final String DEFAULT_DB = "default"; - public static final String SEP = ":".intern(); - public static final String IO_SEP = "->".intern(); - protected static final String DGI_URL = "http://localhost:21000/"; - protected static final String CLUSTER_NAME = "primary"; - protected static final String PART_FILE = "2015-01-01"; - protected static final String INPUTS = "inputs"; - protected static final String OUTPUTS = "outputs"; - protected static AtlasClientV2 atlasClientV2; - - private static final String REFERENCEABLE_ATTRIBUTE_NAME = "qualifiedName"; - private static final String ATTR_NAME = "name"; - - // to push entity creation/update to HMS, so HMS hook can push the metadata notification - // to Atlas, then the lineage notification from this tool can be created at Atlas - protected static Driver driverWithoutContext; - protected static SessionState ss; - protected static HiveConf conf; - - - @BeforeClass - public void setUp() throws Exception { - //Set-up hive session - conf = new HiveConf(); - conf.setClassLoader(Thread.currentThread().getContextClassLoader()); - HiveConf conf = new HiveConf(); - SessionState ss = new SessionState(conf); - ss = SessionState.start(ss); - SessionState.setCurrentSessionState(ss); - driverWithoutContext = new Driver(conf); - - Configuration configuration = ApplicationProperties.get(); - - String[] atlasEndPoint = configuration.getStringArray(ImpalaLineageHook.ATLAS_ENDPOINT); - if (atlasEndPoint == null || atlasEndPoint.length == 0) { - atlasEndPoint = new String[]{DGI_URL}; - } - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClientV2 = new 
AtlasClientV2(atlasEndPoint, new String[]{"admin", "admin"}); - } else { - atlasClientV2 = new AtlasClientV2(atlasEndPoint); - } - - } - - // return guid of the entity - protected String assertEntityIsRegistered(final String typeName, final String property, final String value, - final AssertPredicate assertPredicate) throws Exception { - waitFor(100000, new Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections - .singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - } - }); - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByAttribute(typeName, Collections.singletonMap(property,value)); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - return (String) entity.getGuid(); - } - - protected String assertEntityIsRegistered(final String typeName, List processQFNames, - final AssertPredicates assertPredicates) throws Exception { - List> attributesList = new ArrayList<>(); - - for (String processName : processQFNames) { - attributesList.add(Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, processName)); - } - - return waitForWithReturn(80000, new PredicateWithReturn() { - @Override - public String evaluate() throws Exception { - AtlasEntity.AtlasEntitiesWithExtInfo atlasEntitiesWithExtInfo = atlasClientV2.getEntitiesByAttribute(typeName, attributesList); - List entities = atlasEntitiesWithExtInfo.getEntities(); - assertNotNull(entities); - if (assertPredicates != null) { - return assertPredicates.assertOnEntities(entities); - } - - return null; - } - }); - } - - protected String assertEntityIsRegisteredViaGuid(String guid, - final AssertPredicate assertPredicate) throws Exception { - waitFor(80000, new Predicate() { - @Override - public void evaluate() throws Exception { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByGuid(guid); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - - } - }); - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2.getEntityByGuid(guid); - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - return (String) entity.getGuid(); - } - - - protected String assertProcessIsRegistered(List processQFNames, String queryString) throws Exception { - try { - Thread.sleep(5000); - - LOG.debug("Searching for process with query {}", queryString); - - return assertEntityIsRegistered(ImpalaDataType.IMPALA_PROCESS.getName(), processQFNames, new AssertPredicates() { - @Override - public String assertOnEntities(final List entities) throws Exception { - for (AtlasEntity entity : entities) { - List recentQueries = (List) entity - .getAttribute(ATTRIBUTE_RECENT_QUERIES); - - if (queryString.equalsIgnoreCase(recentQueries.get(0))) - return entity.getGuid(); - - } - - throw new IllegalStateException("Not found entity with matching query"); - } - }); - } catch(Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - protected String assertProcessIsRegistered(String processQFName, String queryString) throws Exception { - try { - Thread.sleep(5000); - - LOG.debug("Searching for process with qualified name {} and query {}", processQFName, queryString); - - return 
assertEntityIsRegistered(ImpalaDataType.IMPALA_PROCESS.getName(), ATTRIBUTE_QUALIFIED_NAME, processQFName, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - List recentQueries = (List) entity.getAttribute(ATTRIBUTE_RECENT_QUERIES); - - Assert.assertEquals(recentQueries.get(0), lower(queryString)); - } - }); - } catch(Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - private String assertProcessExecutionIsRegistered(AtlasEntity impalaProcess, final String queryString) throws Exception { - try { - Thread.sleep(5000); - - String guid = ""; - List processExecutions = toAtlasObjectIdList(impalaProcess.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS_EXECUTIONS)); - for (AtlasObjectId processExecution : processExecutions) { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2. - getEntityByGuid(processExecution.getGuid()); - - AtlasEntity entity = atlasEntityWithExtInfo.getEntity(); - if (String.valueOf(entity.getAttribute(ATTRIBUTE_QUERY_TEXT)).equals(queryString.toLowerCase().trim())) { - guid = entity.getGuid(); - break; - } - } - - return assertEntityIsRegisteredViaGuid(guid, new AssertPredicate() { - @Override - public void assertOnEntity(final AtlasEntity entity) throws Exception { - String queryText = (String) entity.getAttribute(ATTRIBUTE_QUERY_TEXT); - Assert.assertEquals(queryText, queryString.toLowerCase().trim()); - } - }); - } catch(Exception e) { - LOG.error("Exception : ", e); - throw e; - } - } - - protected AtlasObjectId toAtlasObjectId(Object obj) { - final AtlasObjectId ret; - - if (obj instanceof AtlasObjectId) { - ret = (AtlasObjectId) obj; - } else if (obj instanceof Map) { - ret = new AtlasObjectId((Map) obj); - } else if (obj != null) { - ret = new AtlasObjectId(obj.toString()); // guid - } else { - ret = null; - } - - return ret; - } - - protected List toAtlasObjectIdList(Object obj) { - final List ret; - - if (obj instanceof Collection) { - Collection coll = (Collection) obj; - - ret = new ArrayList<>(coll.size()); - - for (Object item : coll) { - AtlasObjectId objId = toAtlasObjectId(item); - - if (objId != null) { - ret.add(objId); - } - } - } else { - AtlasObjectId objId = toAtlasObjectId(obj); - - if (objId != null) { - ret = new ArrayList<>(1); - - ret.add(objId); - } else { - ret = null; - } - } - - return ret; - } - - - protected String assertDatabaseIsRegistered(String dbName) throws Exception { - return assertDatabaseIsRegistered(dbName, null); - } - - protected String assertDatabaseIsRegistered(String dbName, AssertPredicate assertPredicate) throws Exception { - LOG.debug("Searching for database: {}", dbName); - - String dbQualifiedName = dbName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME; - - dbQualifiedName = dbQualifiedName.toLowerCase(); - - return assertEntityIsRegistered(HIVE_TYPE_DB, REFERENCEABLE_ATTRIBUTE_NAME, dbQualifiedName, assertPredicate); - } - - protected String assertTableIsRegistered(String dbName, String tableName) throws Exception { - return assertTableIsRegistered(dbName, tableName, null, false); - } - - protected String assertTableIsRegistered(String fullTableName) throws Exception { - return assertTableIsRegistered(fullTableName, null, false); - } - - protected String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception { - LOG.debug("Searching for table {}.{}", dbName, tableName); - - String fullTableName = dbName + 
AtlasImpalaHookContext.QNAME_SEP_ENTITY_NAME + tableName; - - return assertTableIsRegistered(fullTableName, assertPredicate, isTemporary); - } - - protected String assertTableIsRegistered(String fullTableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception { - LOG.debug("Searching for table {}", fullTableName); - - String tableQualifiedName = (fullTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE).toLowerCase() + - CLUSTER_NAME; - - return assertEntityIsRegistered(HIVE_TYPE_TABLE, REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName, - assertPredicate); - } - - protected String createDatabase() throws Exception { - String dbName = dbName(); - - return createDatabase(dbName); - } - - protected String createDatabase(String dbName) throws Exception { - runCommandWithDelay("CREATE DATABASE IF NOT EXISTS " + dbName, 3000); - - return dbName; - } - - protected String createTable(String dbName, String columnsString) throws Exception { - return createTable(dbName, columnsString, false); - } - - protected String createTable(String dbName, String columnsString, boolean isPartitioned) throws Exception { - String tableName = tableName(); - return createTable(dbName, tableName, columnsString, isPartitioned); - } - - protected String createTable(String dbName, String tableName, String columnsString, boolean isPartitioned) throws Exception { - runCommandWithDelay("CREATE TABLE IF NOT EXISTS " + dbName + "." + tableName + " " + columnsString + " comment 'table comment' " + (isPartitioned ? " partitioned by(dt string)" : ""), 3000); - - return dbName + "." + tableName; - } - - protected AtlasEntity validateProcess(String processQFName, String queryString) throws Exception { - String processId = assertProcessIsRegistered(processQFName, queryString); - AtlasEntity processEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - return processEntity; - } - - protected AtlasEntity validateProcess(List processQFNames, String queryString) throws Exception { - String processId = assertProcessIsRegistered(processQFNames, queryString); - AtlasEntity processEntity = atlasClientV2.getEntityByGuid(processId).getEntity(); - - return processEntity; - } - - protected AtlasEntity validateProcessExecution(AtlasEntity impalaProcess, String queryString) throws Exception { - String processExecutionId = assertProcessExecutionIsRegistered(impalaProcess, queryString); - AtlasEntity processExecutionEntity = atlasClientV2.getEntityByGuid(processExecutionId).getEntity(); - return processExecutionEntity; - } - - protected int numberOfProcessExecutions(AtlasEntity impalaProcess) { - return toAtlasObjectIdList(impalaProcess.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS_EXECUTIONS)).size(); - } - - public interface AssertPredicate { - void assertOnEntity(AtlasEntity entity) throws Exception; - } - - public interface AssertPredicates { - String assertOnEntities(List entities) throws Exception; - } - - public interface PredicateWithReturn { - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. - */ - String evaluate() throws Exception; - } - - public interface Predicate { - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. 
- */ - void evaluate() throws Exception; - } - - /** - * Wait for a condition, expressed via a {@link Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. - */ - protected void waitFor(int timeout, Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - predicate.evaluate(); - return; - } catch(Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - Thread.sleep(5000); - } - } - } - - /** - * Wait for a condition, expressed via a {@link Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. - */ - protected String waitForWithReturn(int timeout, PredicateWithReturn predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - return predicate.evaluate(); - } catch(Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - Thread.sleep(5000); - } - } - } - - public static String lower(String str) { - if (StringUtils.isEmpty(str)) { - return null; - } - return str.toLowerCase().trim(); - } - - protected void runCommand(String cmd) throws Exception { - runCommandWithDelay(cmd, 0); - } - - protected void runCommandWithDelay(String cmd, int sleepMs) throws Exception { - runCommandWithDelay(driverWithoutContext, cmd, sleepMs); - } - - protected void runCommandWithDelay(Driver driver, String cmd, int sleepMs) throws Exception { - LOG.debug("Running command '{}'", cmd); - CommandProcessorResponse response = driver.run(cmd); - assertEquals(response.getResponseCode(), 0); - if (sleepMs != 0) { - Thread.sleep(sleepMs); - } - } - - protected String random() { - return RandomStringUtils.randomAlphanumeric(10); - } - - protected String tableName() { - return "table_" + random(); - } - protected String dbName() {return "db_" + random();} -} diff --git a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageToolIT.java b/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageToolIT.java deleted file mode 100644 index 53e9b1224a..0000000000 --- a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/ImpalaLineageToolIT.java +++ /dev/null @@ -1,655 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.impala; - -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_QUERY_TEXT; - -import java.util.ArrayList; -import java.util.List; -import org.apache.atlas.impala.hook.AtlasImpalaHookContext; -import org.apache.atlas.impala.hook.ImpalaLineageHook; -import org.apache.atlas.impala.hook.events.BaseImpalaEvent; -import org.apache.atlas.impala.model.ImpalaQuery; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.testng.Assert; -import org.testng.annotations.Test; - -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_DDL_QUERIES; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - -public class ImpalaLineageToolIT extends ImpalaLineageITBase { - public static final long TABLE_CREATE_TIME_SOURCE = 1554750070; - public static final long TABLE_CREATE_TIME = 1554750072; - private static String dir = System.getProperty("user.dir") + "/src/test/resources/"; - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "create view" command lineage - * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateViewFromFile() { - // this file contains a single lineage record for "create view". 
- // It has table vertex with createTime - String IMPALA = dir + "impalaCreateView.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - List lineageList = new ArrayList<>(); - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - try { - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_1"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "view_1"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_3 - String createTime = new Long((long)(1554750072)*1000).toString(); - String processQFName = - "db_1.view_1" + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "create view db_1.view_1 as select count, id from db_1.table_1"; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } catch (Exception e) { - System.out.print("Appending file error"); - } - } - - /** - * This tests is for create view query with extra comment and spaces added in between: - * 1) ImpalaLineageTool can parse one lineage file that contains " create view" command lineage - * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateViewWithCommentSpacesFromFile() { - // this file contains a single lineage record for "create view". 
- // It has table vertex with createTime - String IMPALA = dir + "impalaCreateViewWithCommentSpaces.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - List lineageList = new ArrayList<>(); - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - try { - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_8"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "view_1"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_3 - String createTime = new Long((long)(1554750072)*1000).toString(); - String processQFName = - "db_8.view_1" + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = " create /* comment1 */ view db_8.view_1 as select /* comment2 */ count, id from db_8.table_1"; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } catch (Exception e) { - System.out.print("Appending file error"); - } - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "create view" command lineage, - * but there is no table vertex with createTime. - * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateViewNoCreateTimeFromFile() { - // this file contains a single lineage record for "create view". - // there is no table vertex with createTime, which is lineage record generated by Impala - // originally. The table create time is hard-coded before Impala fixes this issue. 
- String IMPALA = dir + "impalaCreateViewNoCreateTime.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - List lineageList = new ArrayList<>(); - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - try { - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_2"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "view_1"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - Long beforeCreateTime = System.currentTimeMillis() / BaseImpalaEvent.MILLIS_CONVERT_FACTOR; - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - Long afterCreateTime = System.currentTimeMillis() / BaseImpalaEvent.MILLIS_CONVERT_FACTOR; - - String processQFNameWithoutTime = - dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS; - processQFNameWithoutTime = processQFNameWithoutTime.toLowerCase(); - - List processQFNames = new ArrayList<>(); - String createTime = new Long(beforeCreateTime.longValue()*1000).toString(); - processQFNames.add(processQFNameWithoutTime + createTime); - - if (beforeCreateTime != afterCreateTime) { - createTime = new Long(afterCreateTime.longValue() * 1000).toString(); - processQFNames.add(processQFNameWithoutTime + createTime); - } - - // verify the process is saved in Atlas. the value is from info in IMPALA_4. - // There is no createTime in lineage record, so we don't know the process qualified name - // And can only verify the process is created for the given query. - String queryString = "create view " + dbName + "." + targetTableName + " as select count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFNames, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } catch (Exception e) { - System.out.print("Appending file error"); - } - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "create table as select" command lineage, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateTableAsSelectFromFile() throws Exception { - String IMPALA = dir + "impalaCreateTableAsSelect.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_3"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "table_2"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime = new Long(TABLE_CREATE_TIME*1000).toString(); - String processQFName = - dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "create table " + dbName + "." + targetTableName + " as select count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } - - /** - * This tests is based on extra comment and spaces adding to create table as select query - * 1) ImpalaLineageTool can parse one lineage file that contains "create table as select" command lineage, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateTableAsSelectWithCommentSpacesFromFile() throws Exception { - String IMPALA = dir + "impalaCreateTableAsSelectWithCommentSpaces.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_9"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "table_2"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime = new Long(TABLE_CREATE_TIME*1000).toString(); - String processQFName = - dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "create /* Test */ table " + dbName + "." - + targetTableName + " as /* Test */ select count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "alter view as select" command lineage, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testAlterViewAsSelectFromFile() throws Exception { - String IMPALA = dir + "impalaAlterViewAsSelect.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_4"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "view_1"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime = new Long(TABLE_CREATE_TIME*1000).toString(); - String processQFName = - dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "alter view " + dbName + "." + targetTableName + " as select count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } - - /** - * This tests is for extra comment and spaces present in alter view as select query - * 1) ImpalaLineageTool can parse one lineage file that contains "alter view as select" command lineage, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testAlterViewAsSelectWithCommentSpacesFromFile() throws Exception { - String IMPALA = dir + "impalaAlterViewAsSelectWithCommentSpaces.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_10"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "view_1"; - createTable(dbName, targetTableName,"(count int, id string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime = new Long(TABLE_CREATE_TIME*1000).toString(); - String processQFName = - dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "alter /* comment1 */ view " + dbName + "." + targetTableName - + " as select /* comment1 */ count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "insert into" command lineage, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testInsertIntoAsSelectFromFile() throws Exception { - String IMPALA = dir + "impalaInsertIntoAsSelect.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_5"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "table_2"; - createTable(dbName, targetTableName,"(count int, id string, int_col int)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime1 = new Long(TABLE_CREATE_TIME_SOURCE*1000).toString(); - String createTime2 = new Long(TABLE_CREATE_TIME*1000).toString(); - String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime1; - String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime2; - String processQFName = "QUERY:" + sourceQFName.toLowerCase() + "->:INSERT:" + targetQFName.toLowerCase(); - - String queryString = "insert into table " + dbName + "." + targetTableName + " (count, id) select count, id from " + dbName + "." + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 0); - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains multiple "insert into" command lineages, - * there is table vertex with createTime. 
- * 2) Lineage is sent to Atlas - * 3) Atlas can get these lineages from Atlas - */ - @Test - public void testMultipleInsertIntoAsSelectFromFile() throws Exception { - String IMPALA = dir + "impalaMultipleInsertIntoAsSelect1.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "db_6"; - createDatabase(dbName); - - String sourceTableName = "table_1"; - createTable(dbName, sourceTableName,"(id string, count int)", false); - - String targetTableName = "table_2"; - createTable(dbName, targetTableName,"(count int, id string, int_col int)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // re-run the same lineage record, should have the same process entity and another process execution entity - Thread.sleep(5000); - IMPALA = dir + "impalaMultipleInsertIntoAsSelect2.json"; - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - Thread.sleep(5000); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime1 = new Long(TABLE_CREATE_TIME_SOURCE*1000).toString(); - String createTime2 = new Long(TABLE_CREATE_TIME*1000).toString(); - String sourceQFName = dbName + "." + sourceTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime1; - String targetQFName = dbName + "." + targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime2; - String processQFName = "QUERY:" + sourceQFName.toLowerCase() + "->:INSERT:" + targetQFName.toLowerCase(); - - String queryString = "insert into table " + dbName + "." + targetTableName + " (count, id) select count, id from " + dbName + "." + sourceTableName; - queryString = queryString.toLowerCase().trim(); - String queryString2 = queryString; - - Thread.sleep(5000); - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - - List processExecutions = toAtlasObjectIdList(processEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS_EXECUTIONS)); - Assert.assertEquals(processExecutions.size(), 2); - for (AtlasObjectId processExecutionId : processExecutions) { - AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = atlasClientV2. 
- getEntityByGuid(processExecutionId.getGuid()); - - AtlasEntity processExecutionEntity = atlasEntityWithExtInfo.getEntity(); - String entityQueryText = String.valueOf(processExecutionEntity.getAttribute(ATTRIBUTE_QUERY_TEXT)).toLowerCase().trim(); - if (!(queryString.equalsIgnoreCase(entityQueryText) || queryString2.equalsIgnoreCase(entityQueryText))) { - String errorMessage = String.format("process query text '%s' does not match expected value of '%s' or '%s'", entityQueryText, queryString, queryString2); - Assert.assertTrue(false, errorMessage); - } - } - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 0); - } - - /** - * This tests - * 1) ImpalaLineageTool can parse one lineage file that contains "create table as select" command lineage, - * there is table vertex with createTime. The target vertex's vertexId does not contain db name and table name - * 2) Lineage is sent to Atlas - * 3) Atlas can get this lineage from Atlas - */ - @Test - public void testCreateTableAsSelectVertexIdNoTableNameFromFile() throws Exception { - String IMPALA = dir + "impalaCreateTableAsSelectVertexIdNoTableName.json"; - String IMPALA_WAL = dir + "WALimpala.wal"; - - ImpalaLineageHook impalaLineageHook = new ImpalaLineageHook(); - - // create database and tables to simulate Impala behavior that Impala updates metadata - // to HMS and HMSHook sends the metadata to Atlas, which has to happen before - // Atlas can handle lineage notification - String dbName = "sales_db"; - createDatabase(dbName); - - String sourceTableName = "sales_asia"; - createTable(dbName, sourceTableName,"(id string, name string)", false); - - String targetTableName = "sales_china"; - createTable(dbName, targetTableName,"(id string, name string)", false); - - // process lineage record, and send corresponding notification to Atlas - String[] args = new String[]{"-d", "./", "-p", "impala"}; - ImpalaLineageTool toolInstance = new ImpalaLineageTool(args); - toolInstance.importHImpalaEntities(impalaLineageHook, IMPALA, IMPALA_WAL); - - // verify the process is saved in Atlas - // the value is from info in IMPALA_4. - String createTime = new Long((long)1560885039*1000).toString(); - String processQFName = - dbName + "." 
+ targetTableName + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - String queryString = "create table " + targetTableName + " as select * from " + sourceTableName; - AtlasEntity processEntity1 = validateProcess(processQFName, queryString); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryString); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - String guid = assertTableIsRegistered(dbName, targetTableName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(guid).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/hook/ImpalaLineageHookIT.java b/addons/impala-bridge/src/test/java/org/apache/atlas/impala/hook/ImpalaLineageHookIT.java deleted file mode 100644 index 56d74fee3d..0000000000 --- a/addons/impala-bridge/src/test/java/org/apache/atlas/impala/hook/ImpalaLineageHookIT.java +++ /dev/null @@ -1,165 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.impala.hook; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.apache.atlas.impala.ImpalaLineageITBase; -import org.apache.atlas.impala.hook.events.BaseImpalaEvent; -import org.apache.atlas.impala.model.ImpalaDependencyType; -import org.apache.atlas.impala.model.ImpalaVertexType; -import org.apache.atlas.impala.model.LineageEdge; -import org.apache.atlas.impala.model.ImpalaQuery; -import org.apache.atlas.impala.model.LineageVertex; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.AfterClass; -import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.atlas.impala.hook.events.BaseImpalaEvent.ATTRIBUTE_DDL_QUERIES; -import static org.testng.Assert.assertFalse; - -public class ImpalaLineageHookIT extends ImpalaLineageITBase { - private static final Logger LOG = LoggerFactory.getLogger(ImpalaLineageHookIT.class); - private static ImpalaLineageHook impalaHook; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - impalaHook = new ImpalaLineageHook(); - } - - @AfterClass - public void testClean() { - impalaHook = null; - } - - @Test - public void testCreateView() throws Exception { - // first trigger HMS hook to create related entities - String dbName = createDatabase(); - assertDatabaseIsRegistered(dbName); - - String tableName = createTable(dbName, "(id string, count int)"); - String viewName = createTable(dbName, "(count int, id string)"); - - // then process lineage record to push lineage to Atlas - ImpalaQuery queryObj = new ImpalaQuery(); - List edges = new ArrayList<>(); - List vertices = new ArrayList<>(); - - queryObj.setQueryText("create view " + viewName + " as select count, id from " + tableName); - queryObj.setQueryId("3a441d0c130962f8:7f634aec00000000"); - queryObj.setHash("64ff0425ccdfaada53e3f2fd76f566f7"); - queryObj.setUser("admin"); - queryObj.setTimestamp((long)1554750072); - queryObj.setEndTime((long)1554750554); - - LineageEdge edge1 = new LineageEdge(); - edge1.setSources( Arrays.asList((long)1)); - edge1.setTargets( Arrays.asList((long)0)); - edge1.setEdgeType(ImpalaDependencyType.PROJECTION); - edges.add(edge1); - - LineageEdge edge2 = new LineageEdge(); - edge2.setSources( Arrays.asList((long)3)); - edge2.setTargets( Arrays.asList((long)2)); - edge2.setEdgeType(ImpalaDependencyType.PROJECTION); - edges.add(edge2); - - queryObj.setEdges(edges); - - LineageVertex vertex1 = new LineageVertex(); - vertex1.setId((long)0); - vertex1.setVertexType(ImpalaVertexType.COLUMN); - vertex1.setVertexId(viewName + ".count"); - vertices.add(vertex1); - - LineageVertex vertex2 = new LineageVertex(); - vertex2.setId((long)1); - vertex2.setVertexType(ImpalaVertexType.COLUMN); - vertex2.setVertexId(tableName + ".count"); - vertices.add(vertex2); - - LineageVertex vertex3 = new LineageVertex(); - vertex3.setId((long)2); - vertex3.setVertexType(ImpalaVertexType.COLUMN); - vertex3.setVertexId(viewName + ".id"); - vertices.add(vertex3); - - LineageVertex vertex4 = new LineageVertex(); - vertex4.setId((long)3); - vertex4.setVertexType(ImpalaVertexType.COLUMN); - vertex4.setVertexId(tableName + ".id"); - vertices.add(vertex4); - - LineageVertex vertex5 = new LineageVertex(); - 
vertex5.setId((long)4); - vertex5.setVertexType(ImpalaVertexType.TABLE); - vertex5.setVertexId(viewName); - vertex5.setCreateTime(System.currentTimeMillis() / 1000); - vertices.add(vertex5); - - LineageVertex vertex6 = new LineageVertex(); - vertex6.setId((long)5); - vertex6.setVertexType(ImpalaVertexType.TABLE); - vertex6.setVertexId(tableName); - vertex6.setCreateTime(System.currentTimeMillis() / 1000); - vertices.add(vertex6); - - queryObj.setVertices(vertices); - - try { - impalaHook.process(queryObj); - String createTime = new Long(BaseImpalaEvent.getTableCreateTime(vertex5)).toString(); - String processQFName = - vertex5.getVertexId() + AtlasImpalaHookContext.QNAME_SEP_METADATA_NAMESPACE + - CLUSTER_NAME + AtlasImpalaHookContext.QNAME_SEP_PROCESS + createTime; - - processQFName = processQFName.toLowerCase(); - - // check process and process execution entities - AtlasEntity processEntity1 = validateProcess(processQFName, queryObj.getQueryText()); - AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, queryObj.getQueryText()); - AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute( - BaseImpalaEvent.ATTRIBUTE_PROCESS)); - Assert.assertEquals(process1.getGuid(), processEntity1.getGuid()); - Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1); - - // check DDL entity - String viewId = assertTableIsRegistered(viewName); - AtlasEntity entity = atlasClientV2.getEntityByGuid(viewId).getEntity(); - List ddlQueries = (List) entity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES); - - assertNotNull(ddlQueries); - assertEquals(ddlQueries.size(), 1); - } catch (Exception ex) { - LOG.error("process create_view failed: ", ex); - assertFalse(true); - } - } -} diff --git a/addons/impala-bridge/src/test/resources/atlas-application.properties b/addons/impala-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 898b69c999..0000000000 --- a/addons/impala-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,124 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. 
-atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/impala-bridge/src/test/resources/atlas-log4j.xml b/addons/impala-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100644 index 
c661d36f81..0000000000 --- a/addons/impala-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/impala-bridge/src/test/resources/hive-site.xml b/addons/impala-bridge/src/test/resources/hive-site.xml deleted file mode 100644 index edd0c54313..0000000000 --- a/addons/impala-bridge/src/test/resources/hive-site.xml +++ /dev/null @@ -1,94 +0,0 @@ - - - - - - - hive.exec.submit.local.task.via.child - false - - - - mapreduce.framework.name - local - - - - fs.default.name - file:/// - - - - hive.metastore.event.listeners - org.apache.atlas.hive.hook.HiveMetastoreHookImpl - - - - hive.support.concurrency - false - - - - hive.metastore.warehouse.dir - ${project.basedir}/target/metastore - - - - javax.jdo.option.ConnectionURL - jdbc:derby:;databaseName=${project.basedir}/target/metastore_db;create=true - - - - atlas.hook.hive.synchronous - true - - - - fs.pfile.impl - org.apache.hadoop.fs.ProxyLocalFileSystem - - - - hive.in.test - true - - - - hive.zookeeper.quorum - localhost:19026 - - - - hive.metastore.schema.verification - false - - - - hive.metastore.disallow.incompatible.col.type.changes - false - - - - datanucleus.schema.autoCreateAll - true - - - - hive.exec.scratchdir - ${project.basedir}/target/scratchdir - - - \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelect.json b/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelect.json deleted file mode 100644 index aca2661e81..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelect.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"alter view db_4.view_1 as select count, id from db_4.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_4.view_1.count", - "metadata": { - "tableName": "db_4.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_4.table_1.count", - "metadata": { - "tableName": "db_4.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_4.view_1.id", - "metadata": { - "tableName": "db_4.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_4.table_1.id", - "metadata": { - "tableName": "db_4.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelectWithCommentSpaces.json b/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelectWithCommentSpaces.json deleted file mode 100644 index 322abb5ea6..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaAlterViewAsSelectWithCommentSpaces.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"alter /* comment1 */ view db_10.view_1 as select /* comment1 */ count, id from db_10.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - 
"timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_10.view_1.count", - "metadata": { - "tableName": "db_10.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_10.table_1.count", - "metadata": { - "tableName": "db_10.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_10.view_1.id", - "metadata": { - "tableName": "db_10.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_10.table_1.id", - "metadata": { - "tableName": "db_10.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelect.json b/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelect.json deleted file mode 100644 index 7bf361c767..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelect.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"create table db_3.table_2 as select count, id from db_3.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_3.table_2.count", - "metadata": { - "tableName": "db_3.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_3.table_1.count", - "metadata": { - "tableName": "db_3.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_3.table_2.id", - "metadata": { - "tableName": "db_3.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_3.table_1.id", - "metadata": { - "tableName": "db_3.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectVertexIdNoTableName.json b/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectVertexIdNoTableName.json deleted file mode 100644 index 0fadcc8933..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectVertexIdNoTableName.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"create table sales_china as select * from sales_asia", - "queryId":"2940d0b242de53ea:e82ba8d300000000", - "hash":"a705a9ec851a5440afca0dfb8df86cd5", - "user":"root", - "timestamp":1560885032, - "endTime":1560885040, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"id", - "metadata":{ - "tableName":"sales_db.sales_china", - "tableCreateTime":1560885039 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"sales_db.sales_asia.id", - "metadata":{ - "tableName":"sales_db.sales_asia", - "tableCreateTime":1560884919 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"name", - "metadata":{ 
- "tableName":"sales_db.sales_china", - "tableCreateTime":1560885039 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"sales_db.sales_asia.name", - "metadata":{ - "tableName":"sales_db.sales_asia", - "tableCreateTime":1560884919 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectWithCommentSpaces.json b/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectWithCommentSpaces.json deleted file mode 100644 index f588190b09..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateTableAsSelectWithCommentSpaces.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"create /* Test */ table db_9.table_2 as /* Test */ select count, id from db_9.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_9.table_2.count", - "metadata": { - "tableName": "db_9.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_9.table_1.count", - "metadata": { - "tableName": "db_9.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_9.table_2.id", - "metadata": { - "tableName": "db_9.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_9.table_1.id", - "metadata": { - "tableName": "db_9.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateView.json b/addons/impala-bridge/src/test/resources/impalaCreateView.json deleted file mode 100644 index bf55d9f725..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateView.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":"create view db_1.view_1 as select count, id from db_1.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_1.view_1.count", - "metadata": { - "tableName": "db_1.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_1.table_1.count", - "metadata": { - "tableName": "db_1.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_1.view_1.id", - "metadata": { - "tableName": "db_1.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_1.table_1.id", - "metadata": { - "tableName": "db_1.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateViewNoCreateTime.json b/addons/impala-bridge/src/test/resources/impalaCreateViewNoCreateTime.json deleted file mode 100644 index b825a386c9..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateViewNoCreateTime.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "queryText":"create view 
db_2.view_1 as select count, id from db_2.table_1", - "queryId":"5a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_2.view_1.count" - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_2.table_1.count" - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_2.view_1.id" - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_2.table_1.id" - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaCreateViewWithCommentSpaces.json b/addons/impala-bridge/src/test/resources/impalaCreateViewWithCommentSpaces.json deleted file mode 100644 index e49b6b7c40..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaCreateViewWithCommentSpaces.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "queryText":" create /* comment1 */ view db_8.view_1 as select /* comment2 */ count, id from db_8.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_8.view_1.count", - "metadata": { - "tableName": "db_8.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_8.table_1.count", - "metadata": { - "tableName": "db_8.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_8.view_1.id", - "metadata": { - "tableName": "db_8.view_1", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_8.table_1.id", - "metadata": { - "tableName": "db_8.table_1", - "tableCreateTime": 1554750070 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaInsertIntoAsSelect.json b/addons/impala-bridge/src/test/resources/impalaInsertIntoAsSelect.json deleted file mode 100644 index deb14669c9..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaInsertIntoAsSelect.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "queryText":"insert into table db_5.table_2 (count, id) select count, id from db_5.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - ], - "targets":[ - 4 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_5.table_2.count", - "metadata": { - "tableName": "db_5.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_5.table_1.count", - "metadata": { - "tableName": "db_5.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_5.table_2.id", - "metadata": { - "tableName": "db_5.table_2", - "tableCreateTime": 1554750072 - } - 
}, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_5.table_1.id", - "metadata": { - "tableName": "db_5.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":4, - "vertexType":"COLUMN", - "vertexId":"db_5.table_2.int_col", - "metadata": { - "tableName": "db_5.table_2", - "tableCreateTime": 1554750072 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect1.json b/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect1.json deleted file mode 100644 index 4e2783783f..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect1.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "queryText":"insert into table db_6.table_2 (count, id) select count, id from db_6.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750072, - "endTime":1554750554, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - ], - "targets":[ - 4 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.count", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_6.table_1.count", - "metadata": { - "tableName": "db_6.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.id", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_6.table_1.id", - "metadata": { - "tableName": "db_6.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":4, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.int_col", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - } - ] -} diff --git a/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect2.json b/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect2.json deleted file mode 100644 index ece6535d99..0000000000 --- a/addons/impala-bridge/src/test/resources/impalaMultipleInsertIntoAsSelect2.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "queryText":"insert into table db_6.table_2 (count, id) select count, id from db_6.table_1", - "queryId":"3a441d0c130962f8:7f634aec00000000", - "hash":"64ff0425ccdfaada53e3f2fd76f566f7", - "user":"admin", - "timestamp":1554750082, - "endTime":1554750584, - "edges":[ - { - "sources":[ - 1 - ], - "targets":[ - 0 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - 3 - ], - "targets":[ - 2 - ], - "edgeType":"PROJECTION" - }, - { - "sources":[ - ], - "targets":[ - 4 - ], - "edgeType":"PROJECTION" - } - ], - "vertices":[ - { - "id":0, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.count", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":1, - "vertexType":"COLUMN", - "vertexId":"db_6.table_1.count", - "metadata": { - "tableName": "db_6.table_1", - "tableCreateTime": 1554750070 - } - }, - { - "id":2, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.id", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - }, - { - "id":3, - "vertexType":"COLUMN", - "vertexId":"db_6.table_1.id", - "metadata": { - "tableName": "db_6.table_1", - "tableCreateTime": 1554750070 - } 
- }, - { - "id":4, - "vertexType":"COLUMN", - "vertexId":"db_6.table_2.int_col", - "metadata": { - "tableName": "db_6.table_2", - "tableCreateTime": 1554750072 - } - } - ] -} \ No newline at end of file diff --git a/addons/impala-bridge/src/test/resources/users-credentials.properties b/addons/impala-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/impala-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/impala-hook-api/pom.xml b/addons/impala-hook-api/pom.xml deleted file mode 100644 index fd970675a5..0000000000 --- a/addons/impala-hook-api/pom.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - impala-hook-api - Apache Atlas Impala Hook API Module - Apache Atlas Impala Hook API - jar - - \ No newline at end of file diff --git a/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryCompleteContext.java b/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryCompleteContext.java deleted file mode 100644 index dc8e31716b..0000000000 --- a/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryCompleteContext.java +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.impala.hooks; - -import java.util.Objects; - -/** - * {@link QueryCompleteContext} encapsulates immutable information sent from the - * BE to a post-query hook. - */ -public class QueryCompleteContext { - private final String lineageGraph_; - - public QueryCompleteContext(String lineageGraph) { - lineageGraph_ = Objects.requireNonNull(lineageGraph); - } - - /** - * Returns the lineage graph sent from the backend during - * {@link QueryEventHook#onQueryComplete(QueryCompleteContext)}. This graph - * object will generally contain more information than it did when it was - * first constructed in the frontend, because the backend will have filled - * in additional information. - *
- * The returned object is a JSON representation of the lineage graph object - * for the query. The details of the JSON translation are not provided here - * as this is meant to be a temporary feature, and the String format will - * be changed to something more strongly-typed in the future. - *
- * - * @return lineage graph from the query that executed - */ - public String getLineageGraph() { return lineageGraph_; } - - @Override - public String toString() { - return "QueryCompleteContext{" + - "lineageGraph='" + lineageGraph_ + '\'' + - '}'; - } -} diff --git a/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryEventHook.java b/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryEventHook.java deleted file mode 100644 index cd4d2ec080..0000000000 --- a/addons/impala-hook-api/src/main/java/org/apache/impala/hooks/QueryEventHook.java +++ /dev/null @@ -1,116 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.impala.hooks; - -/** - * {@link QueryEventHook} is the interface for implementations that - * can hook into supported events in Impala query execution. - */ -public interface QueryEventHook { - /** - * Hook method invoked when the Impala daemon starts up. - *
- * This method will block completion of daemon startup, so you should - * execute any long-running actions asynchronously. - *
- * Error-Handling
- * Any {@link Exception} thrown from this method will effectively fail - * Impala startup with an error. Implementations should handle all - * exceptions as gracefully as they can, even if the end result is to - * throw them. - *
- */ - void onImpalaStartup(); - - /** - * Hook method invoked asynchronously when a (qualifying) Impala query - * has executed, but before it has returned. - *
- * This method will not block the invoking or subsequent queries, - * but may block future hook invocations if it runs for too long - *
- * Error-Handling
- * Any {@link Throwable} thrown from this method will only be caught - * and logged and will not affect the result of any query. Hook implementations - * should make a best-effort to handle their own exceptions. - *
- * Important:
- * This hook is actually invoked when the query is unregistered, - * which may happen a long time after the query has executed. - * e.g. the following sequence is possible: - *
  1. User executes query from Hue. - *
  2. User goes home for weekend, leaving Hue tab open in browser - *
  3. If we're lucky, the session timeout expires after some amount of idle time. - *
  4. The query gets unregistered, lineage record gets logged - *
- * Service Guarantees
- * - * Impala makes the following guarantees about how this method is executed - * with respect to other implementations that may be registered: - * - *
Hooks are executed asynchronously
- * - * All hook execution happens asynchronously of the query that triggered - * them. Hooks may still be executing after the query response has returned - * to the caller. Additionally, hooks may execute concurrently if the - * hook executor thread size is configured appropriately. - * - *
Hook Invocation is in Configuration Order
- * - * The submission of the hook execution tasks occurs in the order - * that the hooks were defined in configuration. This generally means that - * hooks will start executing in order, but there are no guarantees - * about finishing order. - *
- * For example, if configured with {@code query_event_hook_classes=hook1,hook2,hook3}, - * then hook1 will start before hook2, and hook2 will start before hook3. - * If you need to guarantee that hook1 completes before hook2 starts, then - * you should specify {@code query_event_hook_nthreads=1} for serial hook - * execution. - *
- * Hook Execution Blocks
- * - * A hook will block the thread it executes on until it completes. If a hook hangs, - * then the thread also hangs. Impala (currently) will not check for hanging hooks to - * take any action. This means that if you have {@code query_event_hook_nthreads} - * less than the number of hooks, then 1 hook may effectively block others from - * executing. - * - *
Hook Exceptions are non-fatal
- * - * Any exception thrown from this hook method will be logged and ignored. Therefore, - * an exception in 1 hook will not affect another hook (when no shared resources are - * involved). - * - *
Hook Execution may end abruptly at Impala shutdown
- * - * If a hook is still executing when Impala is shutdown, there are no guarantees - * that it will complete execution before being killed. - * - * - * @param context object containing the post execution context - * of the query - */ - void onQueryComplete(QueryCompleteContext context); -} diff --git a/addons/kafka-bridge/pom.xml b/addons/kafka-bridge/pom.xml deleted file mode 100644 index 820b478441..0000000000 --- a/addons/kafka-bridge/pom.xml +++ /dev/null @@ -1,400 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - kafka-bridge - Apache Atlas Kafka Bridge Module - Apache Atlas Kafka Bridge - jar - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-client-v2 - - - - - com.sun.jersey - jersey-bundle - ${jersey.version} - test - - - - org.apache.atlas - atlas-webapp - war - test - - - - org.apache.hadoop - hadoop-hdfs - test - - - javax.servlet - servlet-api - - - - - - org.apache.hadoop - hadoop-annotations - test - - - - org.apache.hadoop - hadoop-minicluster - ${hadoop.version} - test - - - javax.servlet - servlet-api - - - - - - org.testng - testng - - - - org.mockito - mockito-all - - - - org.apache.httpcomponents - httpcore - ${httpcomponents-httpcore.version} - - - - org.eclipse.jetty - jetty-webapp - ${jetty.version} - compile - - - - org.eclipse.jetty - jetty-server - test - - - com.google.guava - guava - 12.0.1 - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - compile - - - javax.servlet - servlet-api - - - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/kafka/atlas-kafka-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey.contribs - jersey-multipart - ${jersey.version} - - - commons-configuration - commons-configuration - ${commons-conf.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - true - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - ${project.basedir}/../../webapp/target/test-classes/ - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - true - - - - stop-jetty - post-integration-test - - 
stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - true - - - - - - - - - maven-resources-plugin - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - diff --git a/addons/kafka-bridge/src/bin/import-kafka.sh b/addons/kafka-bridge/src/bin/import-kafka.sh deleted file mode 100644 index fbc16f0d03..0000000000 --- a/addons/kafka-bridge/src/bin/import-kafka.sh +++ /dev/null @@ -1,150 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. See accompanying LICENSE file. -# - -# resolve links - $0 may be a softlink -PRG="${0}" - -[[ `uname -s` == *"CYGWIN"* ]] && CYGWIN=true - -while [ -h "${PRG}" ]; do - ls=`ls -ld "${PRG}"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "${PRG}"`/"$link" - fi -done - -echo ">>>>> $PRG" - -BASEDIR=`dirname ${PRG}` -BASEDIR=`cd ${BASEDIR}/..;pwd` - -echo ">>>>> $BASEDIR" - -allargs=$@ - -if test -z "${JAVA_HOME}" -then - JAVA_BIN=`which java` - JAR_BIN=`which jar` -else - JAVA_BIN="${JAVA_HOME}/bin/java" - JAR_BIN="${JAVA_HOME}/bin/jar" -fi -export JAVA_BIN - -if [ ! -e "${JAVA_BIN}" ] || [ ! -e "${JAR_BIN}" ]; then - echo "$JAVA_BIN and/or $JAR_BIN not found on the system. Please make sure java and jar commands are available." - exit 1 -fi - -# Construct Atlas classpath using jars from hook/kafka/atlas-kafka-plugin-impl/ directory. -for i in "${BASEDIR}/hook/kafka/atlas-kafka-plugin-impl/"*.jar; do - ATLASCPPATH="${ATLASCPPATH}:$i" -done - -if [ -z "${ATLAS_CONF_DIR}" ] && [ -e /etc/atlas/conf ];then - ATLAS_CONF_DIR=/etc/atlas/conf -fi -ATLASCPPATH=${ATLASCPPATH}:${ATLAS_CONF_DIR} - -# log dir for applications -ATLAS_LOG_DIR="${ATLAS_LOG_DIR:-/var/log/atlas}" -export ATLAS_LOG_DIR -LOGFILE="$ATLAS_LOG_DIR/import-kafka.log" - -TIME=`date +%Y%m%d%H%M%s` - -#Add Kafka conf in classpath -if [ ! -z "$KAFKA_CONF_DIR" ]; then - KAFKA_CONF=$KAFKA_CONF_DIR -elif [ ! -z "$KAFKA_HOME" ]; then - KAFKA_CONF="$KAFKA_HOME/conf" -elif [ -e /etc/kafka/conf ]; then - KAFKA_CONF="/etc/kafka/conf" -else - echo "Could not find a valid KAFKA configuration" - exit 1 -fi - -echo Using Kafka configuration directory "[$KAFKA_CONF]" - - -if [ -f "${KAFKA_CONF}/kafka-env.sh" ]; then - . 
"${KAFKA_CONF}/kafka-env.sh" -fi - -if [ -z "$KAFKA_HOME" ]; then - if [ -d "${BASEDIR}/../kafka" ]; then - KAFKA_HOME=${BASEDIR}/../kafka - else - echo "Please set KAFKA_HOME to the root of Kafka installation" - exit 1 - fi -fi - -KAFKA_CP="${KAFKA_CONF}" - -for i in "${KAFKA_HOME}/libs/"*.jar; do - KAFKA_CP="${KAFKA_CP}:$i" -done - - -#Add hadoop conf in classpath -if [ ! -z "$HADOOP_CLASSPATH" ]; then - HADOOP_CP=$HADOOP_CLASSPATH -elif [ ! -z "$HADOOP_HOME" ]; then - HADOOP_CP=`$HADOOP_HOME/bin/hadoop classpath` -elif [ $(command -v hadoop) ]; then - HADOOP_CP=`hadoop classpath` - #echo $HADOOP_CP -else - echo "Environment variable HADOOP_CLASSPATH or HADOOP_HOME need to be set" - exit 1 -fi - -CP="${ATLASCPPATH}:${HADOOP_CP}:${KAFKA_CP}" - -# If running in cygwin, convert pathnames and classpath to Windows format. -if [ "${CYGWIN}" == "true" ] -then - ATLAS_LOG_DIR=`cygpath -w ${ATLAS_LOG_DIR}` - LOGFILE=`cygpath -w ${LOGFILE}` - KAFKA_CP=`cygpath -w ${KAFKA_CP}` - HADOOP_CP=`cygpath -w ${HADOOP_CP}` - CP=`cygpath -w -p ${CP}` -fi - -JAVA_PROPERTIES="$ATLAS_OPTS -Datlas.log.dir=$ATLAS_LOG_DIR -Datlas.log.file=import-kafka.log --Dlog4j.configuration=atlas-kafka-import-log4j.xml" -shift - -while [[ ${1} =~ ^\-D ]]; do - JAVA_PROPERTIES="${JAVA_PROPERTIES} ${1}" - shift -done - -echo "Log file for import is $LOGFILE" - -"${JAVA_BIN}" ${JAVA_PROPERTIES} -cp "${CP}" org.apache.atlas.kafka.bridge.KafkaBridge $allargs - -RETVAL=$? -[ $RETVAL -eq 0 ] && echo Kafka Data Model imported successfully!!! -[ $RETVAL -ne 0 ] && echo Failed to import Kafka Data Model!!! - -exit $RETVAL - diff --git a/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/bridge/KafkaBridge.java b/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/bridge/KafkaBridge.java deleted file mode 100644 index f954824436..0000000000 --- a/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/bridge/KafkaBridge.java +++ /dev/null @@ -1,361 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.kafka.bridge; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.kafka.model.KafkaDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.utils.AtlasConfigurationUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.KafkaUtils; -import org.apache.commons.cli.BasicParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.security.UserGroupInformation; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.regex.Pattern; - -public class KafkaBridge { - private static final Logger LOG = LoggerFactory.getLogger(KafkaBridge.class); - private static final int EXIT_CODE_SUCCESS = 0; - private static final int EXIT_CODE_FAILED = 1; - private static final String ATLAS_ENDPOINT = "atlas.rest.address"; - private static final String DEFAULT_ATLAS_URL = "http://localhost:21000/"; - private static final String CLUSTER_NAME_KEY = "atlas.cluster.name"; - private static final String KAFKA_METADATA_NAMESPACE = "atlas.metadata.namespace"; - private static final String DEFAULT_CLUSTER_NAME = "primary"; - private static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName"; - private static final String DESCRIPTION_ATTR = "description"; - private static final String PARTITION_COUNT = "partitionCount"; - private static final String REPLICATION_FACTOR = "replicationFactor"; - private static final String NAME = "name"; - private static final String URI = "uri"; - private static final String CLUSTERNAME = "clusterName"; - private static final String TOPIC = "topic"; - private static final String FORMAT_KAFKA_TOPIC_QUALIFIED_NAME = "%s@%s"; - - private final List availableTopics; - private final String metadataNamespace; - private final AtlasClientV2 atlasClientV2; - private final KafkaUtils kafkaUtils; - - - public static void main(String[] args) { - int exitCode = EXIT_CODE_FAILED; - AtlasClientV2 atlasClientV2 = null; - KafkaUtils kafkaUtils = null; - - try { - Options options = new Options(); - options.addOption("t","topic", true, "topic"); - options.addOption("f", "filename", true, "filename"); - - CommandLineParser parser = new BasicParser(); - CommandLine cmd = parser.parse(options, args); - String topicToImport = cmd.getOptionValue("t"); - String fileToImport = 
cmd.getOptionValue("f"); - Configuration atlasConf = ApplicationProperties.get(); - String[] urls = atlasConf.getStringArray(ATLAS_ENDPOINT); - - if (urls == null || urls.length == 0) { - urls = new String[] { DEFAULT_ATLAS_URL }; - } - - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput(); - - atlasClientV2 = new AtlasClientV2(urls, basicAuthUsernamePassword); - } else { - UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - - atlasClientV2 = new AtlasClientV2(ugi, ugi.getShortUserName(), urls); - } - - kafkaUtils = new KafkaUtils(atlasConf); - - KafkaBridge importer = new KafkaBridge(atlasConf, atlasClientV2, kafkaUtils); - - if (StringUtils.isNotEmpty(fileToImport)) { - File f = new File(fileToImport); - - if (f.exists() && f.canRead()) { - BufferedReader br = new BufferedReader(new FileReader(f)); - String line; - - while ((line = br.readLine()) != null) { - topicToImport = line.trim(); - - importer.importTopic(topicToImport); - } - - exitCode = EXIT_CODE_SUCCESS; - } else { - LOG.error("Failed to read the file"); - } - } else { - importer.importTopic(topicToImport); - - exitCode = EXIT_CODE_SUCCESS; - } - } catch(ParseException e) { - LOG.error("Failed to parse arguments. Error: ", e.getMessage()); - - printUsage(); - } catch(Exception e) { - System.out.println("ImportKafkaEntities failed. Please check the log file for the detailed error message"); - - e.printStackTrace(); - - LOG.error("ImportKafkaEntities failed", e); - } finally { - if (atlasClientV2 != null) { - atlasClientV2.close(); - } - - if (kafkaUtils != null) { - kafkaUtils.close(); - } - } - - System.exit(exitCode); - } - - public KafkaBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2, KafkaUtils kafkaUtils) throws Exception { - this.atlasClientV2 = atlasClientV2; - this.metadataNamespace = getMetadataNamespace(atlasConf); - this.kafkaUtils = kafkaUtils; - this.availableTopics = this.kafkaUtils.listAllTopics(); - } - - private String getMetadataNamespace(Configuration config) { - return AtlasConfigurationUtil.getRecentString(config, KAFKA_METADATA_NAMESPACE, getClusterName(config)); - } - - private String getClusterName(Configuration config) { - return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME); - } - - public void importTopic(String topicToImport) throws Exception { - List topics = availableTopics; - - if (StringUtils.isNotEmpty(topicToImport)) { - List topics_subset = new ArrayList<>(); - - for (String topic : topics) { - if (Pattern.compile(topicToImport).matcher(topic).matches()) { - topics_subset.add(topic); - } - } - - topics = topics_subset; - } - - if (CollectionUtils.isNotEmpty(topics)) { - for (String topic : topics) { - createOrUpdateTopic(topic); - } - } - } - - @VisibleForTesting - AtlasEntityWithExtInfo createOrUpdateTopic(String topic) throws Exception { - String topicQualifiedName = getTopicQualifiedName(metadataNamespace, topic); - AtlasEntityWithExtInfo topicEntity = findTopicEntityInAtlas(topicQualifiedName); - - if (topicEntity == null) { - System.out.println("Adding Kafka topic " + topic); - LOG.info("Importing Kafka topic: {}", topicQualifiedName); - - AtlasEntity entity = getTopicEntity(topic, null); - - topicEntity = createEntityInAtlas(new AtlasEntityWithExtInfo(entity)); - } else { - System.out.println("Updating Kafka topic " + topic); - LOG.info("Kafka topic {} already exists in Atlas. 
Updating it..", topicQualifiedName); - - AtlasEntity entity = getTopicEntity(topic, topicEntity.getEntity()); - - topicEntity.setEntity(entity); - - topicEntity = updateEntityInAtlas(topicEntity); - } - - return topicEntity; - } - - @VisibleForTesting - AtlasEntity getTopicEntity(String topic, AtlasEntity topicEntity) throws Exception { - final AtlasEntity ret; - - if (topicEntity == null) { - ret = new AtlasEntity(KafkaDataTypes.KAFKA_TOPIC.getName()); - } else { - ret = topicEntity; - } - - String qualifiedName = getTopicQualifiedName(metadataNamespace, topic); - - ret.setAttribute(ATTRIBUTE_QUALIFIED_NAME, qualifiedName); - ret.setAttribute(CLUSTERNAME, metadataNamespace); - ret.setAttribute(TOPIC, topic); - ret.setAttribute(NAME,topic); - ret.setAttribute(DESCRIPTION_ATTR, topic); - ret.setAttribute(URI, topic); - - try { - ret.setAttribute(PARTITION_COUNT, kafkaUtils.getPartitionCount(topic)); - ret.setAttribute(REPLICATION_FACTOR, kafkaUtils.getReplicationFactor(topic)); - } catch (ExecutionException | InterruptedException e) { - LOG.error("Error while getting partition data for topic :" + topic, e); - - throw new Exception("Error while getting partition data for topic :" + topic, e); - } - - return ret; - } - - @VisibleForTesting - static String getTopicQualifiedName(String metadataNamespace, String topic) { - return String.format(FORMAT_KAFKA_TOPIC_QUALIFIED_NAME, topic.toLowerCase(), metadataNamespace); - } - - private AtlasEntityWithExtInfo findTopicEntityInAtlas(String topicQualifiedName) { - AtlasEntityWithExtInfo ret = null; - - try { - ret = findEntityInAtlas(KafkaDataTypes.KAFKA_TOPIC.getName(), topicQualifiedName); - - clearRelationshipAttributes(ret); - } catch (Exception e) { - ret = null; // entity doesn't exist in Atlas - } - - return ret; - } - - @VisibleForTesting - AtlasEntityWithExtInfo findEntityInAtlas(String typeName, String qualifiedName) throws Exception { - Map attributes = Collections.singletonMap(ATTRIBUTE_QUALIFIED_NAME, qualifiedName); - - return atlasClientV2.getEntityByAttribute(typeName, attributes); - } - - @VisibleForTesting - AtlasEntityWithExtInfo createEntityInAtlas(AtlasEntityWithExtInfo entity) throws Exception { - AtlasEntityWithExtInfo ret = null; - EntityMutationResponse response = atlasClientV2.createEntity(entity); - List entities = response.getCreatedEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - AtlasEntityWithExtInfo getByGuidResponse = atlasClientV2.getEntityByGuid(entities.get(0).getGuid()); - - ret = getByGuidResponse; - - LOG.info("Created {} entity: name={}, guid={}", ret.getEntity().getTypeName(), ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), ret.getEntity().getGuid()); - } - - return ret; - } - - @VisibleForTesting - AtlasEntityWithExtInfo updateEntityInAtlas(AtlasEntityWithExtInfo entity) throws Exception { - AtlasEntityWithExtInfo ret; - EntityMutationResponse response = atlasClientV2.updateEntity(entity); - - if (response != null) { - List entities = response.getUpdatedEntities(); - - if (CollectionUtils.isNotEmpty(entities)) { - AtlasEntityWithExtInfo getByGuidResponse = atlasClientV2.getEntityByGuid(entities.get(0).getGuid()); - - ret = getByGuidResponse; - - LOG.info("Updated {} entity: name={}, guid={} ", ret.getEntity().getTypeName(), ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), ret.getEntity().getGuid()); - } else { - LOG.info("Entity: name={} ", entity.toString() + " not updated as it is unchanged from what is in Atlas" ); - - ret = entity; - } - } else { - LOG.info("Entity: name={} ", 
entity.toString() + " not updated as it is unchanged from what is in Atlas" ); - - ret = entity; - } - - return ret; - } - - private static void printUsage(){ - System.out.println("Usage 1: import-kafka.sh"); - System.out.println("Usage 2: import-kafka.sh [-t OR --topic ]"); - System.out.println("Usage 3: import-kafka.sh [-f ]" ); - System.out.println(" Format:"); - System.out.println(" topic1 OR topic1 regex"); - System.out.println(" topic2 OR topic2 regex"); - System.out.println(" topic3 OR topic3 regex"); - } - - - private void clearRelationshipAttributes(AtlasEntityWithExtInfo entity) { - if (entity != null) { - clearRelationshipAttributes(entity.getEntity()); - - if (entity.getReferredEntities() != null) { - clearRelationshipAttributes(entity.getReferredEntities().values()); - } - } - } - - private void clearRelationshipAttributes(Collection entities) { - if (entities != null) { - for (AtlasEntity entity : entities) { - clearRelationshipAttributes(entity); - } - } - } - - private void clearRelationshipAttributes(AtlasEntity entity) { - if (entity != null && entity.getRelationshipAttributes() != null) { - entity.getRelationshipAttributes().clear(); - } - } -} diff --git a/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/model/KafkaDataTypes.java b/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/model/KafkaDataTypes.java deleted file mode 100644 index 0f81b4c37a..0000000000 --- a/addons/kafka-bridge/src/main/java/org/apache/atlas/kafka/model/KafkaDataTypes.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.kafka.model; - -/** - * HBASE Data Types for model and bridge. - */ -public enum KafkaDataTypes { - // Classes - KAFKA_TOPIC; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/kafka-bridge/src/main/resources/atlas-kafka-import-log4j.xml b/addons/kafka-bridge/src/main/resources/atlas-kafka-import-log4j.xml deleted file mode 100644 index 3fc2dcf9c3..0000000000 --- a/addons/kafka-bridge/src/main/resources/atlas-kafka-import-log4j.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/kafka-bridge/src/test/java/org/apache/atlas/kafka/bridge/KafkaBridgeTest.java b/addons/kafka-bridge/src/test/java/org/apache/atlas/kafka/bridge/KafkaBridgeTest.java deleted file mode 100644 index f86ceb58fd..0000000000 --- a/addons/kafka-bridge/src/test/java/org/apache/atlas/kafka/bridge/KafkaBridgeTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.kafka.bridge; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.kafka.model.KafkaDataTypes; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.utils.KafkaUtils; -import org.mockito.ArgumentCaptor; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import java.util.Collections; - - -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; - -public class KafkaBridgeTest { - - private static final String TEST_TOPIC_NAME = "test_topic"; - public static final AtlasEntity.AtlasEntityWithExtInfo TOPIC_WITH_EXT_INFO = new AtlasEntity.AtlasEntityWithExtInfo( - getTopicEntityWithGuid("0dd466a4-3838-4537-8969-6abb8b9e9185")); - private static final String CLUSTER_NAME = "primary"; - private static final String TOPIC_QUALIFIED_NAME = KafkaBridge.getTopicQualifiedName(CLUSTER_NAME, TEST_TOPIC_NAME); - - @BeforeMethod - public void initializeMocks() { - MockitoAnnotations.initMocks(this); - } - - private static AtlasEntity getTopicEntityWithGuid(String guid) { - AtlasEntity ret = new AtlasEntity(KafkaDataTypes.KAFKA_TOPIC.getName()); - ret.setGuid(guid); - return ret; - } - - @Test - public void testImportTopic() throws Exception { - KafkaUtils mockKafkaUtils = mock(KafkaUtils.class); - when(mockKafkaUtils.listAllTopics()) - .thenReturn(Collections.singletonList(TEST_TOPIC_NAME)); - when(mockKafkaUtils.getPartitionCount(TEST_TOPIC_NAME)) - .thenReturn(3); - - EntityMutationResponse mockCreateResponse = mock(EntityMutationResponse.class); - AtlasEntityHeader mockAtlasEntityHeader = mock(AtlasEntityHeader.class); - when(mockAtlasEntityHeader.getGuid()).thenReturn(TOPIC_WITH_EXT_INFO.getEntity().getGuid()); - when(mockCreateResponse.getCreatedEntities()) - .thenReturn(Collections.singletonList(mockAtlasEntityHeader)); - - AtlasClientV2 mockAtlasClientV2 = mock(AtlasClientV2.class); - when(mockAtlasClientV2.createEntity(any())) - .thenReturn(mockCreateResponse); - when(mockAtlasClientV2.getEntityByGuid(TOPIC_WITH_EXT_INFO.getEntity().getGuid())) - .thenReturn(TOPIC_WITH_EXT_INFO); - - KafkaBridge bridge = new KafkaBridge(ApplicationProperties.get(), mockAtlasClientV2, mockKafkaUtils); - bridge.importTopic(TEST_TOPIC_NAME); - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(AtlasEntity.AtlasEntityWithExtInfo.class); - verify(mockAtlasClientV2).createEntity(argumentCaptor.capture()); - AtlasEntity.AtlasEntityWithExtInfo entity = argumentCaptor.getValue(); - assertEquals(entity.getEntity().getAttribute("qualifiedName"), 
TOPIC_QUALIFIED_NAME); - } - - @Test - public void testCreateTopic() throws Exception { - KafkaUtils mockKafkaUtils = mock(KafkaUtils.class); - when(mockKafkaUtils.listAllTopics()) - .thenReturn(Collections.singletonList(TEST_TOPIC_NAME)); - when(mockKafkaUtils.getPartitionCount(TEST_TOPIC_NAME)) - .thenReturn(3); - - EntityMutationResponse mockCreateResponse = mock(EntityMutationResponse.class); - AtlasEntityHeader mockAtlasEntityHeader = mock(AtlasEntityHeader.class); - when(mockAtlasEntityHeader.getGuid()).thenReturn(TOPIC_WITH_EXT_INFO.getEntity().getGuid()); - when(mockCreateResponse.getCreatedEntities()) - .thenReturn(Collections.singletonList(mockAtlasEntityHeader)); - - AtlasClientV2 mockAtlasClientV2 = mock(AtlasClientV2.class); - when(mockAtlasClientV2.createEntity(any())) - .thenReturn(mockCreateResponse); - when(mockAtlasClientV2.getEntityByGuid(TOPIC_WITH_EXT_INFO.getEntity().getGuid())) - .thenReturn(TOPIC_WITH_EXT_INFO); - - KafkaBridge bridge = new KafkaBridge(ApplicationProperties.get(), mockAtlasClientV2, mockKafkaUtils); - AtlasEntity.AtlasEntityWithExtInfo ret = bridge.createOrUpdateTopic(TEST_TOPIC_NAME); - - assertEquals(TOPIC_WITH_EXT_INFO, ret); - } - - @Test - public void testUpdateTopic() throws Exception { - KafkaUtils mockKafkaUtils = mock(KafkaUtils.class); - when(mockKafkaUtils.listAllTopics()) - .thenReturn(Collections.singletonList(TEST_TOPIC_NAME)); - when(mockKafkaUtils.getPartitionCount(TEST_TOPIC_NAME)) - .thenReturn(3); - - EntityMutationResponse mockUpdateResponse = mock(EntityMutationResponse.class); - AtlasEntityHeader mockAtlasEntityHeader = mock(AtlasEntityHeader.class); - when(mockAtlasEntityHeader.getGuid()).thenReturn(TOPIC_WITH_EXT_INFO.getEntity().getGuid()); - when(mockUpdateResponse.getUpdatedEntities()) - .thenReturn(Collections.singletonList(mockAtlasEntityHeader)); - - AtlasClientV2 mockAtlasClientV2 = mock(AtlasClientV2.class); - when(mockAtlasClientV2.getEntityByAttribute(eq(KafkaDataTypes.KAFKA_TOPIC.getName()), any())) - .thenReturn(TOPIC_WITH_EXT_INFO); - when(mockAtlasClientV2.updateEntity(any())) - .thenReturn(mockUpdateResponse); - when(mockAtlasClientV2.getEntityByGuid(TOPIC_WITH_EXT_INFO.getEntity().getGuid())) - .thenReturn(TOPIC_WITH_EXT_INFO); - - KafkaBridge bridge = new KafkaBridge(ApplicationProperties.get(), mockAtlasClientV2, mockKafkaUtils); - AtlasEntity.AtlasEntityWithExtInfo ret = bridge.createOrUpdateTopic(TEST_TOPIC_NAME); - - assertEquals(TOPIC_WITH_EXT_INFO, ret); - } -} \ No newline at end of file diff --git a/addons/kafka-bridge/src/test/resources/atlas-application.properties b/addons/kafka-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 4a12cf6c83..0000000000 --- a/addons/kafka-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,125 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. -atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase -atlas.graph.index.search.solr.wait-searcher=true - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - For more information refer http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2 - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# 
atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/kafka-bridge/src/test/resources/atlas-log4j.xml b/addons/kafka-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 0b9b36e36b..0000000000 --- a/addons/kafka-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/kafka-bridge/src/test/resources/users-credentials.properties b/addons/kafka-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/kafka-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/sqoop-bridge-shim/pom.xml b/addons/sqoop-bridge-shim/pom.xml deleted file mode 100755 index 533a1f1a8c..0000000000 --- a/addons/sqoop-bridge-shim/pom.xml +++ /dev/null @@ -1,47 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - sqoop-bridge-shim - Apache Atlas Sqoop Bridge Shim Module - Apache Atlas Sqoop Bridge Shim - jar - - - - - org.apache.atlas - atlas-plugin-classloader - - - - org.apache.sqoop - sqoop - ${sqoop.version} - compile - - - diff --git a/addons/sqoop-bridge-shim/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java b/addons/sqoop-bridge-shim/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java deleted file mode 100644 index 08b858728e..0000000000 --- a/addons/sqoop-bridge-shim/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.sqoop.hook; - - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.sqoop.SqoopJobDataPublisher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Sqoop hook used for atlas entity registration. 
- */ -public class SqoopHook extends SqoopJobDataPublisher { - private static final Logger LOG = LoggerFactory.getLogger(SqoopHook.class); - - private static final String ATLAS_PLUGIN_TYPE = "sqoop"; - private static final String ATLAS_SQOOP_HOOK_IMPL_CLASSNAME = "org.apache.atlas.sqoop.hook.SqoopHook"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private SqoopJobDataPublisher sqoopHookImpl = null; - - public SqoopHook() { - this.initialize(); - } - - @Override - public void publish(SqoopJobDataPublisher.Data data) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> SqoopHook.run({})", data); - } - - try { - activatePluginClassLoader(); - sqoopHookImpl.publish(data); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== SqoopHook.run({})", data); - } - } - - private void initialize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveHook.initialize()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = (Class) Class - .forName(ATLAS_SQOOP_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - sqoopHookImpl = cls.newInstance(); - } catch (Exception excp) { - LOG.error("Error instantiating Atlas hook implementation", excp); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveHook.initialize()"); - } - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} diff --git a/addons/sqoop-bridge/pom.xml b/addons/sqoop-bridge/pom.xml deleted file mode 100644 index 021e93f56d..0000000000 --- a/addons/sqoop-bridge/pom.xml +++ /dev/null @@ -1,472 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - sqoop-bridge - Apache Atlas Sqoop Bridge Module - Apache Atlas Sqoop Bridge - jar - - - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - - - org.apache.hive - hive-metastore - ${hive.version} - provided - - - org.mortbay.jetty - * - - - com.github.stephenc.findbugs - findbugs-annotations - - - - - - - org.apache.hive - hive-exec - ${hive.version} - provided - - - - org.apache.hive - hive-cli - ${hive.version} - test - - - javax.servlet - * - - - javax.ws.rs - * - - - org.eclipse.jetty.aggregate - * - - - javax.servlet - servlet-api - - - - - - - org.apache.sqoop - sqoop - ${sqoop.version} - compile - - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-notification - - - - org.apache.atlas - hive-bridge - - - - org.apache.hive - hive-common - ${hive.version} - - - - - org.apache.atlas - atlas-webapp - war - test - - - - org.apache.hadoop - hadoop-client - - - javax.servlet - servlet-api - - - - - - org.apache.hadoop - hadoop-annotations - - - - org.testng - testng - - - - org.eclipse.jetty - jetty-server - test - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/sqoop/atlas-sqoop-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - 
hive-bridge - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/sqoop - false - false - true - - - ${project.groupId} - sqoop-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - - / - ${project.basedir}/../../webapp/src/test/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - true - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/0010-base_model.json - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - - diff --git a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java deleted file mode 100644 index 0a8cb96675..0000000000 --- a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java +++ /dev/null @@ -1,258 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.sqoop.hook; - - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasConstants; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.hook.AtlasHookException; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.sqoop.model.SqoopDataTypes; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AtlasConfigurationUtil; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang3.StringUtils; -import org.apache.sqoop.SqoopJobDataPublisher; -import org.apache.sqoop.util.ImportException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.Map; -import java.util.HashMap; -import java.util.Properties; -import java.util.List; -import java.util.Date; - -import static org.apache.atlas.repository.Constants.SQOOP_SOURCE; - -/** - * AtlasHook sends lineage information to the AtlasSever. - */ -public class SqoopHook extends SqoopJobDataPublisher { - private static final Logger LOG = LoggerFactory.getLogger(SqoopHook.class); - - public static final String CLUSTER_NAME_KEY = "atlas.cluster.name"; - public static final String ATLAS_METADATA_NAMESPACE = "atlas.metadata.namespace"; - public static final String DEFAULT_CLUSTER_NAME = "primary"; - - public static final String USER = "userName"; - public static final String DB_STORE_TYPE = "dbStoreType"; - public static final String DB_STORE_USAGE = "storeUse"; - public static final String SOURCE = "source"; - public static final String DESCRIPTION = "description"; - public static final String STORE_URI = "storeUri"; - public static final String OPERATION = "operation"; - public static final String START_TIME = "startTime"; - public static final String END_TIME = "endTime"; - public static final String CMD_LINE_OPTS = "commandlineOpts"; - public static final String INPUTS = "inputs"; - public static final String OUTPUTS = "outputs"; - public static final String ATTRIBUTE_DB = "db"; - - public static final String RELATIONSHIP_HIVE_TABLE_DB = "hive_table_db"; - public static final String RELATIONSHIP_DATASET_PROCESS_INPUTS = "dataset_process_inputs"; - public static final String RELATIONSHIP_PROCESS_DATASET_OUTPUTS = "process_dataset_outputs"; - - private static final AtlasHookImpl atlasHook; - - static { - org.apache.hadoop.conf.Configuration.addDefaultResource("sqoop-site.xml"); - - atlasHook = new AtlasHookImpl(); - } - - @Override - public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException { - try { - Configuration atlasProperties = ApplicationProperties.get(); - String metadataNamespace = - AtlasConfigurationUtil.getRecentString(atlasProperties, ATLAS_METADATA_NAMESPACE, 
getClusterName(atlasProperties)); - - AtlasEntity entDbStore = toSqoopDBStoreEntity(data); - AtlasEntity entHiveDb = toHiveDatabaseEntity(metadataNamespace, data.getHiveDB()); - AtlasEntity entHiveTable = data.getHiveTable() != null ? toHiveTableEntity(entHiveDb, data.getHiveTable()) : null; - AtlasEntity entProcess = toSqoopProcessEntity(entDbStore, entHiveDb, entHiveTable, data, metadataNamespace); - - - AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo(entProcess); - - entities.addReferredEntity(entDbStore); - entities.addReferredEntity(entHiveDb); - if (entHiveTable != null) { - entities.addReferredEntity(entHiveTable); - } - - HookNotification message = new EntityCreateRequestV2(AtlasHook.getUser(), entities); - - atlasHook.sendNotification(message); - } catch(Exception e) { - LOG.error("SqoopHook.publish() failed", e); - - throw new AtlasHookException("SqoopHook.publish() failed.", e); - } - } - - private String getClusterName(Configuration config) { - return config.getString(CLUSTER_NAME_KEY, DEFAULT_CLUSTER_NAME); - } - - private AtlasEntity toHiveDatabaseEntity(String metadataNamespace, String dbName) { - AtlasEntity entHiveDb = new AtlasEntity(HiveDataTypes.HIVE_DB.getName()); - String qualifiedName = HiveMetaStoreBridge.getDBQualifiedName(metadataNamespace, dbName); - - entHiveDb.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace); - entHiveDb.setAttribute(AtlasClient.NAME, dbName); - entHiveDb.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName); - - return entHiveDb; - } - - private AtlasEntity toHiveTableEntity(AtlasEntity entHiveDb, String tableName) { - AtlasEntity entHiveTable = new AtlasEntity(HiveDataTypes.HIVE_TABLE.getName()); - String qualifiedName = HiveMetaStoreBridge.getTableQualifiedName((String)entHiveDb.getAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE), (String)entHiveDb.getAttribute(AtlasClient.NAME), tableName); - - entHiveTable.setAttribute(AtlasClient.NAME, tableName.toLowerCase()); - entHiveTable.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName); - entHiveTable.setRelationshipAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasRelatedObjectId(entHiveDb, RELATIONSHIP_HIVE_TABLE_DB)); - - return entHiveTable; - } - - private AtlasEntity toSqoopDBStoreEntity(SqoopJobDataPublisher.Data data) throws ImportException { - String table = data.getStoreTable(); - String query = data.getStoreQuery(); - - if (StringUtils.isBlank(table) && StringUtils.isBlank(query)) { - throw new ImportException("Both table and query cannot be empty for DBStoreInstance"); - } - - String usage = table != null ? "TABLE" : "QUERY"; - String source = table != null ? 
table : query; - String name = getSqoopDBStoreName(data); - - AtlasEntity entDbStore = new AtlasEntity(SqoopDataTypes.SQOOP_DBDATASTORE.getName()); - - entDbStore.setAttribute(AtlasClient.NAME, name); - entDbStore.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - entDbStore.setAttribute(SqoopHook.DB_STORE_TYPE, data.getStoreType()); - entDbStore.setAttribute(SqoopHook.DB_STORE_USAGE, usage); - entDbStore.setAttribute(SqoopHook.STORE_URI, data.getUrl()); - entDbStore.setAttribute(SqoopHook.SOURCE, source); - entDbStore.setAttribute(SqoopHook.DESCRIPTION, ""); - entDbStore.setAttribute(AtlasClient.OWNER, data.getUser()); - - return entDbStore; - } - - private AtlasEntity toSqoopProcessEntity(AtlasEntity entDbStore, AtlasEntity entHiveDb, AtlasEntity entHiveTable, - SqoopJobDataPublisher.Data data, String metadataNamespace) { - AtlasEntity entProcess = new AtlasEntity(SqoopDataTypes.SQOOP_PROCESS.getName()); - String sqoopProcessName = getSqoopProcessName(data, metadataNamespace); - Map sqoopOptionsMap = new HashMap<>(); - Properties options = data.getOptions(); - - for (Object k : options.keySet()) { - sqoopOptionsMap.put((String)k, (String) options.get(k)); - } - - entProcess.setAttribute(AtlasClient.NAME, sqoopProcessName); - entProcess.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, sqoopProcessName); - entProcess.setAttribute(SqoopHook.OPERATION, data.getOperation()); - - List sqoopObjects = Collections.singletonList(AtlasTypeUtil.getAtlasObjectId(entDbStore)); - List hiveObjects = Collections.singletonList(AtlasTypeUtil.getAtlasObjectId(entHiveTable != null ? entHiveTable : entHiveDb)); - - if (isImportOperation(data)) { - entProcess.setRelationshipAttribute(SqoopHook.INPUTS, AtlasTypeUtil.getAtlasRelatedObjectIdList(sqoopObjects, RELATIONSHIP_DATASET_PROCESS_INPUTS)); - entProcess.setRelationshipAttribute(SqoopHook.OUTPUTS, AtlasTypeUtil.getAtlasRelatedObjectIdList(hiveObjects, RELATIONSHIP_PROCESS_DATASET_OUTPUTS)); - } else { - entProcess.setRelationshipAttribute(SqoopHook.INPUTS, AtlasTypeUtil.getAtlasRelatedObjectIdList(hiveObjects, RELATIONSHIP_DATASET_PROCESS_INPUTS)); - entProcess.setRelationshipAttribute(SqoopHook.OUTPUTS, AtlasTypeUtil.getAtlasRelatedObjectIdList(sqoopObjects, RELATIONSHIP_PROCESS_DATASET_OUTPUTS)); - } - - entProcess.setAttribute(SqoopHook.USER, data.getUser()); - entProcess.setAttribute(SqoopHook.START_TIME, new Date(data.getStartTime())); - entProcess.setAttribute(SqoopHook.END_TIME, new Date(data.getEndTime())); - entProcess.setAttribute(SqoopHook.CMD_LINE_OPTS, sqoopOptionsMap); - - return entProcess; - } - - private boolean isImportOperation(SqoopJobDataPublisher.Data data) { - return data.getOperation().toLowerCase().equals("import"); - } - - static String getSqoopProcessName(Data data, String metadataNamespace) { - StringBuilder name = new StringBuilder(String.format("sqoop %s --connect %s", data.getOperation(), data.getUrl())); - - if (StringUtils.isNotEmpty(data.getHiveTable())) { - name.append(" --table ").append(data.getStoreTable()); - } else { - name.append(" --database ").append(data.getHiveDB()); - } - - if (StringUtils.isNotEmpty(data.getStoreQuery())) { - name.append(" --query ").append(data.getStoreQuery()); - } - - if (data.getHiveTable() != null) { - name.append(String.format(" --hive-%s --hive-database %s --hive-table %s --hive-cluster %s", data.getOperation(), data.getHiveDB().toLowerCase(), data.getHiveTable().toLowerCase(), metadataNamespace)); - } else { - name.append(String.format("--hive-%s --hive-database %s 
--hive-cluster %s", data.getOperation(), data.getHiveDB(), metadataNamespace)); - } - - return name.toString(); - } - - static String getSqoopDBStoreName(SqoopJobDataPublisher.Data data) { - StringBuilder name = new StringBuilder(String.format("%s --url %s", data.getStoreType(), data.getUrl())); - - if (StringUtils.isNotEmpty(data.getHiveTable())) { - name.append(" --table ").append(data.getStoreTable()); - } else { - name.append(" --database ").append(data.getHiveDB()); - } - - if (StringUtils.isNotEmpty(data.getStoreQuery())) { - name.append(" --query ").append(data.getStoreQuery()); - } - - return name.toString(); - } - - private static class AtlasHookImpl extends AtlasHook { - - public String getMessageSource() { - return SQOOP_SOURCE; - } - - public void sendNotification(HookNotification notification) { - super.notifyEntities(Collections.singletonList(notification), null); - } - } -} diff --git a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/model/SqoopDataTypes.java b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/model/SqoopDataTypes.java deleted file mode 100644 index e71220ab90..0000000000 --- a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/model/SqoopDataTypes.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.sqoop.model; - -/** - * Hive Data Types for model and bridge. - */ -public enum SqoopDataTypes { - - // Classes - SQOOP_DBDATASTORE, - SQOOP_PROCESS, - ; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/sqoop-bridge/src/test/java/org/apache/atlas/sqoop/hook/SqoopHookIT.java b/addons/sqoop-bridge/src/test/java/org/apache/atlas/sqoop/hook/SqoopHookIT.java deleted file mode 100644 index 71a8779dba..0000000000 --- a/addons/sqoop-bridge/src/test/java/org/apache/atlas/sqoop/hook/SqoopHookIT.java +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.sqoop.hook; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hive.model.HiveDataTypes; -import org.apache.atlas.sqoop.model.SqoopDataTypes; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.commons.configuration.Configuration; -import org.apache.sqoop.SqoopJobDataPublisher; -import org.slf4j.Logger; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.Properties; - -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -public class SqoopHookIT { - public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(SqoopHookIT.class); - private static final String CLUSTER_NAME = "primary"; - public static final String DEFAULT_DB = "default"; - private static final int MAX_WAIT_TIME = 2000; - private AtlasClient atlasClient; - - @BeforeClass - public void setUp() throws Exception { - //Set-up sqoop session - Configuration configuration = ApplicationProperties.get(); - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClient = new AtlasClient(configuration.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT), new String[]{"admin", "admin"}); - } else { - atlasClient = new AtlasClient(configuration.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT)); - } - } - - @Test - public void testSqoopImport() throws Exception { - SqoopJobDataPublisher.Data d = new SqoopJobDataPublisher.Data("import", "jdbc:mysql:///localhost/db", - "mysqluser", "mysql", "myTable", null, "default", "hiveTable", new Properties(), - System.currentTimeMillis() - 100, System.currentTimeMillis()); - SqoopHook hook = new SqoopHook(); - hook.publish(d); - Thread.sleep(1000); - String storeName = SqoopHook.getSqoopDBStoreName(d); - assertDBStoreIsRegistered(storeName); - String name = SqoopHook.getSqoopProcessName(d, CLUSTER_NAME); - assertSqoopProcessIsRegistered(name); - assertHiveTableIsRegistered(DEFAULT_DB, "hiveTable"); - } - - @Test - public void testSqoopExport() throws Exception { - SqoopJobDataPublisher.Data d = new SqoopJobDataPublisher.Data("export", "jdbc:mysql:///localhost/db", - "mysqluser", "mysql", "myTable", null, "default", "hiveTable", new Properties(), - System.currentTimeMillis() - 100, System.currentTimeMillis()); - SqoopHook hook = new SqoopHook(); - hook.publish(d); - Thread.sleep(1000); - String storeName = SqoopHook.getSqoopDBStoreName(d); - assertDBStoreIsRegistered(storeName); - String name = SqoopHook.getSqoopProcessName(d, CLUSTER_NAME); - assertSqoopProcessIsRegistered(name); - assertHiveTableIsRegistered(DEFAULT_DB, "hiveTable"); - } - - private String assertDBStoreIsRegistered(String storeName) throws Exception { - LOG.debug("Searching for db store {}", storeName); - return assertEntityIsRegistered(SqoopDataTypes.SQOOP_DBDATASTORE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, storeName, null); - } - - private String assertHiveTableIsRegistered(String dbName, String tableName) throws Exception { - LOG.debug("Searching for table {}.{}", dbName, tableName); - return assertEntityIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableName), null); - } - - private String assertSqoopProcessIsRegistered(String processName) 
throws Exception { - LOG.debug("Searching for sqoop process {}", processName); - return assertEntityIsRegistered(SqoopDataTypes.SQOOP_PROCESS.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, processName, null); - } - - protected String assertEntityIsRegistered(final String typeName, final String property, final String value, - final AssertPredicate assertPredicate) throws Exception { - waitFor(80000, new Predicate() { - @Override - public void evaluate() throws Exception { - Referenceable entity = atlasClient.getEntity(typeName, property, value); - assertNotNull(entity); - if (assertPredicate != null) { - assertPredicate.assertOnEntity(entity); - } - } - }); - Referenceable entity = atlasClient.getEntity(typeName, property, value); - return entity.getId()._getId(); - } - - public interface Predicate { - void evaluate() throws Exception; - } - - public interface AssertPredicate { - void assertOnEntity(Referenceable entity) throws Exception; - } - - protected void waitFor(int timeout, Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - long mustEnd = System.currentTimeMillis() + timeout; - - while (true) { - try { - predicate.evaluate(); - return; - } catch(Error | Exception e) { - if (System.currentTimeMillis() >= mustEnd) { - fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e); - } - LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e); - Thread.sleep(5000); - } - } - } -} diff --git a/addons/sqoop-bridge/src/test/resources/atlas-application.properties b/addons/sqoop-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index 898b69c999..0000000000 --- a/addons/sqoop-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,124 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. 
-atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/sqoop-bridge/src/test/resources/atlas-log4j.xml b/addons/sqoop-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 
262a710f7a..0000000000 --- a/addons/sqoop-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/sqoop-bridge/src/test/resources/hive-site.xml b/addons/sqoop-bridge/src/test/resources/hive-site.xml deleted file mode 100644 index f058c2edc2..0000000000 --- a/addons/sqoop-bridge/src/test/resources/hive-site.xml +++ /dev/null @@ -1,48 +0,0 @@ - - - - - - - hive.exec.post.hooks - org.apache.atlas.hive.hook.HiveHook - - - - hive.support.concurrency - false - - - - hive.metastore.warehouse.dir - ${user.dir}/target/metastore - - - - javax.jdo.option.ConnectionURL - jdbc:derby:${user.dir}/target/metastore_db;create=true - - - - atlas.hook.hive.synchronous - true - - - - fs.pfile.impl - org.apache.hadoop.fs.ProxyLocalFileSystem - - \ No newline at end of file diff --git a/addons/sqoop-bridge/src/test/resources/sqoop-site.xml b/addons/sqoop-bridge/src/test/resources/sqoop-site.xml deleted file mode 100644 index a63e7e4e8d..0000000000 --- a/addons/sqoop-bridge/src/test/resources/sqoop-site.xml +++ /dev/null @@ -1,190 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - atlas.rest.address - http://localhost:21000/ - - - - sqoop.job.data.publish.class - org.apache.atlas.sqoop.hook.SqoopHook - - - - atlas.cluster.name - primary - - diff --git a/addons/sqoop-bridge/src/test/resources/users-credentials.properties b/addons/sqoop-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/sqoop-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/addons/storm-bridge-shim/pom.xml b/addons/storm-bridge-shim/pom.xml deleted file mode 100755 index f4e75927f0..0000000000 --- a/addons/storm-bridge-shim/pom.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - storm-bridge-shim - Apache Atlas Storm Bridge Shim Module - Apache Atlas Storm Bridge Shim - jar - - - - - org.apache.atlas - atlas-plugin-classloader - - - - - org.apache.storm - storm-core - ${storm.version} - jar - - - org.slf4j - slf4j-log4j12 - - - javax.servlet - servlet-api - - - io.dropwizard.metrics - metrics-core - - - io.dropwizard.metrics - metrics-graphite - - - io.dropwizard.metrics - metrics-ganglia - - - - - - - io.dropwizard.metrics - metrics-core - ${dropwizard-metrics} - - - io.dropwizard.metrics - metrics-graphite - ${dropwizard-metrics} - - - io.dropwizard.metrics - metrics-ganglia - ${dropwizard-metrics} - - - - diff --git a/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java b/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java deleted file mode 100644 index 0ce7633aa2..0000000000 --- a/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.hook; - - -import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader; -import org.apache.storm.ISubmitterHook; -import org.apache.storm.generated.StormTopology; -import org.apache.storm.generated.TopologyInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - -/** - * Storm hook used for atlas entity registration. - */ -public class StormAtlasHook implements ISubmitterHook { - private static final Logger LOG = LoggerFactory.getLogger(StormAtlasHook.class); - - - private static final String ATLAS_PLUGIN_TYPE = "storm"; - private static final String ATLAS_STORM_HOOK_IMPL_CLASSNAME = "org.apache.atlas.storm.hook.StormAtlasHook"; - - private AtlasPluginClassLoader atlasPluginClassLoader = null; - private ISubmitterHook stormHook = null; - - - public StormAtlasHook() { - this.initialize(); - } - - @Override - public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology) - throws IllegalAccessException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology); - } - - try { - activatePluginClassLoader(); - stormHook.notify(topologyInfo, stormConf, stormTopology); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology); - } - } - - private void initialize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> StormAtlasHook.initialize()"); - } - - try { - atlasPluginClassLoader = AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass()); - - @SuppressWarnings("unchecked") - Class cls = (Class) Class - .forName(ATLAS_STORM_HOOK_IMPL_CLASSNAME, true, atlasPluginClassLoader); - - activatePluginClassLoader(); - - stormHook = cls.newInstance(); - } catch (Exception excp) { - LOG.error("Error instantiating Atlas hook implementation", excp); - } finally { - deactivatePluginClassLoader(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== StormAtlasHook.initialize()"); - } - } - - private void activatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.activate(); - } - } - - private void deactivatePluginClassLoader() { - if (atlasPluginClassLoader != null) { - atlasPluginClassLoader.deactivate(); - } - } -} diff --git a/addons/storm-bridge/pom.xml b/addons/storm-bridge/pom.xml deleted file mode 100644 index 77dce71536..0000000000 --- a/addons/storm-bridge/pom.xml +++ /dev/null @@ -1,591 +0,0 @@ - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - storm-bridge - Apache Atlas Storm Bridge Module - Apache Atlas Storm Bridge - jar - - - - - org.apache.atlas - atlas-client-v1 - - - - org.apache.atlas - atlas-client-v2 - - - - org.apache.atlas - atlas-notification - - - - org.apache.atlas - hive-bridge - - - - org.apache.hive - hive-exec - ${hive.version} - - - 
javax.servlet - * - - - provided - - - - org.apache.hbase - hbase-common - ${hbase.version} - - - javax.servlet - servlet-api - - - org.eclipse.jetty - * - - - - - - org.mockito - mockito-all - - - - - - org.apache.storm - storm-core - ${storm.version} - - - org.slf4j - log4j-over-slf4j - - - javax.servlet - servlet-api - - - io.dropwizard.metrics - metrics-core - - - io.dropwizard.metrics - metrics-graphite - - - io.dropwizard.metrics - metrics-ganglia - - - - - - - io.dropwizard.metrics - metrics-core - ${dropwizard-metrics} - - - io.dropwizard.metrics - metrics-graphite - ${dropwizard-metrics} - - - io.dropwizard.metrics - metrics-ganglia - ${dropwizard-metrics} - - - - - org.testng - testng - - - - org.eclipse.jetty - jetty-server - test - - - - org.eclipse.jetty - jetty-util - ${jetty.version} - test - - - - commons-collections - commons-collections - - - - - - org.apache.atlas - atlas-graphdb-impls - pom - test - - - - org.apache.atlas - atlas-webapp - war - test - - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - test - - - - - - dist - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-hook - package - - copy - - - ${project.build.directory}/dependency/hook/storm/atlas-storm-plugin-impl - false - false - true - - - ${project.groupId} - ${project.artifactId} - ${project.version} - - - ${project.groupId} - atlas-client-common - ${project.version} - - - ${project.groupId} - atlas-client-v1 - ${project.version} - - - ${project.groupId} - atlas-client-v2 - ${project.version} - - - ${project.groupId} - hive-bridge - ${project.version} - - - ${project.groupId} - atlas-intg - ${project.version} - - - ${project.groupId} - atlas-notification - ${project.version} - - - ${project.groupId} - atlas-common - ${project.version} - - - org.apache.kafka - kafka_${kafka.scala.binary.version} - ${kafka.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - aopalliance - aopalliance - ${aopalliance.version} - - - commons-configuration - commons-configuration - ${commons-conf.version} - - - org.apache.commons - commons-configuration2 - ${commons-conf2.version} - - - commons-logging - commons-logging - ${commons-logging.version} - - - commons-collections - commons-collections - ${commons-collections.version} - - - javax.inject - javax.inject - ${javax-inject.version} - - - org.codehaus.jettison - jettison - ${jettison.version} - - - org.codehaus.jettison - jettison - ${jettison.version} - - - com.thoughtworks.paranamer - paranamer - ${paranamer.version} - - - org.apache.hive - hive-exec - ${hive.version} - - - org.apache.hbase - hbase-common - ${hbase.version} - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - org.apache.hadoop - hadoop-auth - ${hadoop.version} - - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - org.codehaus.woodstox - stax2-api - ${codehaus.woodstox.stax2-api.version} - - - org.apache.hadoop - hadoop-hdfs-client - ${hadoop.hdfs-client.version} - - - - ch.qos.reload4j - reload4j - ${reload4j.version} - - - com.sun.jersey - jersey-json - ${jersey.version} - - - javax.ws.rs - jsr311-api - ${jsr.version} - - - com.fasterxml.woodstox - woodstox-core - ${woodstox-core.version} - - - - - - copy-hook-shim - package - - copy - - - ${project.build.directory}/dependency/hook/storm - false - false - true - - - ${project.groupId} - 
storm-bridge-shim - ${project.version} - - - ${project.groupId} - atlas-plugin-classloader - ${project.version} - - - - - - - - - - - - - - - org.eclipse.jetty - jetty-maven-plugin - ${jetty.version} - - ${skipTests} - - - 31000 - 60000 - - ../../webapp/target/atlas-webapp-${project.version}.war - true - ../../webapp/src/main/webapp - - / - ${project.basedir}/../../webapp/src/main/webapp/WEB-INF/web.xml - - true - - true - - atlas.home - ${project.build.directory} - - - atlas.conf - ${project.build.directory}/test-classes - - - atlas.data - ${project.build.directory}/data - - - atlas.log.dir - ${project.build.directory}/logs - - - atlas.log.file - application.log - - - log4j.configuration - file:///${project.build.directory}/test-classes/atlas-log4j.xml - - - atlas.graphdb.backend - ${graphdb.backend.impl} - - - embedded.solr.directory - ${project.build.directory} - - - solr.log.dir - ${project.build.directory}/logs - - - org.eclipse.jetty.annotations.maxWait - 5000 - - - atlas-stop - 31001 - ${jetty-maven-plugin.stopWait} - jar - - - - org.apache.curator - curator-client - ${curator.version} - - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - start-jetty - pre-integration-test - - - stop - deploy-war - - - true - - - - stop-jetty - post-integration-test - - stop - - - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-twiki - ${doxia.version} - - - org.apache.maven.doxia - doxia-core - ${doxia.version} - - - - - - site - - prepare-package - - - - false - false - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - false - - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-resources - validate - - copy-resources - - - ${basedir}/target/models - - - ${basedir}/../models - - 0000-Area0/0010-base_model.json - 1000-Hadoop/** - - - - - - - copy-solr-resources - validate - - copy-resources - - - ${project.build.directory}/solr - - - ${basedir}/../../test-tools/src/main/resources/solr - - - - - - - - - - diff --git a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java deleted file mode 100644 index 69d58d5743..0000000000 --- a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java +++ /dev/null @@ -1,416 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.hook; - -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityExtInfo; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.model.notification.HookNotification.EntityCreateRequestV2; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.HdfsNameServiceResolver; -import org.apache.commons.collections.CollectionUtils; -import org.apache.storm.ISubmitterHook; -import org.apache.storm.generated.Bolt; -import org.apache.storm.generated.SpoutSpec; -import org.apache.storm.generated.StormTopology; -import org.apache.storm.generated.TopologyInfo; -import org.apache.storm.utils.Utils; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasConstants; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.hook.AtlasHook; -import org.apache.atlas.storm.model.StormDataTypes; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.slf4j.Logger; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Date; - -import static org.apache.atlas.repository.Constants.STORM_SOURCE; - -/** - * StormAtlasHook sends storm topology metadata information to Atlas - * via a Kafka Broker for durability. - *
- * This is based on the assumption that the same topology name is used - * for the various lifecycle stages. - */ -public class StormAtlasHook extends AtlasHook implements ISubmitterHook { - public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(StormAtlasHook.class); - - public static final String ANONYMOUS_OWNER = "anonymous"; // if Storm topology does not contain the owner instance; possible if Storm is running in unsecure mode. - public static final String HBASE_NAMESPACE_DEFAULT = "default"; - public static final String ATTRIBUTE_DB = "db"; - - public static final String RELATIONSHIP_STORM_TOPOLOGY_NODES = "storm_topology_nodes"; - public static final String RELATIONSHIP_DATASET_PROCESS_INPUTS = "dataset_process_inputs"; - public static final String RELATIONSHIP_PROCESS_DATASET_OUTPUTS = "process_dataset_outputs"; - public static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT = "%s:%s@%s"; - - /** - * This is the client-side hook that storm fires when a topology is added. - * - * @param topologyInfo topology info - * @param stormConf configuration - * @param stormTopology a storm topology - */ - @Override - public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology) { - LOG.info("Collecting metadata for a new storm topology: {}", topologyInfo.get_name()); - - try { - String user = getUser(topologyInfo.get_owner(), null); - AtlasEntity topology = createTopologyInstance(topologyInfo, stormConf); - AtlasEntitiesWithExtInfo entity = new AtlasEntitiesWithExtInfo(topology); - - addTopologyDataSets(stormTopology, topologyInfo.get_owner(), stormConf, topology, entity); - - // create the graph for the topology - List graphNodes = createTopologyGraph(stormTopology, stormTopology.get_spouts(), stormTopology.get_bolts()); - - if (CollectionUtils.isNotEmpty(graphNodes)) { - // add the connection from topology to the graph - topology.setRelationshipAttribute("nodes", AtlasTypeUtil.getAtlasRelatedObjectIds(graphNodes, RELATIONSHIP_STORM_TOPOLOGY_NODES)); - - for (AtlasEntity graphNode : graphNodes) { - entity.addReferredEntity(graphNode); - } - } - - List hookNotifications = Collections.singletonList(new EntityCreateRequestV2(user, entity)); - - notifyEntities(hookNotifications, null); - } catch (Exception e) { - throw new RuntimeException("Atlas hook is unable to process the topology.", e); - } - } - - private AtlasEntity createTopologyInstance(TopologyInfo topologyInfo, Map stormConf) { - AtlasEntity topology = new AtlasEntity(StormDataTypes.STORM_TOPOLOGY.getName()); - String owner = topologyInfo.get_owner(); - - if (StringUtils.isEmpty(owner)) { - owner = ANONYMOUS_OWNER; - } - - topology.setAttribute("id", topologyInfo.get_id()); - topology.setAttribute(AtlasClient.NAME, topologyInfo.get_name()); - topology.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name()); - topology.setAttribute(AtlasClient.OWNER, owner); - topology.setAttribute("startTime", new Date(System.currentTimeMillis())); - topology.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getMetadataNamespace()); - - return topology; - } - - private void addTopologyDataSets(StormTopology stormTopology, String topologyOwner, Map stormConf, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) { - // add each spout as an input data set - addTopologyInputs(stormTopology.get_spouts(), stormConf, topologyOwner, topology, entityExtInfo); - - // add the appropriate bolts as output data sets - addTopologyOutputs(stormTopology, topologyOwner, stormConf, topology, 
entityExtInfo); - } - - private void addTopologyInputs(Map spouts, Map stormConf, String topologyOwner, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) { - List inputs = new ArrayList<>(); - - for (Map.Entry entry : spouts.entrySet()) { - Serializable instance = Utils.javaDeserialize(entry.getValue().get_spout_object().get_serialized_java(), Serializable.class); - String dsType = instance.getClass().getSimpleName(); - AtlasEntity dsEntity = addDataSet(dsType, topologyOwner, instance, stormConf, entityExtInfo); - - if (dsEntity != null) { - inputs.add(dsEntity); - } - } - - topology.setRelationshipAttribute("inputs", AtlasTypeUtil.getAtlasRelatedObjectIds(inputs, RELATIONSHIP_DATASET_PROCESS_INPUTS)); - } - - private void addTopologyOutputs(StormTopology stormTopology, String topologyOwner, Map stormConf, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) { - List outputs = new ArrayList<>(); - Map bolts = stormTopology.get_bolts(); - Set boltNames = StormTopologyUtil.getTerminalUserBoltNames(stormTopology); - - for (String boltName : boltNames) { - Serializable instance = Utils.javaDeserialize(bolts.get(boltName).get_bolt_object().get_serialized_java(), Serializable.class); - String dsType = instance.getClass().getSimpleName(); - AtlasEntity dsEntity = addDataSet(dsType, topologyOwner, instance, stormConf, entityExtInfo); - - if (dsEntity != null) { - outputs.add(dsEntity); - } - } - - topology.setRelationshipAttribute("outputs", AtlasTypeUtil.getAtlasRelatedObjectIds(outputs, RELATIONSHIP_PROCESS_DATASET_OUTPUTS)); - } - - private AtlasEntity addDataSet(String dataSetType, String topologyOwner, Serializable instance, Map stormConf, AtlasEntityExtInfo entityExtInfo) { - Map config = StormTopologyUtil.getFieldValues(instance, true, null); - AtlasEntity ret = null; - String metadataNamespace = getMetadataNamespace(); - - // todo: need to redo this with a config driven approach - switch (dataSetType) { - case "KafkaSpout": { - String topicName = config.get("KafkaSpout.kafkaSpoutConfig.translator.topic"); - String uri = config.get("KafkaSpout.kafkaSpoutConfig.kafkaProps.bootstrap.servers"); - - if (StringUtils.isEmpty(topicName)) { - topicName = config.get("KafkaSpout._spoutConfig.topic"); - } - - if (StringUtils.isEmpty(uri)) { - uri = config.get("KafkaSpout._spoutConfig.hosts.brokerZkStr"); - } - - if (StringUtils.isEmpty(topologyOwner)) { - topologyOwner = ANONYMOUS_OWNER; - } - - if (topicName == null) { - LOG.error("Kafka topic name not found"); - } else { - ret = new AtlasEntity(StormDataTypes.KAFKA_TOPIC.getName()); - - ret.setAttribute("topic", topicName); - ret.setAttribute("uri", uri); - ret.setAttribute(AtlasClient.OWNER, topologyOwner); - ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(metadataNamespace, topicName)); - ret.setAttribute(AtlasClient.NAME, topicName); - } - } - break; - - case "HBaseBolt": { - final String hbaseTableName = config.get("HBaseBolt.tableName"); - String uri = config.get("hbase.rootdir"); - - if (StringUtils.isEmpty(uri)) { - uri = hbaseTableName; - } - - metadataNamespace = extractComponentMetadataNamespace(HBaseConfiguration.create(), stormConf); - - if (hbaseTableName == null) { - LOG.error("HBase table name not found"); - } else { - ret = new AtlasEntity(StormDataTypes.HBASE_TABLE.getName()); - - ret.setAttribute("uri", hbaseTableName); - ret.setAttribute(AtlasClient.NAME, uri); - ret.setAttribute(AtlasClient.OWNER, stormConf.get("storm.kerberos.principal")); - //TODO - Hbase Namespace is hardcoded to 
'default'. need to check how to get this or is it already part of tableName - ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(metadataNamespace, HBASE_NAMESPACE_DEFAULT, hbaseTableName)); - } - } - break; - - case "HdfsBolt": { - final String hdfsUri = config.get("HdfsBolt.rotationActions") == null ? config.get("HdfsBolt.fileNameFormat.path") : config.get("HdfsBolt.rotationActions"); - final String hdfsPathStr = config.get("HdfsBolt.fsUrl") + hdfsUri; - final Path hdfsPath = new Path(hdfsPathStr); - final String nameServiceID = HdfsNameServiceResolver.getNameServiceIDForPath(hdfsPathStr); - - ret = new AtlasEntity(HiveMetaStoreBridge.HDFS_PATH); - - ret.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace); - ret.setAttribute(AtlasClient.OWNER, stormConf.get("hdfs.kerberos.principal")); - ret.setAttribute(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(hdfsPath).toString().toLowerCase()); - - if (StringUtils.isNotEmpty(nameServiceID)) { - String updatedPath = HdfsNameServiceResolver.getPathWithNameServiceID(hdfsPathStr); - - ret.setAttribute("path", updatedPath); - ret.setAttribute("nameServiceId", nameServiceID); - ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(metadataNamespace, updatedPath)); - } else { - ret.setAttribute("path", hdfsPathStr); - ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHdfsPathQualifiedName(metadataNamespace, hdfsPathStr)); - } - } - break; - - case "HiveBolt": { - metadataNamespace = extractComponentMetadataNamespace(new HiveConf(), stormConf); - - final String dbName = config.get("HiveBolt.options.databaseName"); - final String tblName = config.get("HiveBolt.options.tableName"); - - if (dbName == null || tblName ==null) { - LOG.error("Hive database or table name not found"); - } else { - AtlasEntity dbEntity = new AtlasEntity("hive_db"); - - dbEntity.setAttribute(AtlasClient.NAME, dbName); - dbEntity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getDBQualifiedName(metadataNamespace, dbName)); - dbEntity.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, metadataNamespace); - - entityExtInfo.addReferredEntity(dbEntity); - - // todo: verify if hive table has everything needed to retrieve existing table - ret = new AtlasEntity("hive_table"); - - ret.setAttribute(AtlasClient.NAME, tblName); - ret.setRelationshipAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasRelatedObjectId(dbEntity, "hive_table_db")); - ret.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getTableQualifiedName(metadataNamespace, dbName, tblName)); - } - } - break; - - default: - // custom node - create a base dataset class with name attribute - //TODO - What should we do for custom data sets. Not sure what name we can set here? 
- return null; - } - - if (ret != null) { - entityExtInfo.addReferredEntity(ret); - } - - return ret; - } - - private List createTopologyGraph(StormTopology stormTopology, Map spouts, Map bolts) { - // Add graph of nodes in the topology - Map nodeEntities = new HashMap<>(); - - addSpouts(spouts, nodeEntities); - addBolts(bolts, nodeEntities); - - addGraphConnections(stormTopology, nodeEntities); - - return new ArrayList<>(nodeEntities.values()); - } - - private void addSpouts(Map spouts, Map nodeEntities) { - for (Map.Entry entry : spouts.entrySet()) { - String spoutName = entry.getKey(); - AtlasEntity spout = createSpoutInstance(spoutName, entry.getValue()); - - nodeEntities.put(spoutName, spout); - } - } - - private void addBolts(Map bolts, Map nodeEntities) { - for (Map.Entry entry : bolts.entrySet()) { - String boltName = entry.getKey(); - AtlasEntity boltInstance = createBoltInstance(boltName, entry.getValue()); - - nodeEntities.put(boltName, boltInstance); - } - } - - private AtlasEntity createSpoutInstance(String spoutName, SpoutSpec stormSpout) { - AtlasEntity spout = new AtlasEntity(StormDataTypes.STORM_SPOUT.getName()); - Serializable instance = Utils.javaDeserialize(stormSpout.get_spout_object().get_serialized_java(), Serializable.class); - Map flatConfigMap = StormTopologyUtil.getFieldValues(instance, true, null); - - spout.setAttribute(AtlasClient.NAME, spoutName); - spout.setAttribute("driverClass", instance.getClass().getName()); - spout.setAttribute("conf", flatConfigMap); - - return spout; - } - - private AtlasEntity createBoltInstance(String boltName, Bolt stormBolt) { - AtlasEntity bolt = new AtlasEntity(StormDataTypes.STORM_BOLT.getName()); - Serializable instance = Utils.javaDeserialize(stormBolt.get_bolt_object().get_serialized_java(), Serializable.class); - Map flatConfigMap = StormTopologyUtil.getFieldValues(instance, true, null); - - bolt.setAttribute(AtlasClient.NAME, boltName); - bolt.setAttribute("driverClass", instance.getClass().getName()); - bolt.setAttribute("conf", flatConfigMap); - - return bolt; - } - - private void addGraphConnections(StormTopology stormTopology, Map nodeEntities) { - // adds connections between spouts and bolts - Map> adjacencyMap = StormTopologyUtil.getAdjacencyMap(stormTopology, true); - - for (Map.Entry> entry : adjacencyMap.entrySet()) { - String nodeName = entry.getKey(); - Set adjacencyList = adjacencyMap.get(nodeName); - - if (CollectionUtils.isEmpty(adjacencyList)) { - continue; - } - - // add outgoing links - AtlasEntity node = nodeEntities.get(nodeName); - List outputs = new ArrayList<>(adjacencyList.size()); - - outputs.addAll(adjacencyList); - node.setAttribute("outputs", outputs); - - // add incoming links - for (String adjacentNodeName : adjacencyList) { - AtlasEntity adjacentNode = nodeEntities.get(adjacentNodeName); - @SuppressWarnings("unchecked") - List inputs = (List) adjacentNode.getAttribute("inputs"); - - if (inputs == null) { - inputs = new ArrayList<>(); - } - - inputs.add(nodeName); - adjacentNode.setAttribute("inputs", inputs); - } - } - } - - public static String getKafkaTopicQualifiedName(String metadataNamespace, String topicName) { - return String.format("%s@%s", topicName.toLowerCase(), metadataNamespace); - } - - public static String getHbaseTableQualifiedName(String metadataNamespace, String nameSpace, String tableName) { - return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace.toLowerCase(), tableName.toLowerCase(), metadataNamespace); - } - - public static String 
getHdfsPathQualifiedName(String metadataNamespace, String hdfsPath) { - return String.format("%s@%s", hdfsPath.toLowerCase(), metadataNamespace); - } - - private String extractComponentMetadataNamespace(Configuration configuration, Map stormConf) { - String clusterName = configuration.get(CLUSTER_NAME_KEY, null); - - if (clusterName == null) { - clusterName = getMetadataNamespace(); - } - - return clusterName; - } - - @Override - public String getMessageSource() { - return STORM_SOURCE; - } -} \ No newline at end of file diff --git a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java deleted file mode 100644 index b903dbc694..0000000000 --- a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java +++ /dev/null @@ -1,248 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.hook; - -import org.apache.commons.lang.StringUtils; -import org.apache.storm.generated.Bolt; -import org.apache.storm.generated.GlobalStreamId; -import org.apache.storm.generated.Grouping; -import org.apache.storm.generated.StormTopology; -import com.google.common.base.Joiner; -import org.slf4j.Logger; - -import java.lang.reflect.Field; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -/** - * A storm topology utility class. - */ -public final class StormTopologyUtil { - public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(StormTopologyUtil.class); - - private StormTopologyUtil() { - } - - public static Set getTerminalUserBoltNames(StormTopology topology) { - Set terminalBolts = new HashSet<>(); - Set inputs = new HashSet<>(); - for (Map.Entry entry : topology.get_bolts().entrySet()) { - String name = entry.getKey(); - Set inputsForBolt = entry.getValue().get_common().get_inputs().keySet(); - if (!isSystemComponent(name)) { - for (GlobalStreamId streamId : inputsForBolt) { - inputs.add(streamId.get_componentId()); - } - } - } - - for (String boltName : topology.get_bolts().keySet()) { - if (!isSystemComponent(boltName) && !inputs.contains(boltName)) { - terminalBolts.add(boltName); - } - } - - return terminalBolts; - } - - public static boolean isSystemComponent(String componentName) { - return componentName.startsWith("__"); - } - - public static Map> getAdjacencyMap(StormTopology topology, - boolean removeSystemComponent) { - Map> adjacencyMap = new HashMap<>(); - - for (Map.Entry entry : topology.get_bolts().entrySet()) { - String boltName = entry.getKey(); - Map inputs = entry.getValue().get_common().get_inputs(); - for (Map.Entry input : inputs.entrySet()) { - String inputComponentId = input.getKey().get_componentId(); - Set components = adjacencyMap.containsKey(inputComponentId) - ? adjacencyMap.get(inputComponentId) : new HashSet(); - components.add(boltName); - components = removeSystemComponent ? 
removeSystemComponents(components) - : components; - if (!removeSystemComponent || !isSystemComponent(inputComponentId)) { - adjacencyMap.put(inputComponentId, components); - } - } - } - - return adjacencyMap; - } - - public static Set removeSystemComponents(Set components) { - Set userComponents = new HashSet<>(); - for (String component : components) { - if (!isSystemComponent(component)) - userComponents.add(component); - } - - return userComponents; - } - - private static final Set WRAPPER_TYPES = new HashSet() {{ - add(Boolean.class); - add(Character.class); - add(Byte.class); - add(Short.class); - add(Integer.class); - add(Long.class); - add(Float.class); - add(Double.class); - add(Void.class); - add(String.class); - }}; - - public static boolean isWrapperType(Class clazz) { - return WRAPPER_TYPES.contains(clazz); - } - - public static boolean isCollectionType(Class clazz) { - return Collection.class.isAssignableFrom(clazz); - } - - public static boolean isMapType(Class clazz) { - return Map.class.isAssignableFrom(clazz); - } - - public static Map getFieldValues(Object instance, - boolean prependClassName, - Set objectsToSkip) { - if (objectsToSkip == null) { - objectsToSkip = new HashSet<>(); - } - - Map output = new HashMap<>(); - - try { - if (objectsToSkip.add(instance)) { - Class clazz = instance.getClass(); - for (Class c = clazz; c != null; c = c.getSuperclass()) { - Field[] fields = c.getDeclaredFields(); - for (Field field : fields) { - if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) { - continue; - } - - String key; - if (prependClassName) { - key = String.format("%s.%s", clazz.getSimpleName(), field.getName()); - } else { - key = field.getName(); - } - - boolean accessible = field.isAccessible(); - if (!accessible) { - field.setAccessible(true); - } - Object fieldVal = field.get(instance); - if (fieldVal == null) { - continue; - } else if (fieldVal.getClass().isPrimitive() || - isWrapperType(fieldVal.getClass())) { - if (toString(fieldVal, false).isEmpty()) continue; - output.put(key, toString(fieldVal, false)); - } else if (isMapType(fieldVal.getClass())) { - //TODO: check if it makes more sense to just stick to json - // like structure instead of a flatten output. - Map map = (Map) fieldVal; - for (Object entry : map.entrySet()) { - Object mapKey = ((Map.Entry) entry).getKey(); - Object mapVal = ((Map.Entry) entry).getValue(); - - String keyStr = getString(mapKey, false, objectsToSkip); - String valStr = getString(mapVal, false, objectsToSkip); - if (StringUtils.isNotEmpty(valStr)) { - output.put(String.format("%s.%s", key, keyStr), valStr); - } - } - } else if (isCollectionType(fieldVal.getClass())) { - //TODO check if it makes more sense to just stick to - // json like structure instead of a flatten output. 
- Collection collection = (Collection) fieldVal; - if (collection.size() == 0) continue; - String outStr = ""; - for (Object o : collection) { - outStr += getString(o, false, objectsToSkip) + ","; - } - if (outStr.length() > 0) { - outStr = outStr.substring(0, outStr.length() - 1); - } - output.put(key, String.format("%s", outStr)); - } else { - Map nestedFieldValues = getFieldValues(fieldVal, false, objectsToSkip); - for (Map.Entry entry : nestedFieldValues.entrySet()) { - output.put(String.format("%s.%s", key, entry.getKey()), entry.getValue()); - } - } - if (!accessible) { - field.setAccessible(false); - } - } - } - } - } - catch (Exception e){ - LOG.warn("Exception while constructing topology", e); - } - return output; - } - - private static String getString(Object instance, - boolean wrapWithQuote, - Set objectsToSkip) { - if (instance == null) { - return null; - } else if (instance.getClass().isPrimitive() || isWrapperType(instance.getClass())) { - return toString(instance, wrapWithQuote); - } else { - return getString(getFieldValues(instance, false, objectsToSkip), wrapWithQuote); - } - } - - private static String getString(Map flattenFields, boolean wrapWithQuote) { - String outStr = ""; - if (flattenFields != null && !flattenFields.isEmpty()) { - if (wrapWithQuote) { - outStr += "\"" + Joiner.on(",").join(flattenFields.entrySet()) + "\","; - } else { - outStr += Joiner.on(",").join(flattenFields.entrySet()) + ","; - } - } - if (outStr.length() > 0) { - outStr = outStr.substring(0, outStr.length() - 1); - } - return outStr; - } - - private static String toString(Object instance, boolean wrapWithQuote) { - if (instance instanceof String) - if (wrapWithQuote) - return "\"" + instance + "\""; - else - return instance.toString(); - else - return instance.toString(); - } -} diff --git a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/model/StormDataTypes.java b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/model/StormDataTypes.java deleted file mode 100644 index 7eb1e3cb87..0000000000 --- a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/model/StormDataTypes.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.model; - - -/** - * Storm Data Types for model and hook. 
- */ -public enum StormDataTypes { - - // Topology Classes - STORM_TOPOLOGY, // represents the topology containing the DAG - - STORM_NODE, // base abstraction for producer and processor - STORM_SPOUT, // data producer node having only outputs - STORM_BOLT, // data processing node having both inputs and outputs - - // Data Sets - KAFKA_TOPIC, // kafka data set - JMS_TOPIC, // jms data set - HBASE_TABLE, // hbase table data set - ; - - public String getName() { - return name().toLowerCase(); - } -} diff --git a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java b/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java deleted file mode 100644 index e11e1b8b0a..0000000000 --- a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.hook; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.hive.bridge.HiveMetaStoreBridge; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.storm.model.StormDataTypes; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.configuration.Configuration; -import org.apache.storm.ILocalCluster; -import org.apache.storm.generated.StormTopology; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -@Test -public class StormAtlasHookIT { - - public static final Logger LOG = LoggerFactory.getLogger(StormAtlasHookIT.class); - - private static final String ATLAS_URL = "http://localhost:21000/"; - private static final String TOPOLOGY_NAME = "word-count"; - - private ILocalCluster stormCluster; - private AtlasClient atlasClient; - - @BeforeClass - public void setUp() throws Exception { - // start a local storm cluster - stormCluster = StormTestUtil.createLocalStormCluster(); - LOG.info("Created a storm local cluster"); - - Configuration configuration = ApplicationProperties.get(); - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClient = new AtlasClient(configuration.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT), new String[]{"admin", "admin"}); - } else { - atlasClient = new AtlasClient(configuration.getStringArray(HiveMetaStoreBridge.ATLAS_ENDPOINT)); - } - } - - - @AfterClass - public void tearDown() throws Exception { - LOG.info("Shutting down storm local cluster"); - stormCluster.shutdown(); - - atlasClient = null; - } 
- - @Test - public void testAddEntities() throws Exception { - StormTopology stormTopology = StormTestUtil.createTestTopology(); - StormTestUtil.submitTopology(stormCluster, TOPOLOGY_NAME, stormTopology); - LOG.info("Submitted topology {}", TOPOLOGY_NAME); - - // todo: test if topology metadata is registered in atlas - String guid = getTopologyGUID(); - Assert.assertNotNull(guid); - LOG.info("GUID is {}", guid); - - Referenceable topologyReferenceable = atlasClient.getEntity(guid); - Assert.assertNotNull(topologyReferenceable); - } - - private String getTopologyGUID() throws Exception { - LOG.debug("Searching for topology {}", TOPOLOGY_NAME); - String query = String.format("from %s where name = \"%s\"", - StormDataTypes.STORM_TOPOLOGY.getName(), TOPOLOGY_NAME); - - JsonNode results = atlasClient.search(query, 10, 0); - JsonNode row = results.get(0); - - return row.has("$id$") ? row.get("$id$").get("id").asText() : null; - } -} diff --git a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormTestUtil.java b/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormTestUtil.java deleted file mode 100644 index d869f18cd7..0000000000 --- a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormTestUtil.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.storm.hook; - -import org.apache.storm.Config; -import org.apache.storm.ILocalCluster; -import org.apache.storm.Testing; -import org.apache.storm.generated.StormTopology; -import org.apache.storm.testing.TestGlobalCount; -import org.apache.storm.testing.TestWordCounter; -import org.apache.storm.testing.TestWordSpout; -import org.apache.storm.topology.TopologyBuilder; -import org.apache.storm.utils.Utils; - -import java.util.HashMap; - -/** - * An until to create a test topology. 
- */ -final class StormTestUtil { - - private StormTestUtil() { - } - - public static ILocalCluster createLocalStormCluster() { - // start a local storm cluster - HashMap localClusterConf = new HashMap<>(); - localClusterConf.put("nimbus-daemon", true); - return Testing.getLocalCluster(localClusterConf); - } - - public static StormTopology createTestTopology() { - TopologyBuilder builder = new TopologyBuilder(); - builder.setSpout("words", new TestWordSpout(), 10); - builder.setBolt("count", new TestWordCounter(), 3).shuffleGrouping("words"); - builder.setBolt("globalCount", new TestGlobalCount(), 2).shuffleGrouping("count"); - - return builder.createTopology(); - } - - public static Config submitTopology(ILocalCluster stormCluster, String topologyName, - StormTopology stormTopology) throws Exception { - Config stormConf = new Config(); - stormConf.putAll(Utils.readDefaultConfig()); - stormConf.put("storm.cluster.mode", "local"); - stormConf.setDebug(true); - stormConf.setMaxTaskParallelism(3); - stormConf.put(Config.STORM_TOPOLOGY_SUBMISSION_NOTIFIER_PLUGIN, - org.apache.atlas.storm.hook.StormAtlasHook.class.getName()); - - stormCluster.submitTopology(topologyName, stormConf, stormTopology); - - Thread.sleep(10000); - return stormConf; - } -} diff --git a/addons/storm-bridge/src/test/resources/atlas-application.properties b/addons/storm-bridge/src/test/resources/atlas-application.properties deleted file mode 100644 index b822578947..0000000000 --- a/addons/storm-bridge/src/test/resources/atlas-application.properties +++ /dev/null @@ -1,126 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -######### Atlas Server Configs ######### -atlas.rest.address=http://localhost:31000 - - - -######### Graph Database Configs ######### - - -# Graph database implementation. Value inserted by maven. 
-atlas.graphdb.backend=org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase - -# Graph Storage -atlas.graph.storage.backend=berkeleyje - -# Entity repository implementation -atlas.EntityAuditRepository.impl=org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - -# Graph Search Index Backend -atlas.graph.index.search.backend=solr - -#Berkeley storage directory -atlas.graph.storage.directory=${sys:atlas.data}/berkley - -#hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here - -atlas.graph.storage.hostname=${graph.storage.hostname} -atlas.graph.storage.hbase.regions-per-server=1 -atlas.graph.storage.lock.wait-time=10000 - -#ElasticSearch -atlas.graph.index.search.directory=${sys:atlas.data}/es -atlas.graph.index.search.elasticsearch.client-only=false -atlas.graph.index.search.elasticsearch.local-mode=true -atlas.graph.index.search.elasticsearch.create.sleep=2000 - -# Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address} -atlas.graph.index.search.solr.embedded=true -atlas.graph.index.search.max-result-set-size=150 - - -######### Notification Configs ######### -atlas.notification.embedded=true - -atlas.kafka.zookeeper.connect=localhost:19026 -atlas.kafka.bootstrap.servers=localhost:19027 -atlas.kafka.data=${sys:atlas.data}/kafka -atlas.kafka.zookeeper.session.timeout.ms=4000 -atlas.kafka.zookeeper.sync.time.ms=20 -atlas.kafka.consumer.timeout.ms=4000 -atlas.kafka.auto.commit.interval.ms=100 -atlas.kafka.hook.group.id=atlas -atlas.kafka.entities.group.id=atlas_entities -#atlas.kafka.auto.commit.enable=false - -atlas.kafka.enable.auto.commit=false -atlas.kafka.auto.offset.reset=earliest -atlas.kafka.session.timeout.ms=30000 -atlas.kafka.offsets.topic.replication.factor=1 - - - -######### Entity Audit Configs ######### -atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS -atlas.audit.zookeeper.session.timeout.ms=1000 -atlas.audit.hbase.zookeeper.quorum=localhost -atlas.audit.hbase.zookeeper.property.clientPort=19026 - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false -atlas.server.https.port=31443 - -######### Security Properties ######### - -hbase.security.authentication=simple - -atlas.hook.falcon.synchronous=true - -######### JAAS Configuration ######## - -atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule -atlas.jaas.KafkaClient.loginModuleControlFlag = required -atlas.jaas.KafkaClient.option.useKeyTab = true -atlas.jaas.KafkaClient.option.storeKey = true -atlas.jaas.KafkaClient.option.serviceName = kafka -atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab -atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM - -######### High Availability Configuration ######## -atlas.server.ha.enabled=false -#atlas.server.ids=id1 -#atlas.server.address.id1=localhost:21000 - -######### Atlas Authorization ######### -atlas.authorizer.impl=none -# atlas.authorizer.impl=simple -# atlas.authorizer.simple.authz.policy.file=atlas-simple-authz-policy.json - -######### Atlas Authentication ######### -atlas.authentication.method.file=true -atlas.authentication.method.ldap.type=none -atlas.authentication.method.kerberos=false -# atlas.authentication.method.file.filename=users-credentials.properties diff --git a/addons/storm-bridge/src/test/resources/atlas-log4j.xml b/addons/storm-bridge/src/test/resources/atlas-log4j.xml deleted file mode 100755 index 
262a710f7a..0000000000 --- a/addons/storm-bridge/src/test/resources/atlas-log4j.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/addons/storm-bridge/src/test/resources/users-credentials.properties b/addons/storm-bridge/src/test/resources/users-credentials.properties deleted file mode 100644 index 5046dbaf64..0000000000 --- a/addons/storm-bridge/src/test/resources/users-credentials.properties +++ /dev/null @@ -1,3 +0,0 @@ -#username=group::sha256-password -admin=ADMIN::a4a88c0872bf652bb9ed803ece5fd6e82354838a9bf59ab4babb1dab322154e1 -rangertagsync=RANGER_TAG_SYNC::0afe7a1968b07d4c3ff4ed8c2d809a32ffea706c66cd795ead9048e81cfaf034 diff --git a/atlas-examples/pom.xml b/atlas-examples/pom.xml deleted file mode 100644 index 1ff213cb38..0000000000 --- a/atlas-examples/pom.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - - 4.0.0 - - atlas-examples - pom - - sample-app - - - - \ No newline at end of file diff --git a/atlas-examples/sample-app/README.md b/atlas-examples/sample-app/README.md deleted file mode 100644 index 65ac54c3dc..0000000000 --- a/atlas-examples/sample-app/README.md +++ /dev/null @@ -1,13 +0,0 @@ -## Introduction -This is a simple application to demonstrate the use of AtlasClient. SampleApp provides examples to use following Client APIs. -1. TypeDef -2. Entity -3. Lineage -4. Search -5. Glossary - -## Setting up -1. cd ~/Desktop/atlas/atlas-examples/sample-app (the location where you have downloaded sample-app) -2. mvn clean install -3. mvn exec:java -4. Then it will ask you to enter AtlasServer URL, username and password. \ No newline at end of file diff --git a/atlas-examples/sample-app/pom.xml b/atlas-examples/sample-app/pom.xml deleted file mode 100644 index aa3c6374fe..0000000000 --- a/atlas-examples/sample-app/pom.xml +++ /dev/null @@ -1,104 +0,0 @@ - - - - - atlas-examples - org.apache.atlas - 3.0.0-SNAPSHOT - - 4.0.0 - - sample-app - - - - org.apache.atlas - atlas-client-common - ${project.version} - - - org.apache.atlas - atlas-common - ${project.version} - - - - org.apache.atlas - atlas-client-v2 - ${project.version} - - - - org.apache.atlas - atlas-intg - ${project.version} - - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.version} - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-annotations - - - - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - - - - - - org.codehaus.mojo - exec-maven-plugin - - - - java - - - - - org.apache.atlas.examples.sampleapp.SampleApp - - - - - - \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/DiscoveryExample.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/DiscoveryExample.java deleted file mode 100644 index d309c9ed44..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/DiscoveryExample.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.discovery.AtlasQuickSearchResult; -import org.apache.atlas.model.discovery.AtlasSearchResult; -import org.apache.atlas.model.instance.AtlasEntityHeader; - -import java.util.List; - -public class DiscoveryExample { - private static final String[] DSL_QUERIES = new String[] { "from DataSet", "from Process" }; - - private final AtlasClientV2 client; - - DiscoveryExample(AtlasClientV2 client) { - this.client = client; - } - - public void testSearch() { - for (String dslQuery : DSL_QUERIES) { - try { - AtlasSearchResult result = client.dslSearchWithParams(dslQuery, 10, 0); - List entities = result != null ? result.getEntities() : null; - int resultCount = entities == null ? 0 : entities.size(); - - SampleApp.log("DSL Query: " + dslQuery); - SampleApp.log(" result count: " + resultCount); - - for (int i = 0; i < resultCount; i++) { - SampleApp.log(" result # " + (i + 1) + ": " + entities.get(i)); - } - } catch (Exception e) { - SampleApp.log("query -: " + dslQuery + " failed"); - } - } - } - - public void quickSearch(String searchString) { - try { - AtlasQuickSearchResult result = client.quickSearch(searchString, SampleAppConstants.TABLE_TYPE, false, 2, 0); - List entities = result != null && result.getSearchResults() != null ? result.getSearchResults().getEntities() : null; - int resultCount = entities == null ? 0 : entities.size(); - - SampleApp.log("Quick search: query-string=" + searchString); - SampleApp.log(" result count: " + resultCount); - - for (int i = 0; i < resultCount; i++) { - SampleApp.log(" result # " + (i + 1) + ": " + entities.get(i)); - } - } catch (AtlasServiceException e) { - e.printStackTrace(); - } - } - - public void basicSearch(String typeName, String classification, String query) { - try { - AtlasSearchResult result = client.basicSearch(typeName, classification, query, false, 2, 0); - List entities = result != null ? result.getEntities() : null; - int resultCount = entities == null ? 0 : entities.size(); - - SampleApp.log("Basic search: typeName=" + typeName + ", classification=" + classification + ", query=" + query); - SampleApp.log(" result count: " + resultCount); - - for (int i = 0; i < resultCount; i++) { - SampleApp.log(" result # " + (i + 1) + ": " + entities.get(i)); - } - } catch (AtlasServiceException e) { - e.printStackTrace(); - } - } -} diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/EntityExample.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/EntityExample.java deleted file mode 100644 index 3d9d4845b0..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/EntityExample.java +++ /dev/null @@ -1,277 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.instance.EntityMutations; -import org.apache.atlas.type.AtlasTypeUtil; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import static java.util.Arrays.asList; -import static org.apache.atlas.examples.sampleapp.SampleAppConstants.*; -import static org.apache.atlas.type.AtlasTypeUtil.toAtlasRelatedObjectId; -import static org.apache.atlas.type.AtlasTypeUtil.toAtlasRelatedObjectIds; - -public class EntityExample { - private static final String DATABASE_NAME = "employee_db_entity"; - private static final String TABLE_NAME = "employee_table_entity"; - private static final String PROCESS_NAME = "employee_process_entity"; - private static final String METADATA_NAMESPACE_SUFFIX = "@cl1"; - private static final String MANAGED_TABLE = "Managed"; - private static final String ATTR_NAME = "name"; - private static final String ATTR_DESCRIPTION = "description"; - private static final String ATTR_QUALIFIED_NAME = "qualifiedName"; - private static final String REFERENCEABLE_ATTRIBUTE_NAME = ATTR_QUALIFIED_NAME; - private static final String COLUMN_TIME_ID = "time_id"; - private static final String COLUMN_CUSTOMER_ID = "customer_id"; - private static final String COLUMN_COMPANY_ID = "company_id"; - - private final AtlasClientV2 client; - private AtlasEntity dbEntity; - private AtlasEntity tableEntityUS; - private AtlasEntity tableEntityCanada; - private AtlasEntityHeader loadProcess; - - EntityExample(AtlasClientV2 client) { - this.client = client; - } - - public void createEntities() throws Exception { - if (dbEntity == null) { - dbEntity = createDatabaseEntity(DATABASE_NAME); - - SampleApp.log("Created entity: typeName=" + dbEntity.getTypeName() + ", qualifiedName=" + dbEntity.getAttribute(ATTR_QUALIFIED_NAME) + ", guid=" + dbEntity.getGuid()); - } - - if (tableEntityCanada == null) { - tableEntityCanada = createTableEntity(TABLE_NAME + "_CANADA"); - - SampleApp.log("Created entity: typeName=" + tableEntityCanada.getTypeName() + ", qualifiedName=" + tableEntityCanada.getAttribute(ATTR_QUALIFIED_NAME) + ", guid=" + tableEntityCanada.getGuid()); - } - - if (tableEntityUS == null) { - tableEntityUS = createTableEntity(TABLE_NAME + "_US"); - - SampleApp.log("Created entity: typeName=" + tableEntityUS.getTypeName() + ", qualifiedName=" + tableEntityUS.getAttribute(ATTR_QUALIFIED_NAME) + ", guid=" + tableEntityUS.getGuid()); - } - - if (loadProcess == null) { - loadProcess = createProcessEntity(PROCESS_NAME); - - SampleApp.log("Created entity: typeName=" + loadProcess.getTypeName() + ", qualifiedName=" + loadProcess.getAttribute(ATTR_QUALIFIED_NAME) + ", guid=" + loadProcess.getGuid()); - } - } - - public AtlasEntity 
getTableEntity() { - return tableEntityUS; - } - - public void getEntityByGuid(String entityGuid) throws Exception { - AtlasEntityWithExtInfo entity = client.getEntityByGuid(entityGuid); - - if (entity != null) { - SampleApp.log("Retrieved entity with guid=" + entityGuid); - SampleApp.log(" " + entity); - } - } - - public void deleteEntities() throws Exception { - client.deleteEntityByGuid(loadProcess.getGuid()); - - SampleApp.log("Deleted entity: guid=" + loadProcess.getGuid()); - - List entityGuids = Arrays.asList(tableEntityUS.getGuid(), tableEntityCanada.getGuid(), dbEntity.getGuid()); - - client.deleteEntitiesByGuids(entityGuids); - - SampleApp.log("Deleted entities:"); - for (String entityGuid : entityGuids) { - SampleApp.log(" guid=" + entityGuid); - } - } - - private AtlasEntity createTableEntity(String tableName) throws Exception { - return createHiveTable(dbEntity, tableName, MANAGED_TABLE, - Arrays.asList(createColumn(COLUMN_TIME_ID, "int", "time id"), - createColumn(COLUMN_CUSTOMER_ID, "int", "customer id", SampleAppConstants.PII_TAG), - createColumn(COLUMN_COMPANY_ID, "double", "company id", SampleAppConstants.FINANCE_TAG)), - SampleAppConstants.METRIC_TAG); - } - - private AtlasEntityHeader createProcessEntity(String processName) throws Exception { - return createProcess(processName, "hive query for monthly avg salary", "user ETL", - asList(tableEntityUS), - asList(tableEntityCanada), - "create table as select ", "plan", "id", "graph", SampleAppConstants.CLASSIFIED_TAG); - } - - private AtlasEntityHeader createProcess(String name, String description, String user, List inputs, List outputs, - String queryText, String queryPlan, String queryId, String queryGraph, String... classificationNames) throws Exception { - - AtlasEntity entity = new AtlasEntity(SampleAppConstants.PROCESS_TYPE); - - entity.setAttribute(ATTR_NAME, name); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + METADATA_NAMESPACE_SUFFIX); - entity.setAttribute(ATTR_DESCRIPTION, description); - entity.setAttribute(ATTR_USERNAME, user); - entity.setAttribute(ATTR_START_TIME, System.currentTimeMillis()); - entity.setAttribute(ATTR_END_TIME, System.currentTimeMillis() + 10000); - entity.setAttribute(ATTR_QUERY_TEXT, queryText); - entity.setAttribute(ATTR_QUERY_PLAN, queryPlan); - entity.setAttribute(ATTR_QUERY_ID, queryId); - entity.setAttribute(ATTR_QUERY_GRAPH, queryGraph); - entity.setAttribute(ATTR_OPERATION_TYPE, "testOperation"); - - entity.setRelationshipAttribute(ATTR_INPUTS, toAtlasRelatedObjectIds(inputs)); - entity.setRelationshipAttribute(ATTR_OUTPUTS, toAtlasRelatedObjectIds(outputs)); - - entity.setClassifications(toAtlasClassifications(classificationNames)); - - return createEntity(new AtlasEntityWithExtInfo(entity)); - } - - private AtlasEntity createColumn(String name, String dataType, String comment, String... 
classificationNames) { - AtlasEntity ret = new AtlasEntity(SampleAppConstants.COLUMN_TYPE); - - ret.setAttribute(ATTR_NAME, name); - ret.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + METADATA_NAMESPACE_SUFFIX); - ret.setAttribute(ATTR_DATA_TYPE, dataType); - ret.setAttribute(ATTR_COMMENT, comment); - - ret.setClassifications(toAtlasClassifications(classificationNames)); - - return ret; - } - - private List toAtlasClassifications(String[] classificationNames) { - List ret = new ArrayList<>(); - - if (classificationNames != null) { - for (String classificationName : classificationNames) { - ret.add(new AtlasClassification(classificationName)); - } - } - - return ret; - } - - private AtlasEntityHeader createEntity(AtlasEntityWithExtInfo atlasEntityWithExtInfo) { - EntityMutationResponse entity; - - try { - entity = client.createEntity(atlasEntityWithExtInfo); - - if (entity != null && entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE) != null) { - List list = entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE); - - if (list.size() > 0) { - return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0); - } - } - } catch (AtlasServiceException e) { - SampleApp.log("failed in create entity"); - e.printStackTrace(); - } - - return null; - } - - private AtlasEntity createDatabaseEntity(String dbName) { - AtlasEntity hiveDBInstance = createHiveDBInstance(dbName); - AtlasEntityHeader entityHeader = createEntity(new AtlasEntityWithExtInfo(hiveDBInstance)); - - if (entityHeader != null && entityHeader.getGuid() != null) { - hiveDBInstance.setGuid(entityHeader.getGuid()); - } - - return hiveDBInstance; - } - - protected AtlasEntity createHiveDBInstance(String dbName) { - AtlasEntity entity = new AtlasEntity(SampleAppConstants.DATABASE_TYPE); - - entity.setAttribute(ATTR_NAME, dbName); - entity.setAttribute(ATTR_DESCRIPTION, "employee database"); - entity.setAttribute(METADATA_NAMESPACE_SUFFIX, "employeeCluster"); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, dbName + METADATA_NAMESPACE_SUFFIX); - entity.setAttribute(ATTR_OWNER, "user"); - entity.setAttribute(ATTR_LOCATION_URI, "/tmp"); - entity.setAttribute(ATTR_CREATE_TIME, 1000); - - return entity; - } - - private AtlasEntity createHiveTable(AtlasEntity database, String tableName, String tableType, List columns, String... classificationNames) throws Exception { - AtlasEntityWithExtInfo entityWithExtInfo = new AtlasEntityWithExtInfo(); - - AtlasEntity hiveTableInstance = createHiveTable(database, tableName, tableType, classificationNames); - entityWithExtInfo.setEntity(hiveTableInstance); - hiveTableInstance.setRelationshipAttribute(ATTR_COLUMNS, toAtlasRelatedObjectIds(columns)); - - for (AtlasEntity column : columns) { - column.setRelationshipAttribute(ATTR_TABLE, toAtlasRelatedObjectId(hiveTableInstance)); - entityWithExtInfo.addReferredEntity(column); - } - - AtlasEntityHeader createdHeader = createEntity(entityWithExtInfo); - - if (createdHeader != null && createdHeader.getGuid() != null) { - hiveTableInstance.setGuid(createdHeader.getGuid()); - } - - return hiveTableInstance; - } - - private AtlasEntity createHiveTable(AtlasEntity database, String tableName, String tableType, String... classificationNames) throws Exception { - AtlasEntity table = new AtlasEntity(SampleAppConstants.TABLE_TYPE); - - table.setAttribute(ATTR_NAME, tableName); - table.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, database.getAttribute(ATTR_NAME) + "." 
+ tableName + METADATA_NAMESPACE_SUFFIX); - table.setAttribute(ATTR_TABLE_TYPE, tableType); - table.setRelationshipAttribute(ATTR_DB, AtlasTypeUtil.getAtlasRelatedObjectId(database, TABLE_DATABASE_TYPE)); - - table.setAttribute(ATTR_DESCRIPTION, "emp table"); - table.setAttribute(ATTR_LAST_ACCESS_TIME, "2014-07-11T08:00:00.000Z"); - table.setAttribute(ATTR_LEVEL, 2); - table.setAttribute(ATTR_COMPRESSED, false); - table.setClassifications(toAtlasClassifications(classificationNames)); - - AtlasStruct serde1 = new AtlasStruct(STRUCT_TYPE_SERDE); - - serde1.setAttribute(ATTR_NAME, "serde1"); - serde1.setAttribute(ATTR_SERDE, "serde1"); - table.setAttribute(ATTR_SERDE1, serde1); - - AtlasStruct serde2 = new AtlasStruct(STRUCT_TYPE_SERDE); - serde2.setAttribute(ATTR_NAME, "serde2"); - serde2.setAttribute(ATTR_SERDE, "serde2"); - table.setAttribute(ATTR_SERDE2, serde2); - - return table; - } -} \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/GlossaryExample.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/GlossaryExample.java deleted file mode 100644 index 7d5690ce8c..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/GlossaryExample.java +++ /dev/null @@ -1,112 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.model.glossary.AtlasGlossary; -import org.apache.atlas.model.glossary.AtlasGlossary.AtlasGlossaryExtInfo; -import org.apache.atlas.model.glossary.AtlasGlossaryCategory; -import org.apache.atlas.model.glossary.AtlasGlossaryTerm; -import org.apache.atlas.model.glossary.relations.AtlasGlossaryHeader; - -public class GlossaryExample { - private static final String GLOSSARY_NAME = "EmployeeCountry"; - - private final AtlasClientV2 client; - private AtlasGlossary empGlossary; - private AtlasGlossaryTerm empSalaryTerm; - private AtlasGlossaryCategory empCompanyCategory; - - GlossaryExample(AtlasClientV2 client) { - this.client = client; - } - - public void createGlossary() throws Exception { - AtlasGlossary glossary = new AtlasGlossary(); - - glossary.setName(GLOSSARY_NAME); - glossary.setLanguage("English"); - glossary.setShortDescription("This is a test Glossary"); - - empGlossary = client.createGlossary(glossary); - } - - public void getGlossaryDetail() throws Exception { - AtlasGlossaryExtInfo extInfo = client.getGlossaryExtInfo(empGlossary.getGuid()); - - assert (extInfo != null); - - SampleApp.log("Glossary extended info: " + extInfo.getGuid() + "; name: " + extInfo.getName() + "; language: " + extInfo.getLanguage()); - } - - public void createGlossaryTerm() throws Exception { - if (empSalaryTerm != null) { - SampleApp.log("EmpSalaryTerm: term already exists"); - return; - } - - AtlasGlossaryHeader glossary = new AtlasGlossaryHeader(); - AtlasGlossaryTerm term = new AtlasGlossaryTerm(); - - glossary.setGlossaryGuid(empGlossary.getGuid()); - glossary.setDisplayText(empGlossary.getName()); - - term.setAnchor(glossary); - term.setName("EmpSalaryTerm"); - - empSalaryTerm = client.createGlossaryTerm(term); - - if (empSalaryTerm != null) { - SampleApp.log("Created term for Employee Salary: " + empSalaryTerm); - } - } - - public void createGlossaryCategory() throws Exception { - if (empCompanyCategory != null) { - SampleApp.log("EmpSalaryCategory: category already exists"); - return; - } - - AtlasGlossaryHeader glossary = new AtlasGlossaryHeader(); - AtlasGlossaryCategory category = new AtlasGlossaryCategory(); - - glossary.setGlossaryGuid(empGlossary.getGuid()); - glossary.setDisplayText(empGlossary.getName()); - - category.setAnchor(glossary); - category.setName("EmpSalaryCategory"); - - empCompanyCategory = client.createGlossaryCategory(category); - - if (empCompanyCategory != null) { - SampleApp.log("Created Category for Employee Category :- " + empCompanyCategory); - } - } - - public void deleteGlossary() throws Exception { - if (empGlossary != null) { - client.deleteGlossaryByGuid(empGlossary.getGuid()); - - SampleApp.log("empGlossary is not present. 
Skipping the delete operation."); - } - - empGlossary = null; - empSalaryTerm = null; - empCompanyCategory = null; - } -} diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/LineageExample.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/LineageExample.java deleted file mode 100644 index aba3b812d6..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/LineageExample.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.lineage.AtlasLineageInfo; - -import java.util.Map; -import java.util.Set; - -public class LineageExample { - private AtlasClientV2 atlasClient; - - LineageExample(AtlasClientV2 atlasClient) { - this.atlasClient = atlasClient; - } - - public void lineage(String guid) throws AtlasServiceException { - AtlasLineageInfo lineageInfo = atlasClient.getLineageInfo(guid, AtlasLineageInfo.LineageDirection.BOTH, 0); - Set relations = lineageInfo.getRelations(); - Map guidEntityMap = lineageInfo.getGuidEntityMap(); - - for (AtlasLineageInfo.LineageRelation relation : relations) { - AtlasEntityHeader fromEntity = guidEntityMap.get(relation.getFromEntityId()); - AtlasEntityHeader toEntity = guidEntityMap.get(relation.getToEntityId()); - - SampleApp.log(fromEntity.getDisplayText() + "(" + fromEntity.getTypeName() + ") -> " + - toEntity.getDisplayText() + "(" + toEntity.getTypeName() + ")"); - } - } -} \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java deleted file mode 100644 index f663de16be..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasException; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.utils.AuthenticationUtil; - -import java.util.Date; - -public class SampleApp { - private AtlasClientV2 client; - - SampleApp(String[] atlasServerUrls, String[] basicAuthUsernamePassword) { - client = new AtlasClientV2(atlasServerUrls, basicAuthUsernamePassword); - } - - SampleApp(String[] atlasServerUrls) throws AtlasException { - client = new AtlasClientV2(atlasServerUrls); - } - - public static void main(String[] args) throws Exception { - SampleApp sampleApp = null; - - try { - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - String[] atlasServerUrls = getServerUrl(); - String[] basicAuthUsernamePassword = getUserInput(); - - sampleApp = new SampleApp(atlasServerUrls, basicAuthUsernamePassword); - } else { - String[] atlasServerUrls = getServerUrl(); - - sampleApp = new SampleApp(atlasServerUrls); - } - - // TypeDef Examples - TypeDefExample typeDefExample = new TypeDefExample(sampleApp.getClient()); - - typeDefExample.createTypeDefinitions(); - typeDefExample.printTypeDefinitions(); - - // Entity Examples - EntityExample entityExample = new EntityExample(sampleApp.getClient()); - - entityExample.createEntities(); - - AtlasEntity createdEntity = entityExample.getTableEntity(); - - entityExample.getEntityByGuid(createdEntity.getGuid()); - - // Lineage Examples - sampleApp.lineageExample(createdEntity.getGuid()); - - // Discovery/Search Examples - sampleApp.discoveryExample(createdEntity); - - // Glossary Examples - sampleApp.glossaryExample(); - - entityExample.deleteEntities(); - } finally { - if (sampleApp != null && sampleApp.getClient() != null) { - sampleApp.getClient().close(); - } - } - } - - public static void log(String message) { - System.out.println("[" + new Date() + "] " + message); - } - - public AtlasClientV2 getClient() { - return client; - } - - private void lineageExample(String entityGuid) throws Exception { - LineageExample lineageExample = new LineageExample(client); - - lineageExample.lineage(entityGuid); - } - - private void discoveryExample(AtlasEntity entity) { - DiscoveryExample discoveryExample = new DiscoveryExample(client); - - discoveryExample.testSearch(); - discoveryExample.quickSearch(entity.getTypeName()); - discoveryExample.basicSearch(entity.getTypeName(), SampleAppConstants.METRIC_TAG, (String)entity.getAttribute(SampleAppConstants.ATTR_NAME)); - } - - private void glossaryExample() throws Exception { - GlossaryExample glossaryExample = new GlossaryExample(client); - - glossaryExample.createGlossary(); - glossaryExample.createGlossaryTerm(); - glossaryExample.getGlossaryDetail(); - glossaryExample.createGlossaryCategory(); - glossaryExample.deleteGlossary(); - } - - private static String[] getUserInput() { - try { - String username = System.console().readLine("Enter username: "); - char[] pwChar = System.console().readPassword("Enter password: "); - String password = (pwChar != null) ? 
new String(pwChar) : ""; - - return new String[] { username, password }; - } catch (Exception e) { - System.out.print("Error while reading user input"); - System.exit(1); - } - - return null; // will not reach here - } - - private static String[] getServerUrl() { - try { - String atlasServerUrl = System.console().readLine("Enter Atlas server URL: "); - - return new String[] { atlasServerUrl }; - } catch (Exception e) { - System.out.print("Error while reading user input"); - System.exit(1); - } - - return null; // will not reach here - } -} \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleAppConstants.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleAppConstants.java deleted file mode 100644 index 4b7b44dc95..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleAppConstants.java +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -public final class SampleAppConstants { - public static final String ATTR_NAME = "name"; - public static final String ATTR_DESCRIPTION = "description"; - public static final String ATTR_CREATE_TIME = "createTime"; - public static final String ATTR_OWNER = "owner"; - public static final String ATTR_TABLE_TYPE = "tableType"; - public static final String ATTR_LAST_ACCESS_TIME = "lastAccessTime"; - public static final String ATTR_RANDOM_TABLE = "randomTable"; - public static final String ATTR_TEMPORARY = "temporary"; - public static final String ATTR_DATA_TYPE = "dataType"; - public static final String ATTR_COMMENT = "comment"; - public static final String ATTR_LOCATION_URI = "locationUri"; - public static final String ATTR_USERNAME = "userName"; - public static final String ATTR_START_TIME = "startTime"; - public static final String ATTR_END_TIME = "endTime"; - public static final String ATTR_QUERY_TEXT = "queryText"; - public static final String ATTR_QUERY_PLAN = "queryPlan"; - public static final String ATTR_QUERY_ID = "queryId"; - public static final String ATTR_QUERY_GRAPH = "queryGraph"; - public static final String ATTR_OPERATION_TYPE = "operationType"; - public static final String ATTR_LEVEL = "level"; - public static final String ATTR_COMPRESSED = "compressed"; - public static final String ATTR_SERDE = "serde"; - public static final String ATTR_SERDE1 = "serde1"; - public static final String ATTR_SERDE2 = "serde2"; - public static final String ATTR_ATTR1 = "attr1"; - public static final String ATTR_ATTR2 = "attr2"; - public static final String ATTR_ATTR8 = "attr8"; - public static final String ATTR_ATTR11 = "attr11"; - public static final String ATTR_ATTR18 = "attr88"; - public static final String ATTR_INPUTS = "inputs"; - public static final String ATTR_OUTPUTS = "outputs"; - public static final String ATTR_DB = "db"; - public static final String ATTR_TABLE = "table"; - public static final String ATTR_COLUMNS = "columns"; - - public static final String ENTITY_TYPE_DATASET = "DataSet"; - public static final String ENTITY_TYPE_PROCESS = "Process"; - - public static final String PII_TAG = "SAMPLE_PII"; - public static final String FINANCE_TAG = "SAMPLE_FINANCE"; - public static final String CLASSIFIED_TAG = "SAMPLE_CLASSIFIED"; - public static final String METRIC_TAG = "SAMPLE_METRIC"; - - public static final String DATABASE_TYPE = "sample_db_type"; - public static final String PROCESS_TYPE = "sample_process_type"; - public static final String TABLE_TYPE = "sample_table_type"; - public static final String COLUMN_TYPE = "sample_column_type"; - - public static final String TABLE_DATABASE_TYPE = "sample_db_tables"; - public static final String TABLE_COLUMNS_TYPE = "sample_table_columns"; - public static final String ENUM_TABLE_TYPE = "sample_tableType"; - public static final String BUSINESS_METADATA_TYPE = "sample_bmWithAllTypes"; - public static final String BUSINESS_METADATA_TYPE_MV = "sample_bmWithAllTypesMV"; - public static final String STRUCT_TYPE_SERDE = "sample_serdeType"; -} diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/TypeDefExample.java 
b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/TypeDefExample.java deleted file mode 100644 index be89bd1b60..0000000000 --- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/TypeDefExample.java +++ /dev/null @@ -1,274 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.examples.sampleapp; - -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.SearchFilter; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.model.typedef.AtlasBusinessMetadataDef; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasEnumDef; -import org.apache.atlas.model.typedef.AtlasRelationshipDef; -import org.apache.atlas.model.typedef.AtlasStructDef; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasTypeUtil; - -import javax.ws.rs.core.MultivaluedMap; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.examples.sampleapp.SampleAppConstants.*; -import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.*; -import static org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory.AGGREGATION; -import static org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory.COMPOSITION; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality.SET; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE; -import static org.apache.atlas.type.AtlasTypeUtil.createBusinessMetadataDef; -import static org.apache.atlas.type.AtlasTypeUtil.createOptionalAttrDef; -import static org.apache.atlas.type.AtlasTypeUtil.createRelationshipEndDef; -import static org.apache.atlas.type.AtlasTypeUtil.createRelationshipTypeDef; -import static org.apache.atlas.type.AtlasTypeUtil.createTraitTypeDef; - -public class TypeDefExample { - private static final String[] SAMPLE_APP_TYPES = { - SampleAppConstants.DATABASE_TYPE, - SampleAppConstants.TABLE_TYPE, - SampleAppConstants.COLUMN_TYPE, - SampleAppConstants.PROCESS_TYPE, - SampleAppConstants.PII_TAG, - SampleAppConstants.CLASSIFIED_TAG, - SampleAppConstants.FINANCE_TAG, - SampleAppConstants.METRIC_TAG - }; - - private final AtlasClientV2 client; - private AtlasTypesDef typesDef; - - TypeDefExample(AtlasClientV2 client) { - this.client = client; - } - - public void createTypeDefinitions() throws Exception { - AtlasEntityDef databaseDef = createDatabaseDef(); - AtlasEntityDef tableDef = createTableDef(); - AtlasEntityDef columnDef = createColumnDef(); - AtlasEntityDef processDef = createProcessDef(); - AtlasStructDef serDeDef = createSerDeDef(); - AtlasEnumDef tableTypeDef = createTableTypeEnumDef(); - - List classificationDefs = createClassificationDefs(); - List businessMetadataDef = createBusinessMetadataDefs(); - List relationshipDefs = createAtlasRelationshipDef(); - - AtlasTypesDef typesDef = new AtlasTypesDef(Collections.singletonList(tableTypeDef), - Collections.singletonList(serDeDef), - classificationDefs, - Arrays.asList(databaseDef, tableDef, columnDef, processDef), - relationshipDefs, - businessMetadataDef); - - this.typesDef = 
batchCreateTypes(typesDef); - } - - public void printTypeDefinitions() throws AtlasServiceException { - for (String typeName : SAMPLE_APP_TYPES) { - MultivaluedMap searchParams = new MultivaluedMapImpl(); - - searchParams.add(SearchFilter.PARAM_NAME, typeName); - - SearchFilter searchFilter = new SearchFilter(searchParams); - - AtlasTypesDef typesDef = client.getAllTypeDefs(searchFilter); - - assert (!typesDef.isEmpty()); - - SampleApp.log("Created type: " + typeName); - } - } - - public void removeTypeDefinitions() throws AtlasServiceException { - if (typesDef != null) { - client.deleteAtlasTypeDefs(typesDef); - - typesDef = null; - - SampleApp.log("Deleted TypesDef successfully!"); - } - } - - private AtlasEntityDef createDatabaseDef() { - return AtlasTypeUtil.createClassTypeDef(SampleAppConstants.DATABASE_TYPE, - Collections.singleton(ENTITY_TYPE_DATASET), - AtlasTypeUtil.createOptionalAttrDef("locationUri", "string"), - AtlasTypeUtil.createOptionalAttrDef(ATTR_CREATE_TIME, "long"), - new AtlasAttributeDef(ATTR_RANDOM_TABLE, - AtlasBaseTypeDef.getArrayTypeName(SampleAppConstants.TABLE_TYPE), - true, AtlasAttributeDef.Cardinality.SET)); - } - - private AtlasEntityDef createTableDef() { - return AtlasTypeUtil.createClassTypeDef(SampleAppConstants.TABLE_TYPE, - Collections.singleton(ENTITY_TYPE_DATASET), - createOptionalAttrDef(ATTR_CREATE_TIME, "long"), - createOptionalAttrDef(ATTR_LAST_ACCESS_TIME, "date"), - createOptionalAttrDef(ATTR_TEMPORARY, "boolean"), - createOptionalAttrDef(ATTR_TABLE_TYPE, ENUM_TABLE_TYPE), - createOptionalAttrDef(ATTR_SERDE1, STRUCT_TYPE_SERDE), - createOptionalAttrDef(ATTR_SERDE2, STRUCT_TYPE_SERDE)); - } - - private AtlasEntityDef createColumnDef() { - return AtlasTypeUtil.createClassTypeDef(SampleAppConstants.COLUMN_TYPE, - Collections.singleton(ENTITY_TYPE_DATASET), - AtlasTypeUtil.createOptionalAttrDef(ATTR_DATA_TYPE, "string"), - AtlasTypeUtil.createOptionalAttrDef(ATTR_COMMENT, "string")); - } - - private AtlasEntityDef createProcessDef() { - return AtlasTypeUtil.createClassTypeDef(SampleAppConstants.PROCESS_TYPE, - Collections.singleton(ENTITY_TYPE_PROCESS), - AtlasTypeUtil.createOptionalAttrDef(ATTR_USERNAME, "string"), - AtlasTypeUtil.createOptionalAttrDef(ATTR_START_TIME, "long"), - AtlasTypeUtil.createOptionalAttrDef(ATTR_END_TIME, "long"), - AtlasTypeUtil.createRequiredAttrDef(ATTR_QUERY_TEXT, "string"), - AtlasTypeUtil.createRequiredAttrDef(ATTR_QUERY_PLAN, "string"), - AtlasTypeUtil.createRequiredAttrDef(ATTR_QUERY_ID, "string"), - AtlasTypeUtil.createRequiredAttrDef(ATTR_QUERY_GRAPH, "string")); - } - - private AtlasStructDef createSerDeDef() { - return AtlasTypeUtil.createStructTypeDef(SampleAppConstants.STRUCT_TYPE_SERDE, - AtlasTypeUtil.createRequiredAttrDef(SampleAppConstants.ATTR_NAME, "string"), - AtlasTypeUtil.createRequiredAttrDef(ATTR_SERDE, "string")); - } - - private AtlasEnumDef createTableTypeEnumDef() { - return new AtlasEnumDef(SampleAppConstants.ENUM_TABLE_TYPE, - SampleAppConstants.ATTR_DESCRIPTION, - Arrays.asList(new AtlasEnumDef.AtlasEnumElementDef("MANAGED", null, 1), - new AtlasEnumDef.AtlasEnumElementDef("EXTERNAL", null, 2))); - } - - private List createClassificationDefs() { - AtlasClassificationDef classification = createTraitTypeDef(SampleAppConstants.CLASSIFIED_TAG, Collections.emptySet(), AtlasTypeUtil.createRequiredAttrDef("tag", "string")); - AtlasClassificationDef pii = createTraitTypeDef(SampleAppConstants.PII_TAG, Collections.emptySet()); - AtlasClassificationDef finance = 
createTraitTypeDef(SampleAppConstants.FINANCE_TAG, Collections.emptySet()); - AtlasClassificationDef metric = createTraitTypeDef(SampleAppConstants.METRIC_TAG, Collections.emptySet()); - - return Arrays.asList(classification, pii, finance, metric); - } - - private List createBusinessMetadataDefs() { - String description = "description"; - - Map options = new HashMap<>(); - - options.put("maxStrLength", "20"); - options.put("applicableEntityTypes", "[\"" + SampleAppConstants.DATABASE_TYPE + "\",\"" + SampleAppConstants.TABLE_TYPE + "\"]"); - - AtlasBusinessMetadataDef bmWithAllTypes = createBusinessMetadataDef(SampleAppConstants.BUSINESS_METADATA_TYPE, - description, - "1.0", - createOptionalAttrDef(ATTR_ATTR1, ATLAS_TYPE_BOOLEAN, options, description), - createOptionalAttrDef(ATTR_ATTR2, ATLAS_TYPE_BYTE, options, description), - createOptionalAttrDef(ATTR_ATTR8, ATLAS_TYPE_STRING, options, description)); - - AtlasBusinessMetadataDef bmWithAllTypesMV = createBusinessMetadataDef(SampleAppConstants.BUSINESS_METADATA_TYPE_MV, - description, - "1.0", - createOptionalAttrDef(ATTR_ATTR11, "array", options, description), - createOptionalAttrDef(ATTR_ATTR18, "array", options, description)); - - return Arrays.asList(bmWithAllTypes, bmWithAllTypesMV); - } - - private List createAtlasRelationshipDef() { - AtlasRelationshipDef dbTablesDef = createRelationshipTypeDef(SampleAppConstants.TABLE_DATABASE_TYPE, SampleAppConstants.TABLE_DATABASE_TYPE, - "1.0", AGGREGATION, AtlasRelationshipDef.PropagateTags.NONE, - createRelationshipEndDef(SampleAppConstants.TABLE_TYPE, "db", SINGLE, false), - createRelationshipEndDef(SampleAppConstants.DATABASE_TYPE, "tables", SET, true)); - - AtlasRelationshipDef tableColumnsDef = createRelationshipTypeDef(SampleAppConstants.TABLE_COLUMNS_TYPE, SampleAppConstants.TABLE_COLUMNS_TYPE, - "1.0", COMPOSITION, AtlasRelationshipDef.PropagateTags.NONE, - createRelationshipEndDef(SampleAppConstants.TABLE_TYPE, "columns", SET, true), - createRelationshipEndDef(SampleAppConstants.COLUMN_TYPE, "table", SINGLE, false)); - - return Arrays.asList(dbTablesDef, tableColumnsDef); - } - - private AtlasTypesDef batchCreateTypes(AtlasTypesDef typesDef) throws AtlasServiceException { - AtlasTypesDef typesToCreate = new AtlasTypesDef(); - - for (AtlasEnumDef enumDef : typesDef.getEnumDefs()) { - if (client.typeWithNameExists(enumDef.getName())) { - SampleApp.log(enumDef.getName() + ": type already exists. Skipping"); - } else { - typesToCreate.getEnumDefs().add(enumDef); - } - } - - for (AtlasStructDef structDef : typesDef.getStructDefs()) { - if (client.typeWithNameExists(structDef.getName())) { - SampleApp.log(structDef.getName() + ": type already exists. Skipping"); - } else { - typesToCreate.getStructDefs().add(structDef); - } - } - - for (AtlasEntityDef entityDef : typesDef.getEntityDefs()) { - if (client.typeWithNameExists(entityDef.getName())) { - SampleApp.log(entityDef.getName() + ": type already exists. Skipping"); - } else { - typesToCreate.getEntityDefs().add(entityDef); - } - } - - for (AtlasClassificationDef classificationDef : typesDef.getClassificationDefs()) { - if (client.typeWithNameExists(classificationDef.getName())) { - SampleApp.log(classificationDef.getName() + ": type already exists. 
Skipping"); - } else { - typesToCreate.getClassificationDefs().add(classificationDef); - } - } - - for (AtlasRelationshipDef relationshipDef : typesDef.getRelationshipDefs()) { - if (client.typeWithNameExists(relationshipDef.getName())) { - SampleApp.log(relationshipDef.getName() + ": type already exists. Skipping"); - } else { - typesToCreate.getRelationshipDefs().add(relationshipDef); - } - } - - for (AtlasBusinessMetadataDef businessMetadataDef : typesDef.getBusinessMetadataDefs()) { - if (client.typeWithNameExists(businessMetadataDef.getName())) { - SampleApp.log(businessMetadataDef.getName() + ": type already exists. Skipping"); - } else { - typesToCreate.getBusinessMetadataDefs().add(businessMetadataDef); - } - } - - return client.createAtlasTypeDefs(typesToCreate); - } -} \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/python/README.md b/atlas-examples/sample-app/src/main/python/README.md deleted file mode 100644 index b199ecd6eb..0000000000 --- a/atlas-examples/sample-app/src/main/python/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Python Sample App - -This is a Python sample app to showcase basic functionality of Atlas. We are using Python client -to call Atlas APIs. Make sure to install Atlas Python client first before trying to run this project. Currently, compatible with Python 3.5+ - -## Installation - -1. Using `setup.py` - -```bash -cd /atlas-examples/sample-app/src/main/python - -# To check if apache-atlas client is installed try the following command -python3 ->>> import apache_atlas ->>> - -# If there is no error, then client is installed otherwise follow client Readme file to install it first. -``` - - -To Run this project - -```bash -python sample_client.py -``` -This will prompt for url of the Atlas server, username and password. diff --git a/atlas-examples/sample-app/src/main/python/discovery_example.py b/atlas-examples/sample-app/src/main/python/discovery_example.py deleted file mode 100644 index 78f5337390..0000000000 --- a/atlas-examples/sample-app/src/main/python/discovery_example.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging - -from utils import TABLE_TYPE - -LOG = logging.getLogger('discovery-example') - - -class DiscoveryExample: - DSL_QUERIES = {"from DataSet", "from Process"} - - def __init__(self, client): - self.typesDef = None - self.client = client - - def dsl_search(self): - for dsl_query in DiscoveryExample.DSL_QUERIES: - try: - result = self.client.discovery.dsl_search_with_params(dsl_query, 10, 0) - - if result: - entities_result = result.entities - - if entities_result: - LOG.info("query: '%s' retrieved: %s rows", dsl_query, len(entities_result)) - - except Exception as e: - LOG.exception("query: %s failed in dsl search", dsl_query) - - def quick_search(self, search_string): - try: - result = self.client.discovery.quick_search(search_string, TABLE_TYPE, False, 2, 0) - - if result: - LOG.info("Quick-search result: %s", result.searchResults) - - except Exception as e: - LOG.exception("query: '%s' failed in quick search", search_string) - - def basic_search(self, type_name, classification, query): - try: - result = self.client.discovery.basic_search(type_name, classification, query, False, None, 'ASCENDING', 2, 0) - - if result: - LOG.info("Basic-search result: %s", result) - - except Exception as e: - LOG.exception("query: '%s' failed in basic search", query) diff --git a/atlas-examples/sample-app/src/main/python/entity_example.py b/atlas-examples/sample-app/src/main/python/entity_example.py deleted file mode 100644 index cb2d98d4d9..0000000000 --- a/atlas-examples/sample-app/src/main/python/entity_example.py +++ /dev/null @@ -1,189 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json -import logging - -from apache_atlas.model.enums import EntityOperation -from apache_atlas.model.instance import AtlasEntityWithExtInfo, AtlasRelatedObjectId -from apache_atlas.utils import type_coerce - - -LOG = logging.getLogger('entity-example') - - -class EntityExample: - DATABASE_NAME = "employee_db_entity" - TABLE_NAME = "employee_table_entity" - PROCESS_NAME = "employee_process_entity" - METADATA_NAMESPACE_SUFFIX = "@cl1" - MANAGED_TABLE = "Managed" - ATTR_NAME = "name" - ATTR_DESCRIPTION = "description" - ATTR_QUALIFIED_NAME = "qualifiedName" - REFERENCEABLE_ATTRIBUTE_NAME = ATTR_QUALIFIED_NAME - ATTR_TIME_ID_COLUMN = "time_id" - ATTR_CUSTOMER_ID_COLUMN = "customer_id" - ATTR_COMPANY_ID_COLUMN = "company_id" - - def __init__(self, client): - self.client = client - self.entity_db = None - self.entity_table_us = None - self.entity_table_canada = None - self.entity_process = None - - def create_entities(self): - self.__create_db() - self.__create_us_table() - self.__create_canada_table() - self.__create_process() - - def get_table_entity(self): - if self.entity_table_us: - return self.entity_table_us - - return None - - def get_entity_by_guid(self, guid): - entity = self.client.entity.get_entity_by_guid(guid) - - LOG.info("Entity(guid=%s): typeName=%s, attr.name=%s", guid, entity.entity.typeName, entity.entity.attributes['name']) - - def remove_entities(self): - entity_list = [ self.entity_process.guid, self.entity_table_us.guid, self.entity_table_canada.guid, self.entity_db.guid ] - - self.client.entity.delete_entities_by_guids(entity_list) - - response = self.client.entity.purge_entities_by_guids(entity_list) - - if response is not None: - LOG.info("Purged entities") - else: - LOG.info("Purge failed!") - - def __create_db(self): - if not self.entity_db: - with open('request_json/entity_create_db.json') as f: - entity = type_coerce(json.load(f), AtlasEntityWithExtInfo) - - self.entity_db = self.__create_db_helper(entity) - - if self.entity_db: - LOG.info("Created database entity: guid=%s, attr.name=%s", self.entity_db.guid, self.entity_db.attributes['name']) - else: - LOG.info("Failed to create database entity") - - def __create_us_table(self): - if not self.entity_table_us: - with open('request_json/entity_create_table_us.json') as f: - entity = type_coerce(json.load(f), AtlasEntityWithExtInfo) - - self.entity_table_us = self.__create_table_helper(entity) - - if self.entity_table_us: - LOG.info("Created US table entity: guid=%s, attr.name=%s", self.entity_table_us.guid, self.entity_table_us.attributes['name']) - else: - LOG.info("Failed to create US table entity") - - def __create_canada_table(self): - if not self.entity_table_canada: - with open('request_json/entity_create_table_canada.json') as f: - entity = type_coerce(json.load(f), AtlasEntityWithExtInfo) - - self.entity_table_canada = self.__create_table_helper(entity) - - if self.entity_table_canada: - LOG.info("Created Canada table entity: guid=%s, attr.name=%s", self.entity_table_canada.guid, self.entity_table_canada.attributes['name']) - else: - LOG.info("Failed to create Canada table entity") - - def __create_process(self): - if not self.entity_process: - with open('request_json/entity_create_process.json') as f: - entity = type_coerce(json.load(f), AtlasEntityWithExtInfo) - - self.entity_process = self.__create_process_helper(entity) - - if self.entity_process: - LOG.info("Created process entity: guid=%s, attr.name=%s", self.entity_process.guid, self.entity_process.attributes['name']) - else: - 
LOG.info("Failed to createa process entity") - - def __create_db_helper(self, entity): - self.__create_entity(entity) - - return entity.entity - - def __create_table_helper(self, entity): - table = entity.entity - - if self.entity_db: - dbId = AtlasRelatedObjectId({ 'guid': self.entity_db.guid }) - - LOG.info("setting: table(%s).db=%s", table.guid, dbId) - - table.relationshipAttributes['db'] = dbId - - self.__create_entity(entity) - - return table - - def __create_process_helper(self, entity): - process = entity.entity - - process.relationshipAttributes = {} - - if self.entity_table_us: - process.relationshipAttributes['inputs'] = [ AtlasRelatedObjectId({ 'guid': self.entity_table_us.guid }) ] - - if self.entity_table_canada: - process.relationshipAttributes['outputs'] = [ AtlasRelatedObjectId({'guid': self.entity_table_canada.guid }) ] - - return self.__create_entity(entity) - - def __create_entity(self, entity): - try: - response = self.client.entity.create_entity(entity) - - guid = None - - if response and response.mutatedEntities: - if EntityOperation.CREATE.name in response.mutatedEntities: - header_list = response.mutatedEntities[EntityOperation.CREATE.name] - elif EntityOperation.UPDATE.name in response.mutatedEntities: - header_list = response.mutatedEntities[EntityOperation.UPDATE.name] - - if header_list and len(header_list) > 0: - guid = header_list[0].guid - elif response and response.guidAssignments: - if entity.entity is not None and entity.entity.guid is not None: - in_guid = entity.entity.guid - else: - in_guid = None - - if in_guid and response.guidAssignments[in_guid]: - guid = response.guidAssignments[in_guid] - - if guid: - entity.entity.guid = guid - except Exception as e: - LOG.exception("failed to create entity %s. error=%s", entity, e) - - return entity.entity if entity and entity.entity else None diff --git a/atlas-examples/sample-app/src/main/python/glossary_example.py b/atlas-examples/sample-app/src/main/python/glossary_example.py deleted file mode 100644 index 449c2bce77..0000000000 --- a/atlas-examples/sample-app/src/main/python/glossary_example.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging - -from apache_atlas.model.glossary import AtlasGlossary, AtlasGlossaryCategory, AtlasGlossaryTerm, AtlasGlossaryHeader - -LOG = logging.getLogger('glossary-example') - - -class GlossaryExample: - glossaryName = "EmployeeCountry" - - def __init__(self, client): - self.client = client - self.emp_glossary = None - self.emp_salary_term = None - self.emp_company_category = None - - def create_glossary(self): - glossary = AtlasGlossary({ 'name': GlossaryExample.glossaryName, 'shortDescription': 'This is a test Glossary' }) - self.emp_glossary = self.client.glossary.create_glossary(glossary) - - LOG.info("Created glossary with name: %s and guid: %s", self.emp_glossary.name, self.emp_glossary.guid) - - return self.emp_glossary - - def get_glossary_detail(self): - ext_info = self.client.glossary.get_glossary_ext_info(self.emp_glossary.guid) - - if ext_info: - LOG.info("Glossary extended info: %s; name: %s; language: %s", ext_info.guid, ext_info.name, ext_info.language) - - def create_glossary_term(self): - header = AtlasGlossaryHeader({ 'glossaryGuid': self.emp_glossary.guid, 'displayText': self.emp_glossary.name }) - term = AtlasGlossaryTerm({ 'name': 'EmpSalaryTerm', 'anchor': header }) - - self.emp_salary_term = self.client.glossary.create_glossary_term(term) - - if self.emp_salary_term: - LOG.info("Created Term for Employee Salary: %s with guid: %s", self.emp_salary_term.name, self.emp_salary_term.guid) - - def create_glossary_category(self): - header = AtlasGlossaryHeader({ 'glossaryGuid': self.emp_glossary.guid, 'displayText': self.emp_glossary.name }) - category = AtlasGlossaryCategory({ 'name': 'EmpSalaryCategory', 'anchor': header }) - - self.emp_company_category = self.client.glossary.create_glossary_category(category) - - if self.emp_company_category: - LOG.info("Created Category for Employee Category: %s with guid: %s", self.emp_company_category.name, self.emp_company_category.guid) - - def delete_glossary(self): - if not self.emp_glossary: - LOG.info("empGlossary is not present. Skipping the delete operation.") - - self.client.glossary.delete_glossary_by_guid(self.emp_glossary.guid) - - LOG.info("Delete is complete for Glossary!") \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/python/lineage_example.py b/atlas-examples/sample-app/src/main/python/lineage_example.py deleted file mode 100644 index 5341edaf04..0000000000 --- a/atlas-examples/sample-app/src/main/python/lineage_example.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging - -from apache_atlas.model.enums import LineageDirection - -LOG = logging.getLogger('lineage-example') - - -class LineageExample: - def __init__(self, client): - self.client = client - - def lineage(self, guid): - direction = LineageDirection.BOTH.name - lineage_info = self.client.lineage.get_lineage_info(guid, direction, 0) - - if not lineage_info: - LOG.info("Not able to find lineage info") - return - - relations = lineage_info.relations - guid_entity_map = lineage_info.guidEntityMap - - for relation in relations: - from_entity = guid_entity_map[relation.fromEntityId] - to_entity = guid_entity_map[relation.toEntityId] - - LOG.info("%s (%s) -> %s (%s)", from_entity.displayText, from_entity.typeName, to_entity.displayText, to_entity.typeName) \ No newline at end of file diff --git a/atlas-examples/sample-app/src/main/python/request_json/entity_create_db.json b/atlas-examples/sample-app/src/main/python/request_json/entity_create_db.json deleted file mode 100644 index f7dc5258d6..0000000000 --- a/atlas-examples/sample-app/src/main/python/request_json/entity_create_db.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "entity": { - "guid": "-1", - "typeName": "sample_db", - "attributes": { - "name": "employee_db", - "qualifiedName": "employee_db@cl1", - "description": "employee database", - "owner": "user", - "clusterName": "cl1", - "locationUri": "/hive/database/employee_db", - "createTime": 1607476058882 - } - } -} diff --git a/atlas-examples/sample-app/src/main/python/request_json/entity_create_process.json b/atlas-examples/sample-app/src/main/python/request_json/entity_create_process.json deleted file mode 100644 index 26addbbdca..0000000000 --- a/atlas-examples/sample-app/src/main/python/request_json/entity_create_process.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "entity": { - "typeName": "sample_process", - "attributes": { - "name": "employee_process", - "description": "hive query for monthly avg salary", - "qualifiedName": "employee_process@cl1", - "userName": "user ETL", - "startTime": 1607476549507, - "endTime": 1607476552529, - "queryText": "create table as select ", - "queryId": "" - }, - "relationshipAttributes": { - "inputs": [ { "typeName": "sample_table", "uniqueAttributes": { "qualifiedName": "employee_db.employees_us@cl1" } } ], - "outputs": [ { "typeName": "sample_table", "uniqueAttributes": { "qualifiedName": "employee_db.employees_canada@cl1" } } ] - } - } -} diff --git a/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_canada.json b/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_canada.json deleted file mode 100644 index 25a114dc31..0000000000 --- a/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_canada.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "entity": { - "guid": "-1", - "typeName": "sample_table", - "attributes": { - "name": "employees_canada", - "description": "Canada employees", - "qualifiedName": "employee_db.employees_canada@cl1", - "tableType": "Managed", - "serde1": { "typeName": "sample_serdeType", "attributes": { "name": "serde1", "serde": "serde1" } }, - "serde2": { "typeName": "sample_serdeType", "attributes": { "name": "serde2", "serde": "serde2" } } - }, - - "relationshipAttributes": { - "db": { "typeName": "sample_db", "uniqueAttributes": { "qualifiedName": "employee_db@cl1" } }, - "columns": [ - { "guid": "-2" }, - { "guid": "-3" }, - { "guid": "-4" } - ] - } - }, - - "referredEntities": { - "-2": { - "guid": "-2", - "typeName": "sample_column", - "attributes": { - 
"table": { "guid": "-1" }, - "name": "time_id", - "dataType": "int", - "comment": "time id", - "qualifiedName": "employee_db.employees_canada.time_id@cl1" - } - }, - "-3": { - "guid": "-3", - "typeName": "sample_column", - "attributes": { - "table": { "guid": "-1" }, - "name": "customer_id", - "dataType": "int", - "comment": "customer id", - "qualifiedName": "employee_db.employees_canada.customer_id@cl1" - } - }, - "-4": { - "guid": "-4", - "typeName": "sample_column", - "attributes": { - "table": { "guid": "-1" }, - "name": "company_id", - "dataType": "double", - "comment": "company id", - "qualifiedName": "employee_db.employees_canada.company_id@cl1" - } - } - } -} diff --git a/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_us.json b/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_us.json deleted file mode 100644 index 56ca736fec..0000000000 --- a/atlas-examples/sample-app/src/main/python/request_json/entity_create_table_us.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "entity": { - "guid": "-1", - "typeName": "sample_table", - "attributes": { - "name": "employees_us", - "description": "US employees", - "qualifiedName": "employee_db.employees_us@cl1", - "tableType": "Managed", - "serde1": { "typeName": "sample_serdeType", "attributes": { "name": "serde1", "serde": "serde1" } }, - "serde2": { "typeName": "sample_serdeType", "attributes": { "name": "serde2", "serde": "serde2" } } - }, - - "relationshipAttributes": { - "db": { "typeName": "sample_db", "uniqueAttributes": { "qualifiedName": "employee_db@cl1" } }, - "columns": [ - { "guid": "-2" }, - { "guid": "-3" }, - { "guid": "-4" } - ] - } - }, - - "referredEntities": { - "-2": { - "guid": "-2", - "typeName": "sample_column", - "attributes": { - "name": "time_id", - "dataType": "int", - "comment": "time id", - "qualifiedName": "employee_db.employees_us.time_id@cl1", - "table": { "guid": "-1" } - } - }, - "-3": { - "guid": "-3", - "typeName": "sample_column", - "attributes": { - "name": "customer_id", - "dataType": "int", - "comment": "customer id", - "qualifiedName": "employee_db.employees_us.customer_id@cl1", - "table": { "guid": "-1" } - } - }, - "-4": { - "guid": "-4", - "typeName": "sample_column", - "attributes": { - "name": "company_id", - "dataType": "double", - "comment": "company id", - "qualifiedName": "employee_db.employees_us.company_id@cl1", - "table": { "guid": "-1" } - } - } - } -} diff --git a/atlas-examples/sample-app/src/main/python/request_json/typedef_create.json b/atlas-examples/sample-app/src/main/python/request_json/typedef_create.json deleted file mode 100644 index 70018adc85..0000000000 --- a/atlas-examples/sample-app/src/main/python/request_json/typedef_create.json +++ /dev/null @@ -1,190 +0,0 @@ -{ - "enumDefs": [ - { - "name": "sample_tableType", - "description": "sample_tableType", - "category": "ENUM", - "typeVersion": "1.0", - "elementDefs": [ - { "value": "MANAGED", "ordinal": 1 }, - { "value": "EXTERNAL", "ordinal": 2 } - ] - } - ], - "structDefs": [ - { - "name": "sample_serdeType", - "category": "STRUCT", - "typeVersion": "1.0", - "attributeDefs": [ - { "name": "name", "typeName": "string", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true }, - { "name": "serde", "typeName": "string", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true } - ] - } - ], - "classificationDefs": [ - { - "name": "sample_pii", - "category": "CLASSIFICATION", - "typeVersion": "1.0" - }, - { - "name": 
"sample_finance", - "category": "CLASSIFICATION", - "typeVersion": "1.0" - }, - { - "name": "sample_metric", - "category": "CLASSIFICATION", - "typeVersion": "1.0" - } - ], - "entityDefs": [ - { - "name": "sample_db", - "superTypes": [ "DataSet" ], - "category": "ENTITY", - "typeVersion": "1.0", - "attributeDefs": [ - { "name": "locationUri", "typeName": "string", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true }, - { "name": "createTime", "typeName": "date", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true } - ] - }, - { - "name": "sample_table", - "superTypes": [ "DataSet" ], - "category": "ENTITY", - "typeVersion": "1.0", - "attributeDefs": [ - { "name": "createTime", "typeName": "date", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "tableType", "typeName": "sample_tableType", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "temporary", "typeName": "boolean", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": false }, - { "name": "serde1", "typeName": "sample_serdeType", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": false }, - { "name": "serde2", "typeName": "sample_serdeType", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": false } - ] - }, - { - "name": "sample_column", - "superTypes": [ "DataSet" ], - "category": "ENTITY", - "typeVersion": "1.0", - "attributeDefs": [ - { "name": "dataType", "typeName": "string", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "comment", "typeName": "string", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true } - ] - }, - { - "name": "sample_process", - "superTypes": [ "Process" ], - "category": "ENTITY", - "typeVersion": "1.0", - "attributeDefs": [ - { "name": "userName", "typeName": "string", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "startTime", "typeName": "long", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "endTime", "typeName": "long", "cardinality": "SINGLE", "isOptional": true, "isUnique": false, "isIndexable": true }, - { "name": "queryText", "typeName": "string", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true }, - { "name": "queryId", "typeName": "string", "cardinality": "SINGLE", "isOptional": false, "isUnique": false, "isIndexable": true } - ] - } - ], - "relationshipDefs": [ - { - "name": "sample_db_tables", - "description": "Tables of a db", - "category": "RELATIONSHIP", - "typeVersion": "1.0", - "relationshipCategory": "AGGREGATION", - "propagateTags": "NONE", - "endDef1": { "name": "db", "type": "sample_table", "cardinality": "SINGLE", "isContainer": false, "isLegacyAttribute": false }, - "endDef2": { "name": "tables", "type": "sample_db", "cardinality": "SET", "isContainer": true, "isLegacyAttribute": false } - }, - { - "name": "sample_table_columns", - "description": "Columns of a table", - "category": "RELATIONSHIP", - "typeVersion": "1.0", - "relationshipCategory": "COMPOSITION", - "propagateTags": "NONE", - "endDef1": { "name": "table", "type": "sample_column", "cardinality": "SINGLE", "isContainer": false, "isLegacyAttribute": false }, - "endDef2": { "name": "columns", "type": "sample_table", "cardinality": "SET", "isContainer": true, 
"isLegacyAttribute": false } - } - ], - "businessMetadataDefs": [ - { - "name": "sample_bm", - "description": "Sample business metadata", - "category": "BUSINESS_METADATA", - "typeVersion": "1.0", - "attributeDefs": [ - { - "name": "attr1", - "typeName": "boolean", - "description": "Boolean attribute", - "cardinality": "SINGLE", - "isOptional": true, - "options": { - "applicableEntityTypes": "[ \"sample_db\", \"sample_table\" ]", - "maxStrLength": 50 - } - }, - { - "name": "attr2", - "typeName": "byte", - "description": "Byte attribute", - "cardinality": "SINGLE", - "isOptional": true, - "isUnique": false, - "isIndexable": true, - "options": { - "applicableEntityTypes": "[ \"sample_db\", \"sample_table\" ]", - "maxStrLength": 50 - } - }, - { - "name": "attr3", - "typeName": "string", - "description": "String attribute", - "cardinality": "SINGLE", - "isOptional": true, - "isUnique": false, - "isIndexable": true, - "searchWeight": 0, - "options": { - "applicableEntityTypes": "[ \"sample_db\", \"sample_table\" ]", - "maxStrLength": 50 - } - } - ] - }, - { - "name": "sample_bm_mv", - "description": "Sample business metadata with multi-value attributes", - "category": "BUSINESS_METADATA", - "typeVersion": "1.0", - "attributeDefs": [ - { - "name": "mv_attr1", - "typeName": "array", - "description": "Array of booleans", - "cardinality": "SINGLE", - "isOptional": true, - "options": { - "applicableEntityTypes": "[\"sample_db\",\"sample_table\"]", - "maxStrLength": 50 - } - }, - { - "name": "mv_attr2", - "typeName": "array", - "description": "Array of strings", - "cardinality": "SINGLE", - "isOptional": true, - "options": { - "applicableEntityTypes": "[ \"sample_db\", \"sample_table\" ]", - "maxStrLength": 50 - } - } - ] - } - ] -} diff --git a/atlas-examples/sample-app/src/main/python/sample_client.py b/atlas-examples/sample-app/src/main/python/sample_client.py deleted file mode 100644 index 33c430f436..0000000000 --- a/atlas-examples/sample-app/src/main/python/sample_client.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging -import getpass - -from apache_atlas.client.base_client import AtlasClient -from typedef_example import TypeDefExample -from entity_example import EntityExample -from lineage_example import LineageExample -from glossary_example import GlossaryExample -from discovery_example import DiscoveryExample -from utils import METRIC_CLASSIFICATION, NAME - - -LOG = logging.getLogger('sample-example') - - -class SampleApp: - def __init__(self): - self.created_entity = None - - def main(self): - # Python3 - global input - try: input = raw_input - except NameError: pass - - url = input('Enter Atlas URL: ') - username = input('Enter username: ') - password = getpass.getpass('Enter password: ') - - client = AtlasClient(url, (username, password)) - - self.__typedef_example(client) - - self.__entity_example(client) - - self.__lineage_example(client) - - self.__discovery_example(client) - - self.__glossary_example(client) - - self.__entity_cleanup() - - - def __typedef_example(self, client): - LOG.info("\n---------- Creating Sample Types -----------") - - typedefExample = TypeDefExample(client) - - typedefExample.create_type_def() - - def __entity_example(self, client): - LOG.info("\n---------- Creating Sample Entities -----------") - - self.entityExample = EntityExample(client) - - self.entityExample.create_entities() - - self.created_entity = self.entityExample.get_table_entity() - - if self.created_entity and self.created_entity.guid: - self.entityExample.get_entity_by_guid(self.created_entity.guid) - - def __lineage_example(self, client): - LOG.info("\n---------- Lineage example -----------") - - lineage = LineageExample(client) - - if self.created_entity: - lineage.lineage(self.created_entity.guid) - else: - LOG.info("Create entity first to get lineage info") - - def __discovery_example(self, client): - LOG.info("\n---------- Search example -----------") - - discovery = DiscoveryExample(client) - - discovery.dsl_search() - - if not self.created_entity: - LOG.info("Create entity first to get search info") - return - - discovery.quick_search(self.created_entity.typeName) - - discovery.basic_search(self.created_entity.typeName, METRIC_CLASSIFICATION, self.created_entity.attributes[NAME]) - - def __glossary_example(self, client): - LOG.info("\n---------- Glossary Example -----------") - - glossary = GlossaryExample(client) - glossary_obj = glossary.create_glossary() - - if not glossary_obj: - LOG.info("Create glossary first") - return - - glossary.create_glossary_term() - glossary.get_glossary_detail() - glossary.create_glossary_category() - glossary.delete_glossary() - - def __entity_cleanup(self): - LOG.info("\n---------- Deleting Entities -----------") - - self.entityExample.remove_entities() - - -if __name__ == "__main__": - SampleApp().main() diff --git a/atlas-examples/sample-app/src/main/python/typedef_example.py b/atlas-examples/sample-app/src/main/python/typedef_example.py deleted file mode 100644 index 1aab7f1797..0000000000 --- a/atlas-examples/sample-app/src/main/python/typedef_example.py +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import logging -import utils - -from apache_atlas.utils import type_coerce -from apache_atlas.model.misc import SearchFilter -from apache_atlas.model.typedef import AtlasTypesDef - - -LOG = logging.getLogger('sample-example') - - -class TypeDefExample: - SAMPLE_APP_TYPES = [ - utils.DATABASE_TYPE, - utils.TABLE_TYPE, - utils.COLUMN_TYPE, - utils.PROCESS_TYPE, - utils.PII_TAG, - utils.FINANCE_TAG, - utils.METRIC_CLASSIFICATION - ] - - def __init__(self, client): - self.typesDef = None - self.client = client - - def create_type_def(self): - try: - if not self.typesDef: - with open('request_json/typedef_create.json') as f: - typedef = type_coerce(json.load(f), AtlasTypesDef) - self.typesDef = self.__create(typedef) - except Exception as e: - LOG.exception("Error in creating typeDef", exc_info=e) - - def print_typedefs(self): - for type_name in TypeDefExample.SAMPLE_APP_TYPES: - filter_params = {"name": type_name} - search = SearchFilter(filter_params) - response = self.client.typedef.get_all_typedefs(search) - - if response: - LOG.info("Created type: [%s]", type_name) - - def remove_typedefs(self): - if not self.typesDef: - LOG.info("There is no typeDef to delete.") - else: - for type_name in TypeDefExample.SAMPLE_APP_TYPES: - self.client.typedef.delete_type_by_name(type_name) - - self.typesDef = None - - LOG.info("Deleted typeDef successfully!") - - def __create(self, type_def): - types_to_create = AtlasTypesDef() - - types_to_create.enumDefs = [] - types_to_create.structDefs = [] - types_to_create.classificationDefs = [] - types_to_create.entityDefs = [] - types_to_create.relationshipDefs = [] - types_to_create.businessMetadataDefs = [] - - for enum_def in type_def.enumDefs: - if self.client.typedef.type_with_name_exists(enum_def.name): - LOG.info("Type with name %s already exists. Skipping.", enum_def.name) - else: - types_to_create.enumDefs.append(enum_def) - - for struct_def in type_def.structDefs: - if self.client.typedef.type_with_name_exists(struct_def.name): - LOG.info("Type with name %s already exists. Skipping.", struct_def.name) - else: - types_to_create.structDefs.append(struct_def) - - for classification_def in type_def.classificationDefs: - if self.client.typedef.type_with_name_exists(classification_def.name): - LOG.info("Type with name %s already exists. Skipping.", classification_def.name) - else: - types_to_create.classificationDefs.append(classification_def) - - for entity_def in type_def.entityDefs: - if self.client.typedef.type_with_name_exists(entity_def.name): - LOG.info("Type with name %s already exists. Skipping.", entity_def.name) - else: - types_to_create.entityDefs.append(entity_def) - - for relationship_def in type_def.relationshipDefs: - if self.client.typedef.type_with_name_exists(relationship_def.name): - LOG.info("Type with name %s already exists. Skipping.", relationship_def.name) - else: - types_to_create.relationshipDefs.append(relationship_def) - - for business_metadata_def in type_def.businessMetadataDefs: - if self.client.typedef.type_with_name_exists(business_metadata_def.name): - LOG.info("Type with name %s already exists. 
Skipping.", business_metadata_def.name) - else: - types_to_create.businessMetadataDefs.append(business_metadata_def) - - return self.client.typedef.create_atlas_typedefs(types_to_create) diff --git a/atlas-examples/sample-app/src/main/python/utils.py b/atlas-examples/sample-app/src/main/python/utils.py deleted file mode 100644 index c83472d789..0000000000 --- a/atlas-examples/sample-app/src/main/python/utils.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env/python - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -NAME = "name" -DESCRIPTION = "description" - -PII_TAG = "sample_pii" -FINANCE_TAG = "sample_finance" -METRIC_CLASSIFICATION = "sample_metric" - -DATABASE_TYPE = "sample_db" -PROCESS_TYPE = "sample_process" -TABLE_TYPE = "sample_table" -COLUMN_TYPE = "sample_column" - -TABLE_DATABASE_TYPE = "sample_db_tables" -TABLE_COLUMNS_TYPE = "sample_table_columns" -ENUM_TABLE_TYPE = "sample_tableType" -BUSINESS_METADATA_TYPE = "sample_bm" -BUSINESS_METADATA_TYPE_MV = "sample_bm_mv" -STRUCT_TYPE_SERDE = "sample_serdeType" diff --git a/atlas-examples/sample-app/src/main/resources/atlas-application.properties b/atlas-examples/sample-app/src/main/resources/atlas-application.properties deleted file mode 100644 index d24c663fd3..0000000000 --- a/atlas-examples/sample-app/src/main/resources/atlas-application.properties +++ /dev/null @@ -1,25 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
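The constants in the deleted utils.py above are shared by the sample drivers; a small, hypothetical sketch of how sample_client.py combines them with the discovery example for a classification-filtered search (connection details and entity names are placeholders taken from the sample JSON payloads above):

from apache_atlas.client.base_client import AtlasClient
from discovery_example import DiscoveryExample
from utils import METRIC_CLASSIFICATION

# placeholder endpoint and credentials for illustration only
client = AtlasClient("http://localhost:31000", ("admin", "admin"))

discovery = DiscoveryExample(client)
discovery.dsl_search()
discovery.quick_search("sample_table")
discovery.basic_search("sample_table", METRIC_CLASSIFICATION, "employees_canada")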
-# - -######### Security Properties ######### - -# SSL config -atlas.enableTLS=false - -######### Server Properties ######### -atlas.rest.address=http://localhost:31000 \ No newline at end of file diff --git a/auth-agents-common/pom.xml b/auth-agents-common/pom.xml index aa37156d5f..3e0a035246 100644 --- a/auth-agents-common/pom.xml +++ b/auth-agents-common/pom.xml @@ -29,8 +29,8 @@ auth-agents-common - 8 - 8 + 17 + 17 1.9.13 31.1-jre @@ -96,7 +96,7 @@ org.apache.maven.plugins maven-shade-plugin - 3.2.4 + ${maven-shade-plugin} package diff --git a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/AbstractRangerAdminClient.java b/auth-agents-common/src/main/java/org/apache/atlas/admin/client/AbstractRangerAdminClient.java deleted file mode 100644 index 0a3d6c6326..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/AbstractRangerAdminClient.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.admin.client; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.util.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -public abstract class AbstractRangerAdminClient implements RangerAdminClient { - private static final Logger LOG = LoggerFactory.getLogger(AbstractRangerAdminClient.class); - - protected Gson gson; - - @Override - public void init(String serviceName, String appId, String configPropertyPrefix, Configuration config) { - Gson gson = null; - - try { - gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create(); - } catch(Throwable excp) { - LOG.error("AbstractRangerAdminClient: failed to create GsonBuilder object", excp); - } - - this.gson = gson; - } - - @Override - public ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - return null; - } - - @Override - public RangerRoles getRolesIfUpdated(long lastKnownRoleVersion, long lastActivationTimeInMillis) throws Exception { - return null; - } - - @Override - public RangerRole createRole(RangerRole request) throws Exception { - return null; - } - - @Override - public void dropRole(String execUser, String roleName) throws Exception { - - } - - @Override - public List getAllRoles(String execUser) throws Exception { - return null; - } - - @Override - public List getUserRoles(String execUser) throws Exception { - return null; - } - - @Override - public RangerRole getRole(String execUser, String roleName) throws Exception { - return null; - } - - @Override - public void grantRole(GrantRevokeRoleRequest 
request) throws Exception { - - } - - @Override - public void revokeRole(GrantRevokeRoleRequest request) throws Exception { - - } - - @Override - public void grantAccess(GrantRevokeRequest request) throws Exception { - - } - - @Override - public void revokeAccess(GrantRevokeRequest request) throws Exception { - - } - - @Override - public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - return null; - } - - @Override - public List getTagTypes(String tagTypePattern) throws Exception { - return null; - } - - @Override - public RangerUserStore getUserStoreIfUpdated(long lastKnownUserStoreVersion, long lastActivationTimeInMillis) throws Exception { - return null; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminClient.java b/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminClient.java deleted file mode 100644 index 795ffad021..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminClient.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.atlas.admin.client; - - -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.util.GrantRevokeRequest; -import org.apache.atlas.plugin.util.GrantRevokeRoleRequest; -import org.apache.atlas.plugin.util.RangerRoles; -import org.apache.atlas.plugin.util.RangerUserStore; -import org.apache.atlas.plugin.util.ServicePolicies; -import org.apache.atlas.plugin.util.ServiceTags; - -import java.util.List; - - -public interface RangerAdminClient { - - void init(String serviceName, String appId, String configPropertyPrefix, Configuration config); - - ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception; - - RangerRoles getRolesIfUpdated(long lastKnownRoleVersion, long lastActivationTimeInMills) throws Exception; - - RangerRole createRole(RangerRole request) throws Exception; - - void dropRole(String execUser, String roleName) throws Exception; - - List getAllRoles(String execUser) throws Exception; - - List getUserRoles(String execUser) throws Exception; - - RangerRole getRole(String execUser, String roleName) throws Exception; - - void grantRole(GrantRevokeRoleRequest request) throws Exception; - - void revokeRole(GrantRevokeRoleRequest request) throws Exception; - - void grantAccess(GrantRevokeRequest request) throws Exception; - - void revokeAccess(GrantRevokeRequest request) throws Exception; - - ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception; - - List getTagTypes(String tagTypePattern) throws Exception; - - RangerUserStore getUserStoreIfUpdated(long lastKnownUserStoreVersion, long lastActivationTimeInMillis) throws Exception; - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminRESTClient.java b/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminRESTClient.java deleted file mode 100644 index 49522fd602..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/RangerAdminRESTClient.java +++ /dev/null @@ -1,1500 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.admin.client; - - -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.GenericType; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.AccessControlException; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.atlas.admin.client.datatype.RESTResponse; -import org.apache.atlas.audit.provider.MiscUtil; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.authorization.utils.StringUtil; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.util.*; - -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.Cookie; -import javax.ws.rs.core.NewCookie; -import java.io.UnsupportedEncodingException; -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.security.PrivilegedAction; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class RangerAdminRESTClient extends AbstractRangerAdminClient { - private static final Log LOG = LogFactory.getLog(RangerAdminRESTClient.class); - - private String serviceName; - private String serviceNameUrlParam; - private String pluginId; - private String clusterName; - private RangerRESTClient restClient; - private RangerRESTUtils restUtils = new RangerRESTUtils(); - private boolean supportsPolicyDeltas; - private boolean supportsTagDeltas; - private boolean isRangerCookieEnabled; - private String rangerAdminCookieName; - private Cookie policyDownloadSessionId = null; - private boolean isValidPolicyDownloadSessionCookie = false; - private Cookie tagDownloadSessionId = null; - private boolean isValidTagDownloadSessionCookie = false; - private Cookie roleDownloadSessionId = null; - private Cookie userStoreDownloadSessionId = null; - private boolean isValidRoleDownloadSessionCookie = false; - private boolean isValidUserStoreDownloadSessionCookie = false; - private final String pluginCapabilities = Long.toHexString(new RangerPluginCapability().getPluginCapabilities()); - - public static GenericType> getGenericType(final T clazz) { - - ParameterizedType parameterizedGenericType = new ParameterizedType() { - public Type[] getActualTypeArguments() { - return new Type[] { clazz.getClass() }; - } - - public Type getRawType() { - return List.class; - } - - public Type getOwnerType() { - return List.class; - } - }; - - return new GenericType>(parameterizedGenericType) {}; - } - - @Override - public void init(String serviceName, String appId, String propertyPrefix, Configuration config) { - super.init(serviceName, appId, propertyPrefix, config); - - this.serviceName = serviceName; - this.pluginId = restUtils.getPluginId(serviceName, appId); - - String url = ""; - String tmpUrl = config.get(propertyPrefix + ".policy.rest.url"); - String sslConfigFileName = config.get(propertyPrefix + ".policy.rest.ssl.config.file"); - clusterName = config.get(propertyPrefix + ".access.cluster.name", ""); - if(StringUtil.isEmpty(clusterName)){ - clusterName =config.get(propertyPrefix + ".ambari.cluster.name", ""); - if (StringUtil.isEmpty(clusterName)) { - if (config instanceof RangerPluginConfig) { - clusterName = ((RangerPluginConfig)config).getClusterName(); - } - } - } - int restClientConnTimeOutMs = config.getInt(propertyPrefix + ".policy.rest.client.connection.timeoutMs", 120 * 1000); - int restClientReadTimeOutMs = 
config.getInt(propertyPrefix + ".policy.rest.client.read.timeoutMs", 30 * 1000); - supportsPolicyDeltas = config.getBoolean(propertyPrefix + RangerCommonConstants.PLUGIN_CONFIG_SUFFIX_POLICY_DELTA, RangerCommonConstants.PLUGIN_CONFIG_SUFFIX_POLICY_DELTA_DEFAULT); - supportsTagDeltas = config.getBoolean(propertyPrefix + RangerCommonConstants.PLUGIN_CONFIG_SUFFIX_TAG_DELTA, RangerCommonConstants.PLUGIN_CONFIG_SUFFIX_TAG_DELTA_DEFAULT); - isRangerCookieEnabled = config.getBoolean(propertyPrefix + ".policy.rest.client.cookie.enabled", RangerCommonConstants.POLICY_REST_CLIENT_SESSION_COOKIE_ENABLED); - rangerAdminCookieName = config.get(propertyPrefix + ".policy.rest.client.session.cookie.name", RangerCommonConstants.DEFAULT_COOKIE_NAME); - - if (!StringUtil.isEmpty(tmpUrl)) { - url = tmpUrl.trim(); - } - if (url.endsWith("/")) { - url = url.substring(0, url.length() - 1); - } - - init(url, sslConfigFileName, restClientConnTimeOutMs , restClientReadTimeOutMs, config); - - try { - this.serviceNameUrlParam = URLEncoderUtil.encodeURIParam(serviceName); - } catch (UnsupportedEncodingException e) { - LOG.warn("Unsupported encoding, serviceName=" + serviceName); - this.serviceNameUrlParam = serviceName; - } - } - - @Override - public ServicePolicies getServicePoliciesIfUpdated(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getServicePoliciesIfUpdated(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ServicePolicies ret; - - if (isRangerCookieEnabled && policyDownloadSessionId != null && isValidPolicyDownloadSessionCookie) { - ret = getServicePoliciesIfUpdatedWithCookie(lastKnownVersion, lastActivationTimeInMillis); - } else { - ret = getServicePoliciesIfUpdatedWithCred(lastKnownVersion, lastActivationTimeInMillis); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServicePoliciesIfUpdated(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - @Override - public RangerRoles getRolesIfUpdated(final long lastKnownRoleVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRolesIfUpdated(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerRoles ret; - - if (isRangerCookieEnabled && roleDownloadSessionId != null && isValidRoleDownloadSessionCookie) { - ret = getRolesIfUpdatedWithCookie(lastKnownRoleVersion, lastActivationTimeInMillis); - } else { - ret = getRolesIfUpdatedWithCred(lastKnownRoleVersion, lastActivationTimeInMillis); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRolesIfUpdated(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + "): "); - } - - return ret; - } - - @Override - public RangerRole createRole(final RangerRole request) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.createRole(" + request + ")"); - } - - RangerRole ret = null; - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_CREATE_ROLE; - - Map queryParams = new HashMap (); - queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public 
ClientResponse run() { - ClientResponse clientRes = null; - try { - clientRes = restClient.post(relativeURL, queryParams, request); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("create role as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.post(relativeURL, queryParams, request); - } - - if(response != null && response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("createRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if(response.getStatus()==HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else if(response == null) { - throw new Exception("unknown error during createRole. roleName=" + request.getName()); - } else { - ret = response.getEntity(RangerRole.class); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.createRole(" + request + ")"); - } - return ret; - } - - @Override - public void dropRole(final String execUser, final String roleName) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.dropRole(" + roleName + ")"); - } - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam); - queryParams.put(RangerRESTUtils.REST_PARAM_EXEC_USER, execUser); - - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_DROP_ROLE + roleName; - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientRes = null; - try { - clientRes = restClient.delete(relativeURL, queryParams); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("drop role as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.delete(relativeURL, queryParams); - } - if(response == null) { - throw new Exception("unknown error during deleteRole. roleName=" + roleName); - } else if(response.getStatus() != HttpServletResponse.SC_OK && response.getStatus() != HttpServletResponse.SC_NO_CONTENT) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("createRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? 
(", user=" + user) : "")); - - if(response.getStatus()==HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.deleteRole(" + roleName + ")"); - } - } - - @Override - public List getUserRoles(final String execUser) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getUserRoles(" + execUser + ")"); - } - - List ret = null; - String emptyString = ""; - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GET_USER_ROLES + execUser; - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientRes = null; - try { - clientRes = restClient.get(relativeURL, null); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("get roles as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.get(relativeURL, null); - } - if(response != null) { - if (response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("getUserRoles() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else { - ret = response.getEntity(getGenericType(emptyString)); - } - } else { - throw new Exception("unknown error during getUserRoles. 
execUser=" + execUser); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserRoles(" + execUser + ")"); - } - return ret; - } - - @Override - public List getAllRoles(final String execUser) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getAllRoles()"); - } - - List ret = null; - String emptyString = ""; - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GET_ALL_ROLES; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam); - queryParams.put(RangerRESTUtils.REST_PARAM_EXEC_USER, execUser); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientRes = null; - try { - clientRes = restClient.get(relativeURL, queryParams); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("get roles as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.get(relativeURL, queryParams); - } - if(response != null) { - if (response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("getAllRoles() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else { - ret = response.getEntity(getGenericType(emptyString)); - } - } else { - throw new Exception("unknown error during getAllRoles."); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getAllRoles()"); - } - return ret; - } - - @Override - public RangerRole getRole(final String execUser, final String roleName) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getPrincipalsForRole(" + roleName + ")"); - } - - RangerRole ret = null; - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GET_ROLE_INFO + roleName; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam); - queryParams.put(RangerRESTUtils.REST_PARAM_EXEC_USER, execUser); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientResp = null; - try { - clientResp = restClient.get(relativeURL, queryParams); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("get role info as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.get(relativeURL, queryParams); - } - if(response != null) { - if (response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("getPrincipalsForRole() failed: HTTP status=" + response.getStatus() + ", message=" + 
resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else { - ret = response.getEntity(RangerRole.class); - } - } else { - throw new Exception("unknown error during getPrincipalsForRole. roleName=" + roleName); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getPrincipalsForRole(" + roleName + ")"); - } - return ret; - } - - - @Override - public void grantRole(final GrantRevokeRoleRequest request) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.grantRole(" + request + ")"); - } - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GRANT_ROLE + serviceNameUrlParam; - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientResp = null; - try { - clientResp = restClient.put(relativeURL, null, request); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("grant role as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.put(relativeURL, null, request); - } - if(response != null && response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("grantRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if(response.getStatus()==HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else if(response == null) { - throw new Exception("unknown error during grantRole. 
serviceName=" + serviceName); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.grantRole(" + request + ")"); - } - } - - @Override - public void revokeRole(final GrantRevokeRoleRequest request) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.revokeRole(" + request + ")"); - } - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_REVOKE_ROLE + serviceNameUrlParam; - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientResp = null; - try { - clientResp = restClient.put(relativeURL, null, request); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("revoke role as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.put(relativeURL, null, request); - } - if(response != null && response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("revokeRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if(response.getStatus()==HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else if(response == null) { - throw new Exception("unknown error during revokeRole. serviceName=" + serviceName); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.revokeRole(" + request + ")"); - } - } - - @Override - public void grantAccess(final GrantRevokeRequest request) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.grantAccess(" + request + ")"); - } - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - String relativeURL = RangerRESTUtils.REST_URL_SECURE_SERVICE_GRANT_ACCESS + serviceNameUrlParam; - ClientResponse clientResp = null; - try { - clientResp = restClient.post(relativeURL, queryParams, request); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("grantAccess as user " + user); - } - response = user.doAs(action); - } else { - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GRANT_ACCESS + serviceNameUrlParam; - response = restClient.post(relativeURL, queryParams, request); - } - if(response != null && response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("grantAccess() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? 
(", user=" + user) : "")); - - if(response.getStatus()==HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else if(response == null) { - throw new Exception("unknown error during grantAccess. serviceName=" + serviceName); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.grantAccess(" + request + ")"); - } - } - - @Override - public void revokeAccess(final GrantRevokeRequest request) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.revokeAccess(" + request + ")"); - } - - ClientResponse response = null; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - String relativeURL = RangerRESTUtils.REST_URL_SECURE_SERVICE_REVOKE_ACCESS + serviceNameUrlParam; - ClientResponse clientResp = null; - try { - clientResp = restClient.post(relativeURL, queryParams, request); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("revokeAccess as user " + user); - } - response = user.doAs(action); - } else { - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_REVOKE_ACCESS + serviceNameUrlParam; - response = restClient.post(relativeURL, queryParams, request); - } - - if(response != null && response.getStatus() != HttpServletResponse.SC_OK) { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("revokeAccess() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : "")); - - if(response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) { - throw new AccessControlException(); - } - - throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage()); - } else if(response == null) { - throw new Exception("unknown error. revokeAccess(). 
serviceName=" + serviceName); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.revokeAccess(" + request + ")"); - } - } - - private void init(String url, String sslConfigFileName, int restClientConnTimeOutMs , int restClientReadTimeOutMs, Configuration config) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.init(" + url + ", " + sslConfigFileName + ")"); - } - - restClient = new RangerRESTClient(url, sslConfigFileName, config); - restClient.setRestClientConnTimeOutMs(restClientConnTimeOutMs); - restClient.setRestClientReadTimeOutMs(restClientReadTimeOutMs); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.init(" + url + ", " + sslConfigFileName + ")"); - } - } - - @Override - public ServiceTags getServiceTagsIfUpdated(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getServiceTagsIfUpdated(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): "); - } - - final ServiceTags ret; - - if (isRangerCookieEnabled && tagDownloadSessionId != null && isValidTagDownloadSessionCookie) { - ret = getServiceTagsIfUpdatedWithCookie(lastKnownVersion, lastActivationTimeInMillis); - } else { - ret = getServiceTagsIfUpdatedWithCred(lastKnownVersion, lastActivationTimeInMillis); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServiceTagsIfUpdated(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): "); - } - - return ret; - } - - @Override - public List getTagTypes(String pattern) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getTagTypes(" + pattern + "): "); - } - - List ret = null; - String emptyString = ""; - UserGroupInformation user = MiscUtil.getUGILoginUser(); - boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam); - queryParams.put(RangerRESTUtils.PATTERN_PARAM, pattern); - String relativeURL = RangerRESTUtils.REST_URL_LOOKUP_TAG_NAMES; - - ClientResponse response = null; - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientResp = null; - try { - clientResp = restClient.get(relativeURL, queryParams); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("getTagTypes as user " + user); - } - response = user.doAs(action); - } else { - response = restClient.get(relativeURL, queryParams); - } - - if(response != null && response.getStatus() == HttpServletResponse.SC_OK) { - ret = response.getEntity(getGenericType(emptyString)); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.error("Error getting tags. 
response=" + resp + ", serviceName=" + serviceName + ", " + "pattern=" + pattern); - throw new Exception(resp.getMessage()); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getTagTypes(" + pattern + "): " + ret); - } - - return ret; - } - - @Override - public RangerUserStore getUserStoreIfUpdated(long lastKnownUserStoreVersion, long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getUserStoreIfUpdated(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + ")"); - } - - - final RangerUserStore ret; - - if (isRangerCookieEnabled && userStoreDownloadSessionId != null && isValidUserStoreDownloadSessionCookie) { - ret = getUserStoreIfUpdatedWithCookie(lastKnownUserStoreVersion, lastActivationTimeInMillis); - } else { - ret = getUserStoreIfUpdatedWithCred(lastKnownUserStoreVersion, lastActivationTimeInMillis); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserStoreIfUpdated(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + "): "); - } - - return ret; - } - - /* Policies Download ranger admin rest call methods */ - private ServicePolicies getServicePoliciesIfUpdatedWithCred(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getServicePoliciesIfUpdatedWithCred(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ServicePolicies ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerAdminPolicyDownloadResponse(lastKnownVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - policyDownloadSessionId = null; - LOG.error("Error getting policies; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - setCookieReceivedFromCredSession(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in policies. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - setCookieReceivedFromCredSession(response); - ret = response.getEntity(ServicePolicies.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - policyDownloadSessionId = null; - ret = null; - LOG.error("Error getting policies; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - policyDownloadSessionId = null; - ret = null; - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting policies. 
secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServicePoliciesIfUpdatedWithCred(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ServicePolicies getServicePoliciesIfUpdatedWithCookie(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getServicePoliciesIfUpdatedWithCookie(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ServicePolicies ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerAdminPolicyDownloadResponse(lastKnownVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - policyDownloadSessionId = null; - isValidPolicyDownloadSessionCookie = false; - LOG.error("Error getting policies; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - checkAndResetSessionCookie(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in policies. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - checkAndResetSessionCookie(response); - ret = response.getEntity(ServicePolicies.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - policyDownloadSessionId = null; - isValidPolicyDownloadSessionCookie = false; - ret = null; - LOG.error("Error getting policies; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - policyDownloadSessionId = null; - isValidPolicyDownloadSessionCookie = false; - ret = null; - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting policies. 
secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServicePoliciesIfUpdatedWithCookie(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ClientResponse getRangerAdminPolicyDownloadResponse(final long lastKnownVersion, final long lastActivationTimeInMillis, final UserGroupInformation user, final boolean isSecureMode) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRangerAdminPolicyDownloadResponse(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ClientResponse ret; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_KNOWN_POLICY_VERSION, Long.toString(lastKnownVersion)); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_ACTIVATION_TIME, Long.toString(lastActivationTimeInMillis)); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - queryParams.put(RangerRESTUtils.REST_PARAM_CLUSTER_NAME, clusterName); - queryParams.put(RangerRESTUtils.REST_PARAM_SUPPORTS_POLICY_DELTAS, Boolean.toString(supportsPolicyDeltas)); - queryParams.put(RangerRESTUtils.REST_PARAM_CAPABILITIES, pluginCapabilities); - - if (isSecureMode) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking Service policy if updated as user : " + user); - } - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - String relativeURL = RangerRESTUtils.REST_URL_POLICY_GET_FOR_SECURE_SERVICE_IF_UPDATED + serviceNameUrlParam; - ClientResponse clientResp = null; - try { - clientResp = restClient.get(relativeURL, queryParams, policyDownloadSessionId); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - ret = user.doAs(action); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking Service policy if updated with old api call"); - } - String relativeURL = RangerRESTUtils.REST_URL_POLICY_GET_FOR_SERVICE_IF_UPDATED + serviceNameUrlParam; - ret = restClient.get(relativeURL, queryParams, policyDownloadSessionId); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRangerAdminPolicyDownloadResponse(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private void checkAndResetSessionCookie(ClientResponse response) { - List respCookieList = response.getCookies(); - for (NewCookie respCookie : respCookieList) { - if (respCookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - policyDownloadSessionId = respCookie; - isValidPolicyDownloadSessionCookie = (policyDownloadSessionId != null); - break; - } - } - } - - private void setCookieReceivedFromCredSession(ClientResponse clientResponse) { - if (isRangerCookieEnabled) { - Cookie sessionCookie = null; - List cookieList = clientResponse.getCookies(); - // save cookie received from credentials session login - for (NewCookie cookie : cookieList) { - if (cookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - sessionCookie = cookie.toCookie(); - break; - } - } - policyDownloadSessionId = sessionCookie; - isValidPolicyDownloadSessionCookie = (policyDownloadSessionId != null); - } - } - - /* Tags Download ranger admin rest call */ - private ServiceTags getServiceTagsIfUpdatedWithCred(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - 
LOG.debug("==> RangerAdminRESTClient.getServiceTagsIfUpdatedWithCred(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ServiceTags ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerAdminTagDownloadResponse(lastKnownVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED) { - if (response == null) { - tagDownloadSessionId = null; - LOG.error("Error getting tags; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - setCookieReceivedFromTagDownloadSession(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in tags. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - setCookieReceivedFromTagDownloadSession(response); - ret = response.getEntity(ServiceTags.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - tagDownloadSessionId = null; - ret = null; - LOG.error("Error getting tags; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting tags. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - tagDownloadSessionId = null; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServiceTagsIfUpdatedWithCred(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ServiceTags getServiceTagsIfUpdatedWithCookie(final long lastKnownVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getServiceTagsIfUpdatedWithCookie(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ServiceTags ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerAdminTagDownloadResponse(lastKnownVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED) { - if (response == null) { - tagDownloadSessionId = null; - isValidTagDownloadSessionCookie = false; - LOG.error("Error getting tags; Received NULL response!!. 
secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - checkAndResetTagDownloadSessionCookie(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in tags. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - checkAndResetTagDownloadSessionCookie(response); - ret = response.getEntity(ServiceTags.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - tagDownloadSessionId = null; - isValidTagDownloadSessionCookie = false; - ret = null; - LOG.error("Error getting tags; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownVersion=" + lastKnownVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting tags. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - tagDownloadSessionId = null; - isValidTagDownloadSessionCookie = false; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getServiceTagsIfUpdatedWithCookie(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ClientResponse getRangerAdminTagDownloadResponse(final long lastKnownVersion, final long lastActivationTimeInMillis, final UserGroupInformation user, final boolean isSecureMode) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRangerAdminTagDownloadResponse(" + lastKnownVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ClientResponse ret; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.LAST_KNOWN_TAG_VERSION_PARAM, Long.toString(lastKnownVersion)); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_ACTIVATION_TIME, Long.toString(lastActivationTimeInMillis)); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - queryParams.put(RangerRESTUtils.REST_PARAM_SUPPORTS_TAG_DELTAS, Boolean.toString(supportsTagDeltas)); - queryParams.put(RangerRESTUtils.REST_PARAM_CAPABILITIES, pluginCapabilities); - - if (isSecureMode) { - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - String relativeURL = RangerRESTUtils.REST_URL_GET_SECURE_SERVICE_TAGS_IF_UPDATED + serviceNameUrlParam; - ClientResponse clientResp = null; - try { - clientResp = restClient.get(relativeURL, queryParams, tagDownloadSessionId); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientResp; - } - }; - if (LOG.isDebugEnabled()) { - LOG.debug("getServiceTagsIfUpdated as user " + user); - } - ret = user.doAs(action); - } else { - String relativeURL = RangerRESTUtils.REST_URL_GET_SERVICE_TAGS_IF_UPDATED + serviceNameUrlParam; - ret = restClient.get(relativeURL, 
queryParams); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRangerAdminTagDownloadResponse(" + lastKnownVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private void checkAndResetTagDownloadSessionCookie(ClientResponse response) { - List respCookieList = response.getCookies(); - for (NewCookie respCookie : respCookieList) { - if (respCookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - tagDownloadSessionId = respCookie; - isValidTagDownloadSessionCookie = (tagDownloadSessionId != null); - break; - } - } - } - - private void setCookieReceivedFromTagDownloadSession(ClientResponse clientResponse) { - if (isRangerCookieEnabled) { - Cookie sessionCookie = null; - List cookieList = clientResponse.getCookies(); - // save cookie received from credentials session login - for (NewCookie cookie : cookieList) { - if (cookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - sessionCookie = cookie.toCookie(); - break; - } - } - tagDownloadSessionId = sessionCookie; - isValidTagDownloadSessionCookie = (tagDownloadSessionId != null); - } - } - - /* Roles Download ranger admin rest call methods */ - private RangerRoles getRolesIfUpdatedWithCred(final long lastKnownRoleVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRolesIfUpdatedWithCred(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerRoles ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerRolesDownloadResponse(lastKnownRoleVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - roleDownloadSessionId = null; - LOG.error("Error getting Roles; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - setCookieReceivedFromRoleDownloadSession(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in Roles. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownRoleVersion=" + lastKnownRoleVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - setCookieReceivedFromRoleDownloadSession(response); - ret = response.getEntity(RangerRoles.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - roleDownloadSessionId = null; - ret = null; - LOG.error("Error getting Roles; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownRoleVersion=" + lastKnownRoleVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? 
response.getEntity(String.class) : null; - - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting Roles. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - roleDownloadSessionId = null; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRolesIfUpdatedWithCred(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private RangerRoles getRolesIfUpdatedWithCookie(final long lastKnownRoleVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRolesIfUpdatedWithCookie(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerRoles ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getRangerRolesDownloadResponse(lastKnownRoleVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - roleDownloadSessionId = null; - isValidRoleDownloadSessionCookie = false; - LOG.error("Error getting Roles; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - checkAndResetRoleDownloadSessionCookie(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in Roles. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownRoleVersion=" + lastKnownRoleVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - checkAndResetRoleDownloadSessionCookie(response); - ret = response.getEntity(RangerRoles.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - roleDownloadSessionId = null; - isValidRoleDownloadSessionCookie = false; - ret = null; - LOG.error("Error getting Roles; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownRoleVersion=" + lastKnownRoleVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting Roles. 
secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - roleDownloadSessionId = null; - isValidRoleDownloadSessionCookie = false; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRolesIfUpdatedWithCookie(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - /* Roles Download ranger admin rest call methods */ - private RangerUserStore getUserStoreIfUpdatedWithCred(final long lastKnownUserStoreVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getUserStoreIfUpdatedWithCred(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerUserStore ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getUserStoreDownloadResponse(lastKnownUserStoreVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - userStoreDownloadSessionId = null; - LOG.error("Error getting UserStore; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - setCookieReceivedFromUserStoreDownloadSession(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in UserStore. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownUserStoreVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - setCookieReceivedFromUserStoreDownloadSession(response); - ret = response.getEntity(RangerUserStore.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - userStoreDownloadSessionId = null; - ret = null; - LOG.error("Error getting UserStore; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownUserStoreVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting UserStore. 
secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - userStoreDownloadSessionId = null; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserStoreIfUpdatedWithCred(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private RangerUserStore getUserStoreIfUpdatedWithCookie(final long lastKnownUserStoreVersion, final long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getUserStoreIfUpdatedWithCookie(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerUserStore ret; - - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response = getUserStoreDownloadResponse(lastKnownUserStoreVersion, lastActivationTimeInMillis, user, isSecureMode); - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED || response.getStatus() == HttpServletResponse.SC_NO_CONTENT) { - if (response == null) { - userStoreDownloadSessionId = null; - isValidUserStoreDownloadSessionCookie = false; - LOG.error("Error getting Roles; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user + ", serviceName=" + serviceName); - } else { - checkAndResetUserStoreDownloadSessionCookie(response); - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in Roles. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp + ", serviceName=" + serviceName - + ", " + "lastKnownRoleVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - checkAndResetUserStoreDownloadSessionCookie(response); - ret = response.getEntity(RangerUserStore.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - userStoreDownloadSessionId = null; - isValidUserStoreDownloadSessionCookie = false; - ret = null; - LOG.error("Error getting UserStore; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() + ", serviceName=" + serviceName - + ", " + "lastKnownUserStoreVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - RangerServiceNotFoundException.throwExceptionIfServiceNotFound(serviceName, exceptionMsg); - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting UserStore. 
secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp + ", serviceName=" + serviceName); - userStoreDownloadSessionId = null; - isValidUserStoreDownloadSessionCookie = false; - ret = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserStoreIfUpdatedWithCookie(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ClientResponse getRangerRolesDownloadResponse(final long lastKnownRoleVersion, final long lastActivationTimeInMillis, final UserGroupInformation user, final boolean isSecureMode) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getRangerRolesDownloadResponse(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ClientResponse ret; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_KNOWN_ROLE_VERSION, Long.toString(lastKnownRoleVersion)); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_ACTIVATION_TIME, Long.toString(lastActivationTimeInMillis)); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - queryParams.put(RangerRESTUtils.REST_PARAM_CLUSTER_NAME, clusterName); - queryParams.put(RangerRESTUtils.REST_PARAM_CAPABILITIES, pluginCapabilities); - - if (isSecureMode) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking Roles updated as user : " + user + " isSecureMode :" + isSecureMode); - } - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientRes = null; - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_SERCURE_GET_USER_GROUP_ROLES + serviceNameUrlParam; - try { - clientRes = restClient.get(relativeURL, queryParams, roleDownloadSessionId); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - ret = user.doAs(action); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking Roles updated as user : " + user + " isSecureMode :" + isSecureMode); - } - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GET_USER_GROUP_ROLES + serviceNameUrlParam; - ret = restClient.get(relativeURL, queryParams); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getRangerRolesDownloadResponse(" + lastKnownRoleVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private ClientResponse getUserStoreDownloadResponse(final long lastKnownUserStoreVersion, final long lastActivationTimeInMillis, final UserGroupInformation user, final boolean isSecureMode) throws Exception { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAdminRESTClient.getUserStoreDownloadResponse(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final ClientResponse ret; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_KNOWN_USERSTORE_VERSION, Long.toString(lastKnownUserStoreVersion)); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_ACTIVATION_TIME, Long.toString(lastActivationTimeInMillis)); - queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, pluginId); - queryParams.put(RangerRESTUtils.REST_PARAM_CLUSTER_NAME, clusterName); - queryParams.put(RangerRESTUtils.REST_PARAM_CAPABILITIES, pluginCapabilities); - - if (isSecureMode) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking UserStore updated as user : " + user + " isSecureMode :" + isSecureMode); - } - PrivilegedAction action = new PrivilegedAction() { - public 
ClientResponse run() { - ClientResponse clientRes = null; - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_SERCURE_GET_USERSTORE + serviceNameUrlParam; - try { - clientRes = restClient.get(relativeURL, queryParams, userStoreDownloadSessionId); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - ret = user.doAs(action); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking UserStore updated as user : " + user + " isSecureMode :" + isSecureMode); - } - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GET_USERSTORE + serviceNameUrlParam; - ret = restClient.get(relativeURL, queryParams); - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserStoreDownloadResponse(" + restClient.getUsername() + ", " + restClient.getPassword() + "): " + ret); - } - - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAdminRESTClient.getUserStoreDownloadResponse(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + "): " + ret); - } - - return ret; - } - - private void checkAndResetRoleDownloadSessionCookie(ClientResponse response) { - List respCookieList = response.getCookies(); - for (NewCookie respCookie : respCookieList) { - if (respCookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - roleDownloadSessionId = respCookie; - isValidRoleDownloadSessionCookie = (roleDownloadSessionId != null); - break; - } - } - } - private void checkAndResetUserStoreDownloadSessionCookie(ClientResponse response) { - List respCookieList = response.getCookies(); - for (NewCookie respCookie : respCookieList) { - if (respCookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - userStoreDownloadSessionId = respCookie; - isValidUserStoreDownloadSessionCookie = (userStoreDownloadSessionId != null); - break; - } - } - } - - private void setCookieReceivedFromRoleDownloadSession(ClientResponse clientResponse) { - if (isRangerCookieEnabled) { - Cookie sessionCookie = null; - List cookieList = clientResponse.getCookies(); - // save cookie received from credentials session login - for (NewCookie cookie : cookieList) { - if (cookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - sessionCookie = cookie.toCookie(); - break; - } - } - roleDownloadSessionId = sessionCookie; - isValidRoleDownloadSessionCookie = (roleDownloadSessionId != null); - } - } - private void setCookieReceivedFromUserStoreDownloadSession(ClientResponse clientResponse) { - if (isRangerCookieEnabled) { - Cookie sessionCookie = null; - List cookieList = clientResponse.getCookies(); - // save cookie received from credentials session login - for (NewCookie cookie : cookieList) { - if (cookie.getName().equalsIgnoreCase(rangerAdminCookieName)) { - sessionCookie = cookie.toCookie(); - break; - } - } - userStoreDownloadSessionId = sessionCookie; - isValidUserStoreDownloadSessionCookie = (userStoreDownloadSessionId != null); - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/GrantRevokeData.java b/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/GrantRevokeData.java deleted file mode 100644 index 22c9470abc..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/GrantRevokeData.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
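Aside, for readers of the RangerAdminRESTClient removal above: every download flow (policies, tags, roles, user-store) repeats the same session-cookie handling, differing only in which field it updates. A minimal sketch of that shared pattern, using the Jersey 1.x types the deleted class relies on; the SessionCookieTracker name is illustrative and does not exist in this codebase:

    import java.util.List;
    import javax.ws.rs.core.Cookie;
    import javax.ws.rs.core.NewCookie;
    import com.sun.jersey.api.client.ClientResponse;

    class SessionCookieTracker {
        private final String cookieName; // the configured rangerAdminCookieName
        private Cookie sessionId;        // last session cookie received from Ranger admin
        private boolean valid;           // whether the cached cookie may be reused

        SessionCookieTracker(String cookieName) {
            this.cookieName = cookieName;
        }

        // Sketch of setCookieReceivedFrom*Session / checkAndReset*SessionCookie above:
        // scan the response cookies and cache the admin session cookie, if present.
        void captureFrom(ClientResponse response) {
            List<NewCookie> cookies = response.getCookies();
            for (NewCookie cookie : cookies) {
                if (cookie.getName().equalsIgnoreCase(cookieName)) {
                    sessionId = cookie.toCookie();
                    break;
                }
            }
            valid = (sessionId != null);
        }

        // Cookie-based download is attempted only while a previously captured cookie is still considered valid.
        boolean canUseCookie() {
            return valid;
        }

        Cookie get() {
            return sessionId;
        }
    }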
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.atlas.admin.client.datatype; - - -import org.apache.atlas.authorization.utils.StringUtil; -import org.apache.atlas.plugin.util.JsonUtilsV2; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; - -import java.util.ArrayList; -import java.util.List; - - -@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown = true) -public class GrantRevokeData implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String grantor; - private String repositoryName; - private String repositoryType; - private String databases; - private String tables; - private String columns; - private String columnFamilies; - private List permMapList = new ArrayList<>(); - - private static String WILDCARD_ASTERISK = "*"; - - public GrantRevokeData() { - } - - public String getGrantor() { - return grantor; - } - - public void setGrantor(String grantor) { - this.grantor = grantor; - } - - public String getRepositoryName() { - return repositoryName; - } - - public void setRepositoryName(String repositoryName) { - this.repositoryName = repositoryName; - } - - public String getRepositoryType() { - return repositoryType; - } - - public void setRepositoryType(String repositoryType) { - this.repositoryType = repositoryType; - } - - public String getDatabases() { - return databases; - } - - public void setDatabases(String databases) { - this.databases = databases; - } - - public String getTables() { - return tables; - } - - public void setTables(String tables) { - this.tables = tables; - } - - public String getColumns() { - return columns; - } - - public void setColumns(String columns) { - this.columns = columns; - } - - public String getColumnFamilies() { - return columnFamilies; - } - - public void setColumnFamilies(String columnFamilies) { - this.columnFamilies = columnFamilies; - } - - public List getPermMapList() { - return permMapList; - } - - public void setPermMapList(List permMapList) { - this.permMapList = permMapList; - } - - - public void setHiveData(String grantor, - String repositoryName, - String databases, - String tables, - String columns, - PermMap permMap) { - this.grantor = grantor; - this.repositoryName = repositoryName; - this.repositoryType = "hive"; - this.databases = StringUtil.isEmpty(databases) ? WILDCARD_ASTERISK : databases; - this.tables = StringUtil.isEmpty(tables) ? WILDCARD_ASTERISK : tables; - this.columns = StringUtil.isEmpty(columns) ? 
WILDCARD_ASTERISK : columns; - this.permMapList.add(permMap); - } - - public void setHBaseData(String grantor, - String repositoryName, - String tables, - String columns, - String columnFamilies, - PermMap permMap) { - this.grantor = grantor; - this.repositoryName = repositoryName; - this.repositoryType = "hbase"; - this.tables = StringUtil.isEmpty(tables) ? WILDCARD_ASTERISK : tables; - this.columns = StringUtil.isEmpty(columns) ? WILDCARD_ASTERISK : columns; - this.columnFamilies = StringUtil.isEmpty(columnFamilies) ? WILDCARD_ASTERISK : columnFamilies; - this.permMapList.add(permMap); - } - - public String toJson() { - try { - return JsonUtilsV2.objToJson(this); - } catch (Exception e) { - e.printStackTrace(); - } - - return ""; - } - - @Override - public String toString() { - return toJson(); - } - - @JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) - @JsonIgnoreProperties(ignoreUnknown = true) - public static class PermMap implements java.io.Serializable { - private List userList = new ArrayList<>(); - private List groupList = new ArrayList<>(); - private List permList = new ArrayList<>(); - - public PermMap() { - } - - public PermMap(String user, String group, String perm) { - addUser(user); - addGroup(group); - addPerm(perm); - } - - public PermMap(List userList, List groupList, List permList) { - copyList(userList, this.userList); - copyList(groupList, this.groupList); - copyList(permList, this.permList); - } - - public List getUserList() { - return userList; - } - - public List getGroupList() { - return groupList; - } - - public List getPermList() { - return permList; - } - - public void addUser(String user) { - addToList(user, userList); - } - - public void addGroup(String group) { - addToList(group, groupList); - } - - public void addPerm(String perm) { - addToList(perm, permList); - } - - private void addToList(String str, List list) { - if(list != null && !StringUtil.isEmpty(str)) { - list.add(str); - } - } - - private void copyList(List fromList, List toList) { - if(fromList != null && toList != null) { - for(String str : fromList) { - addToList(str, toList); - } - } - } - - public String toJson() { - try { - return JsonUtilsV2.objToJson(this); - } catch (Exception e) { - e.printStackTrace(); - } - - return ""; - } - - @Override - public String toString() { - return toJson(); - } - } - - public static void main(String[] args) { - GrantRevokeData grData = new GrantRevokeData(); - - System.out.println(grData.toString()); - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/RESTResponse.java b/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/RESTResponse.java deleted file mode 100644 index f48d1c5688..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/admin/client/datatype/RESTResponse.java +++ /dev/null @@ -1,211 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
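Aside on the GrantRevokeData removal above: the class was a plain JSON carrier for grant/revoke requests, with empty resource fields widened to "*". A hypothetical usage sketch against the deleted API (all literal values below are made up for illustration):

    // Build the permission mapping: who gets which permission.
    GrantRevokeData.PermMap permMap = new GrantRevokeData.PermMap("alice", "analysts", "select");

    // Populate a Hive grant; empty databases/tables/columns default to the "*" wildcard.
    GrantRevokeData grantData = new GrantRevokeData();
    grantData.setHiveData("admin",     // grantor
                          "hivedev",   // repositoryName
                          "sales_db",  // databases
                          "orders",    // tables
                          "",          // columns, widened to "*"
                          permMap);

    // Serialized with JsonUtilsV2 before being sent to the admin endpoint.
    String json = grantData.toJson();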
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.admin.client.datatype; - -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.authorization.utils.StringUtil; -import org.apache.atlas.plugin.util.JsonUtilsV2; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - - -@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown = true) -public class RESTResponse implements java.io.Serializable { - private static final Logger LOG = LoggerFactory.getLogger(RESTResponse.class); - - /** - * values for statusCode - */ - public static final int STATUS_SUCCESS = 0; - public static final int STATUS_ERROR = 1; - public static final int STATUS_VALIDATION = 2; - public static final int STATUS_WARN = 3; - public static final int STATUS_INFO = 4; - public static final int STATUS_PARTIAL_SUCCESS = 5; - public static final int ResponseStatus_MAX = 5; - - private int httpStatusCode; - private int statusCode; - private String msgDesc; - private List messageList; - - - public int getHttpStatusCode() { - return httpStatusCode; - } - - public void setHttpStatusCode(int httpStatusCode) { - this.httpStatusCode = httpStatusCode; - } - - public int getStatusCode() { - return statusCode; - } - - public void setStatusCode(int statusCode) { - this.statusCode = statusCode; - } - - public String getMsgDesc() { - return msgDesc; - } - - public void setMsgDesc(String msgDesc) { - this.msgDesc = msgDesc; - } - - public List getMessageList() { - return messageList; - } - - public void setMessageList(List messageList) { - this.messageList = messageList; - } - - public String getMessage() { - return StringUtil.isEmpty(msgDesc) ? ("HTTP " + httpStatusCode) : msgDesc; - } - - public static RESTResponse fromClientResponse(ClientResponse response) { - RESTResponse ret = null; - - String jsonString = response == null ? null : response.getEntity(String.class); - int httpStatus = response == null ? 0 : response.getStatus(); - - if(! 
StringUtil.isEmpty(jsonString)) { - ret = RESTResponse.fromJson(jsonString); - } - - if(ret == null) { - ret = new RESTResponse(); - } - - ret.setHttpStatusCode(httpStatus); - - return ret; - } - - public String toJson() { - try { - return JsonUtilsV2.objToJson(this); - } catch (Exception e) { - if(LOG.isDebugEnabled()) { - LOG.debug("toJson() failed", e); - } - } - - return ""; - } - - public static RESTResponse fromJson(String jsonString) { - try { - return JsonUtilsV2.jsonToObj(jsonString, RESTResponse.class); - } catch (Exception e) { - if(LOG.isDebugEnabled()) { - LOG.debug("fromJson('" + jsonString + "') failed", e); - } - } - - return null; - } - - @Override - public String toString() { - return toJson(); - } - - @JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) - @JsonIgnoreProperties(ignoreUnknown = true) - public static class Message implements java.io.Serializable { - private String name; - private String rbKey; - private String message; - private Long objectId; - private String fieldName; - - public String getName() { - return name; - } - public void setName(String name) { - this.name = name; - } - public String getRbKey() { - return rbKey; - } - public void setRbKey(String rbKey) { - this.rbKey = rbKey; - } - public String getMessage() { - return message; - } - public void setMessage(String message) { - this.message = message; - } - public Long getObjectId() { - return objectId; - } - public void setObjectId(Long objectId) { - this.objectId = objectId; - } - public String getFieldName() { - return fieldName; - } - public void setFieldName(String fieldName) { - this.fieldName = fieldName; - } - - public String toJson() { - try { - return JsonUtilsV2.objToJson(this); - } catch (Exception e) { - if(LOG.isDebugEnabled()) { - LOG.debug("toJson() failed", e); - } - } - - return ""; - } - - public static RESTResponse fromJson(String jsonString) { - try { - return JsonUtilsV2.jsonToObj(jsonString, RESTResponse.class); - } catch (Exception e) { - if(LOG.isDebugEnabled()) { - LOG.debug("fromJson('" + jsonString + "') failed", e); - } - } - - return null; - } - - @Override - public String toString() { - return toJson(); - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAdminConfig.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAdminConfig.java similarity index 94% rename from auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAdminConfig.java rename to auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAdminConfig.java index 32e32cfe56..c20ed1e2fb 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAdminConfig.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAdminConfig.java @@ -17,7 +17,7 @@ * under the License. */ -package org.apache.atlas.authorization.hadoop.config; +package org.apache.atlas.authorization.config; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -50,9 +50,8 @@ public static RangerAdminConfig getInstance() { private RangerAdminConfig() { super(); addAdminResources(); - String storeType = get(RangerConfigConstants.RANGER_KEYSTORE_TYPE, KeyStore.getDefaultType()); + String storeType = KeyStore.getDefaultType(); isFipsEnabled = StringUtils.equalsIgnoreCase("bcfks", storeType) ? 
true : false; - } private boolean addAdminResources() { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAuditConfig.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAuditConfig.java similarity index 97% rename from auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAuditConfig.java rename to auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAuditConfig.java index c8ee96dfbb..8a742fd329 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerAuditConfig.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerAuditConfig.java @@ -17,7 +17,7 @@ * under the License. */ -package org.apache.atlas.authorization.hadoop.config; +package org.apache.atlas.authorization.config; import org.slf4j.Logger; diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LogBuffer.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfigConstants.java similarity index 78% rename from auth-audits/src/main/java/org/apache/atlas/audit/provider/LogBuffer.java rename to auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfigConstants.java index 305d614143..0ee818cce5 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LogBuffer.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfigConstants.java @@ -16,17 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.atlas.audit.provider; +package org.apache.atlas.authorization.config; -public interface LogBuffer { - void start(LogDestination destination); +public class RangerConfigConstants { + //SECURITY CONFIG DEFAULTS + public static final String RANGER_KEYSTORE_TYPE = "ranger.keystore.file.type"; - void stop(); - - boolean isAvailable(); - - boolean isEmpty(); - - boolean add(T log); } + diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfiguration.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfiguration.java similarity index 98% rename from auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfiguration.java rename to auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfiguration.java index a00575ea84..44b0043c32 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfiguration.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerConfiguration.java @@ -18,7 +18,7 @@ */ -package org.apache.atlas.authorization.hadoop.config; +package org.apache.atlas.authorization.config; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerPluginConfig.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerPluginConfig.java similarity index 62% rename from auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerPluginConfig.java rename to auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerPluginConfig.java index 21e229500d..c89cd8baaa 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerPluginConfig.java 
+++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/config/RangerPluginConfig.java @@ -17,18 +17,15 @@ * under the License. */ -package org.apache.atlas.authorization.hadoop.config; +package org.apache.atlas.authorization.config; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.utils.StringUtil; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.atlas.plugin.policyengine.RangerPolicyEngineOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.net.URL; import java.util.Collections; import java.util.HashSet; import java.util.Set; @@ -60,8 +57,6 @@ public class RangerPluginConfig extends RangerConfiguration { public RangerPluginConfig(String serviceType, String serviceName, String appId, String clusterName, String clusterType, RangerPolicyEngineOptions policyEngineOptions) { super(); - addResourcesForServiceType(serviceType); - this.serviceType = serviceType; this.appId = StringUtils.isEmpty(appId) ? serviceType : appId; this.propertyPrefix = "atlas.plugin." + serviceType; @@ -71,18 +66,18 @@ public RangerPluginConfig(String serviceType, String serviceName, String appId, String trustedProxyAddressString = this.get(propertyPrefix + ".trusted.proxy.ipaddresses"); - if (StringUtil.isEmpty(clusterName)) { + if (RangerUtil.isEmpty(clusterName)) { clusterName = this.get(propertyPrefix + ".access.cluster.name", ""); - if (StringUtil.isEmpty(clusterName)) { + if (RangerUtil.isEmpty(clusterName)) { clusterName = this.get(propertyPrefix + ".ambari.cluster.name", ""); } } - if (StringUtil.isEmpty(clusterType)) { + if (RangerUtil.isEmpty(clusterType)) { clusterType = this.get(propertyPrefix + ".access.cluster.type", ""); - if (StringUtil.isEmpty(clusterType)) { + if (RangerUtil.isEmpty(clusterType)) { clusterType = this.get(propertyPrefix + ".ambari.cluster.type", ""); } } @@ -119,24 +114,6 @@ public RangerPluginConfig(String serviceType, String serviceName, String appId, LOG.info(policyEngineOptions.toString()); } - protected RangerPluginConfig(String serviceType, String serviceName, String appId, RangerPluginConfig sourcePluginConfig) { - super(); - - this.serviceType = serviceType; - this.appId = StringUtils.isEmpty(appId) ? serviceType : appId; - this.propertyPrefix = "ranger.plugin." 
+ serviceType; - this.serviceName = serviceName; - - this.clusterName = sourcePluginConfig.getClusterName(); - this.clusterType = sourcePluginConfig.getClusterType(); - this.useForwardedIPAddress = sourcePluginConfig.isUseForwardedIPAddress(); - this.trustedProxyAddresses = sourcePluginConfig.getTrustedProxyAddresses(); - this.isFallbackSupported = sourcePluginConfig.getIsFallbackSupported(); - - this.policyEngineOptions = sourcePluginConfig.getPolicyEngineOptions(); - - } - public String getServiceType() { return serviceType; } @@ -228,131 +205,16 @@ public boolean isServiceAdmin(String userName) { return serviceAdmins.contains(userName); } - private void addResourcesForServiceType(String serviceType) { - String auditCfg = "atlas-" + serviceType + "-audit.xml"; - String securityCfg = "atlas-" + serviceType + "-security.xml"; - String sslCfg = "atlas-" + serviceType + "-policymgr-ssl.xml"; - - if (!addResourceIfReadable(auditCfg)) { - addAuditResource(serviceType); - } - - if (!addResourceIfReadable(securityCfg)) { - addSecurityResource(serviceType); - } - - if (!addResourceIfReadable(sslCfg)) { - addSslConfigResource(serviceType); - } - } - // load service specific config overrides, if config files are available private void addResourcesForServiceName(String serviceType, String serviceName) { if (StringUtils.isNotBlank(serviceType) && StringUtils.isNotBlank(serviceName)) { - String serviceAuditCfg = "atlas-" + serviceType + "-" + serviceName + "-audit.xml"; - String serviceSecurityCfg = "atlas-" + serviceType + "-" + serviceName + "-security.xml"; - String serviceSslCfg = "atlas-" + serviceType + "-" + serviceName + "-policymgr-ssl.xml"; + String serviceAuditCfg = "atlas-" + serviceName + "-audit.xml"; + String serviceSecurityCfg = "atlas-" + serviceName + "-security.xml"; + String serviceSslCfg = "atlas-" + serviceName + "-policymgr-ssl.xml"; addResourceIfReadable(serviceAuditCfg); addResourceIfReadable(serviceSecurityCfg); addResourceIfReadable(serviceSslCfg); } } - - private void addSecurityResource(String serviceType) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> addSecurityResource(Service Type: " + serviceType ); - } - - Configuration rangerConf = RangerLegacyConfigBuilder.getSecurityConfig(serviceType); - - if (rangerConf != null ) { - addResource(rangerConf); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Unable to add the Security Config for " + serviceType + ". Plugin won't be enabled!"); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<= addSecurityResource(Service Type: " + serviceType ); - } - } - - private void addAuditResource(String serviceType) { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> addAuditResource(Service Type: " + serviceType ); - } - - try { - URL url = RangerLegacyConfigBuilder.getAuditConfig(serviceType); - - if (url != null) { - addResource(url); - - if (LOG.isDebugEnabled()) { - LOG.debug("==> addAuditResource() URL" + url.getPath()); - } - } - - } catch (Throwable t) { - LOG.warn("Unable to find Audit Config for " + serviceType + " Auditing not enabled !" ); - - if(LOG.isDebugEnabled()) { - LOG.debug("Unable to find Audit Config for " + serviceType + " Auditing not enabled !" 
+ t); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== addAuditResource(Service Type: " + serviceType + ")"); - } - } - - private void addSslConfigResource(String serviceType) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> addSslConfigResource(Service Type: " + serviceType); - } - - try { - String sslConfigFile = this.get(RangerLegacyConfigBuilder.getPropertyName(RangerConfigConstants.RANGER_PLUGIN_REST_SSL_CONFIG_FILE, serviceType)); - - URL url = getSSLConfigResource(sslConfigFile); - if (url != null) { - addResource(url); - if (LOG.isDebugEnabled()) { - LOG.debug("SSL config file URL:" + url.getPath()); - } - } - } catch (Throwable t) { - LOG.warn(" Unable to find SSL Configs"); - - if (LOG.isDebugEnabled()) { - LOG.debug(" Unable to find SSL Configs"); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== addSslConfigResource(Service Type: " + serviceType + ")"); - } - } - - private URL getSSLConfigResource(String fileName) throws Throwable { - URL ret = null; - - try { - if (fileName != null) { - File f = new File(fileName); - if (f.exists() && f.canRead()) { - ret = f.toURI().toURL(); - } - } - } catch (Throwable t) { - LOG.error("Unable to read SSL configuration file:" + fileName); - - throw t; - } - - return ret; - } } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerChainedPluginConfig.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerChainedPluginConfig.java deleted file mode 100644 index a0e138c497..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerChainedPluginConfig.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
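Aside on the RangerPluginConfig hunk above: with addResourcesForServiceType() and the legacy audit/security/SSL fallbacks removed, only service-name-keyed override files are loaded, and only when readable. A condensed restatement of the surviving lookup (file names copied from the added lines; the config variable is illustrative):

    // After this change the per-service overrides are resolved purely by serviceName:
    //   atlas-<serviceName>-audit.xml, atlas-<serviceName>-security.xml, atlas-<serviceName>-policymgr-ssl.xml
    if (StringUtils.isNotBlank(serviceType) && StringUtils.isNotBlank(serviceName)) {
        config.addResourceIfReadable("atlas-" + serviceName + "-audit.xml");
        config.addResourceIfReadable("atlas-" + serviceName + "-security.xml");
        config.addResourceIfReadable("atlas-" + serviceName + "-policymgr-ssl.xml");
    }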
- */ - -package org.apache.atlas.authorization.hadoop.config; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class RangerChainedPluginConfig extends RangerPluginConfig { - - private static final Log LOG = LogFactory.getLog(RangerChainedPluginConfig.class); - - private final String[] legacySSLProperties = new String[] {"xasecure.policymgr.clientssl.keystore", "xasecure.policymgr.clientssl.keystore.type", "xasecure.policymgr.clientssl.keystore.credential.file","xasecure.policymgr.clientssl.truststore", "xasecure.policymgr.clientssl.truststore.credential.file", "hadoop.security.credential.provider.path"}; - private final String[] chainedPluginPropertyPrefixes = new String[] { ".chained.services"}; - - public RangerChainedPluginConfig(String serviceType, String serviceName, String appId, RangerPluginConfig sourcePluginConfig) { - super(serviceType, serviceName, appId, sourcePluginConfig); - - // Copy all of properties from sourcePluginConfig except chained properties but with converted propertyPrefix - copyProperties(sourcePluginConfig, sourcePluginConfig.getPropertyPrefix()); - - // Copy SSL configurations from sourcePluginConfig - copyLegacySSLProperties(sourcePluginConfig); - - // Override copied properties from those in sourcePluginConfig with getPropertyPrefix() - copyProperties(sourcePluginConfig, getPropertyPrefix()); - - // Copy chained properties - copyChainedProperties(sourcePluginConfig, getPropertyPrefix()); - - set(getPropertyPrefix() + ".service.name", serviceName); - } - - private void copyProperties(RangerPluginConfig sourcePluginConfig, String propertyPrefix) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> copyProperties: propertyPrefix:[" + propertyPrefix + "]"); - } - for (String propName : sourcePluginConfig.getProperties().stringPropertyNames()) { - String value = sourcePluginConfig.get(propName); - - if (value != null && propName.startsWith(propertyPrefix)) { - String suffix = propName.substring(propertyPrefix.length()); - if (!isExcludedSuffix(suffix)) { - set(getPropertyPrefix() + suffix, value); - if (LOG.isDebugEnabled()) { - LOG.debug("set property:[" + getPropertyPrefix() + suffix + "] to value:[" + value + "]"); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Not copying property :[" + propName + "] value from sourcePluginConfig"); - } - } - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== copyProperties: propertyPrefix:[" + propertyPrefix + "]"); - } - } - - private void copyLegacySSLProperties(RangerPluginConfig sourcePluginConfig) { - for (String legacyPropertyName : legacySSLProperties) { - String value = sourcePluginConfig.get(legacyPropertyName); - if (value != null) { - set(legacyPropertyName, value); - } - } - } - - private void copyChainedProperties(RangerPluginConfig sourcePluginConfig, String propertyPrefix) { - for (String propName : sourcePluginConfig.getProperties().stringPropertyNames()) { - String value = sourcePluginConfig.get(propName); - - if (value != null && propName.startsWith(propertyPrefix)) { - String suffix = propName.substring(propertyPrefix.length()); - for (String chainedPropertyPrefix : chainedPluginPropertyPrefixes) { - if (StringUtils.startsWith(suffix, chainedPropertyPrefix)) { - set(getPropertyPrefix() + suffix, value); - } - } - } - } - } - - private boolean isExcludedSuffix(String suffix) { - for (String excludedSuffix : chainedPluginPropertyPrefixes) { - if (StringUtils.startsWith(suffix, excludedSuffix)) { - return 
true; - } - } - return false; - } - - private String printProperties() { - StringBuilder sb = new StringBuilder(); - boolean seenOneProp = false; - for (String propName : this.getProperties().stringPropertyNames()) { - String value = this.get(propName); - if (!seenOneProp) { - seenOneProp = true; - } else { - sb.append(",\n"); - } - sb.append("{ propertyName:[").append(propName).append("], propertyValue:[").append(value).append("] }"); - } - return sb.toString(); - } - - @Override - public String toString() { - return this.getClass().getSimpleName() + " : { " + printProperties() + " }"; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfigConstants.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfigConstants.java deleted file mode 100644 index 32b801ed88..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerConfigConstants.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.authorization.hadoop.config; - -public class RangerConfigConstants { - //SECURITY CONFIG DEFAULTS - public static final String RANGER_SERVICE_NAME = "ranger.plugin..service.name"; - public static final String RANGER_PLUGIN_POLICY_SOURCE_IMPL = "ranger.plugin..policy.source.impl"; - public static final String RANGER_PLUGIN_POLICY_SOURCE_IMPL_DEFAULT = "org.apache.atlas.admin.client.RangerAdminRESTClient"; - public static final String RANGER_PLUGIN_POLICY_REST_URL = "ranger.plugin..policy.rest.url"; - public static final String RANGER_PLUGIN_REST_SSL_CONFIG_FILE = "ranger.plugin..policy.rest.ssl.config.file"; - public static final String RANGER_PLUGIN_POLICY_POLLINVETERVALMS = "ranger.plugin..policy.pollIntervalMs"; - public static final String RANGER_PLUGIN_POLICY_CACHE_DIR = "ranger.plugin..policy.cache.dir"; - public static final String RANGER_PLUGIN_ADD_HADDOOP_AUTHORIZATION = "xasecure.add-hadoop-authorization"; - public static final String RANGER_KEYSTORE_TYPE = "ranger.keystore.file.type"; - - //CHANGE MAP CONSTANTS - public static final String XASECURE_POLICYMGR_URL = "xasecure..policymgr.url"; - public static final String XASECURE_POLICYMGR_URL_LASTSTOREDFILE = "xasecure..policymgr.url.laststoredfile"; - public static final String XASECURE_POLICYMGR_GRL_RELOADINTERVALINMILLIS = "xasecure..policymgr.url.reloadIntervalInMillis"; - public static final String XASECURE_ADD_HADDOP_AUTHORZATION = "xasecure.add-hadoop-authorization"; - public static final String XASECURE_UPDATE_XAPOLICIES_ON_GRANT = "xasecure..update.xapolicies.on.grant.revoke"; - - //Legacy Files - public static final String XASECURE_AUDIT_FILE = "xasecure-audit.xml"; - public static final String XASECURE_SECURITY_FILE = "xasecure--security.xml"; - public static final String XASECURE_POLICYMGR_SSL_FILE = "/etc//conf/xasecure-policymgr-ssl.xml"; - - //KNOX - public static final String RANGER_KNOX_PLUGIN_POLICY_SOURCE_IMPL_DEFAULT = "org.apache.atlas.admin.client.RangerAdminJersey2RESTClient"; -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerLegacyConfigBuilder.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerLegacyConfigBuilder.java deleted file mode 100644 index bc71b56d39..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/config/RangerLegacyConfigBuilder.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.authorization.hadoop.config; - -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.net.URL; -import java.util.HashMap; -import java.util.Map; - -public class RangerLegacyConfigBuilder { - - private static final Logger LOG = LoggerFactory.getLogger(RangerLegacyConfigBuilder.class); - - static String serviceType; - static String legacyResource; - - public static Configuration getSecurityConfig(String serviceType) { - - RangerLegacyConfigBuilder.legacyResource = getPropertyName(RangerConfigConstants.XASECURE_SECURITY_FILE,serviceType); - RangerLegacyConfigBuilder.serviceType = serviceType; - - Configuration ret = null; - Configuration legacyConfig = new Configuration(); - URL legacyFileUrl = getFileURL(legacyResource); - - if(LOG.isDebugEnabled()) { - LOG.debug("==> getSecurityConfig() " + legacyResource + " FileName: " + legacyFileUrl); - } - - if ( legacyFileUrl != null) { - legacyConfig.addResource(legacyFileUrl); - Configuration rangerDefaultProp = buildRangerSecurityConf(serviceType); - ret = mapLegacyConfigToRanger(rangerDefaultProp,legacyConfig); - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== getSecurityConfig() " + legacyResource + " FileName: " + legacyFileUrl); - } - - return ret; - } - - public static URL getAuditConfig(String serviceType) throws Throwable { - - RangerLegacyConfigBuilder.legacyResource = getPropertyName(RangerConfigConstants.XASECURE_AUDIT_FILE,serviceType); - RangerLegacyConfigBuilder.serviceType = serviceType; - - URL ret = null; - try { - ret = getAuditResource(legacyResource); - } catch (Throwable t) { - throw t; - } - return ret; - } - - private static Configuration mapLegacyConfigToRanger(Configuration rangerInConf, Configuration legacyConf) { - - Configuration ret = rangerInConf; - - HashMap chgMap = getConfigChangeMap(serviceType); - if(LOG.isDebugEnabled()) { - LOG.debug("<== mapLegacyConfigToRanger() MAP Size: " + chgMap.size()); - } - for(Map.Entry entry : chgMap.entrySet()) { - String legacyKey = entry.getKey(); - String rangerKey = entry.getValue(); - - String legacyConfVal = null; - - if ( rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_SERVICE_NAME,serviceType)) ) { - //For getting the service - String serviceURL = legacyConf.get(getPropertyName(RangerConfigConstants.XASECURE_POLICYMGR_URL,serviceType)); - legacyConfVal = fetchLegacyValue(serviceURL,rangerKey); - } else if ( rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_REST_URL,serviceType)) || - rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_CACHE_DIR,serviceType)) ) { - // For Getting Admin URL and CacheDir - legacyConfVal = fetchLegacyValue(legacyConf.get(legacyKey),rangerKey); - } else { - legacyConfVal = legacyConf.get(legacyKey); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== mapLegacyConfigToRanger() Ranger Key: " + rangerKey + "Legacy Key:" + legacyKey + "Legacy Value:" + legacyConfVal); - } - - ret.set(rangerKey, legacyConfVal); - } - return ret; - } - - - public static URL getAuditResource(String fName) throws Throwable { - URL ret = null; - - try { - for(String cfgFile : new String[] { "hive-site.xml", "hbase-site.xml", "hdfs-site.xml" } ) { - String loc = getFileLocation(cfgFile); - if (loc != null) { - File f = new File(loc); - if ( f.exists() && f.canRead()) { - File parentFile = new File(loc).getParentFile(); - ret = new 
File(parentFile, RangerConfigConstants.XASECURE_AUDIT_FILE).toURI().toURL(); - break; - } - } - } - } - catch(Throwable t) { - LOG.error("Missing Ranger Audit configuration files..."); - throw t; - } - return ret; - } - - public static Configuration buildRangerSecurityConf(String serviceType) { - Configuration rangerConf = new Configuration(); - - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_SERVICE_NAME,serviceType),""); - if (EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KNOX_NAME.equals(serviceType) ) { - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_SOURCE_IMPL,serviceType),RangerConfigConstants.RANGER_KNOX_PLUGIN_POLICY_SOURCE_IMPL_DEFAULT); - } else { - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_SOURCE_IMPL,serviceType),""); - } - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_REST_URL,serviceType),""); - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_REST_SSL_CONFIG_FILE,serviceType), getPropertyName(RangerConfigConstants.XASECURE_POLICYMGR_SSL_FILE,serviceType)); - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_POLLINVETERVALMS,serviceType), ""); - rangerConf.set(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_CACHE_DIR,serviceType), ""); - rangerConf.set(RangerConfigConstants.RANGER_PLUGIN_ADD_HADDOOP_AUTHORIZATION,""); - - return rangerConf; - } - - public static HashMap getConfigChangeMap(String serviceType) { - // ConfigMap for moving legacy Configuration to Ranger Configuration - HashMap changeMap = new HashMap<>(); - - changeMap.put(serviceType, - getPropertyName(RangerConfigConstants.RANGER_SERVICE_NAME,serviceType)); - changeMap.put(getPropertyName(RangerConfigConstants.XASECURE_POLICYMGR_URL,serviceType), - getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_REST_URL,serviceType)); - changeMap.put(getPropertyName(RangerConfigConstants.XASECURE_POLICYMGR_GRL_RELOADINTERVALINMILLIS,serviceType), - getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_POLLINVETERVALMS,serviceType)); - changeMap.put(getPropertyName(RangerConfigConstants.XASECURE_POLICYMGR_URL_LASTSTOREDFILE,serviceType), - getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_CACHE_DIR,serviceType)); - - if (EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_HDFS_NAME.equals(serviceType)) { - changeMap.put(RangerConfigConstants.XASECURE_ADD_HADDOP_AUTHORZATION, - RangerConfigConstants.RANGER_PLUGIN_ADD_HADDOOP_AUTHORIZATION); - } - - if (EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_HBASE_NAME.equals(serviceType) || - EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_HIVE_NAME.equals(serviceType)) { - changeMap.put(getPropertyName(RangerConfigConstants.XASECURE_UPDATE_XAPOLICIES_ON_GRANT,serviceType), - getPropertyName(RangerConfigConstants.XASECURE_UPDATE_XAPOLICIES_ON_GRANT,serviceType)); - } - - if ( LOG.isDebugEnabled()) { - for(Map.Entry entry : changeMap.entrySet()) { - String legacyKey = entry.getKey(); - String rangerKey = entry.getValue(); - LOG.debug("<== getConfigChangeMap() RangerConfig Key: " + rangerKey + " Legacy Key: " + legacyKey); - } - } - - return changeMap; - } - - public static String getFileLocation(String fileName) { - String ret = null; - - URL lurl = RangerLegacyConfigBuilder.class.getClassLoader().getResource(fileName); - if (lurl == null ) { - lurl = RangerLegacyConfigBuilder.class.getClassLoader().getResource("/" + fileName); - } - if (lurl != null) { - ret = lurl.getFile(); - } - return ret; - } - - public static URL 
getFileURL(String fileName) { - return RangerLegacyConfigBuilder.class.getClassLoader().getResource(fileName); - } - - public static String getPropertyName(String rangerProp, String serviceType) { - return rangerProp.replace("", serviceType); - } - - public static String getPolicyMgrURL(String url) { - int index = url.indexOf("/",url.lastIndexOf(":")); - - return url.substring(0,index); - } - - public static String getServiceNameFromURL(String url) { - int index = url.lastIndexOf("/"); - - return url.substring(index+1); - } - - - public static String getCacheFileURL(String cacheFile) { - int index = cacheFile.lastIndexOf("/"); - - return cacheFile.substring(0,index); - } - - public static String fetchLegacyValue(String legacyVal, String rangerKey) { - String ret = null; - - if ( rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_SERVICE_NAME,serviceType)) ) { - // To Fetch ServiceName - ret = getServiceNameFromURL(legacyVal); - } else if ( rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_REST_URL,serviceType)) ) { - // To Fetch PolicyMgr URL - ret = getPolicyMgrURL(legacyVal); - } else if ( rangerKey.equals(getPropertyName(RangerConfigConstants.RANGER_PLUGIN_POLICY_CACHE_DIR,serviceType)) ) { - ret = getCacheFileURL(legacyVal); - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/constants/RangerHadoopConstants.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/constants/RangerHadoopConstants.java deleted file mode 100644 index 9d367ecf0c..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/hadoop/constants/RangerHadoopConstants.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.authorization.hadoop.constants; - -public class RangerHadoopConstants { - - public static final String RANGER_ADD_HDFS_PERMISSION_PROP = "xasecure.add-hadoop-authorization"; - public static final String RANGER_OPTIMIZE_SUBACCESS_AUTHORIZATION_PROP = "ranger.optimize-subaccess-authorization" ; - public static final boolean RANGER_ADD_HDFS_PERMISSION_DEFAULT = false; - public static final boolean RANGER_OPTIMIZE_SUBACCESS_AUTHORIZATION_DEFAULT = false ; - public static final String READ_ACCCESS_TYPE = "read"; - public static final String WRITE_ACCCESS_TYPE = "write"; - public static final String EXECUTE_ACCCESS_TYPE = "execute"; - - public static final String READ_EXECUTE_PERM = "READ_EXECUTE"; - public static final String WRITE_EXECUTE_PERM = "WRITE_EXECUTE"; - public static final String READ_WRITE_PERM = "READ_WRITE"; - public static final String ALL_PERM = "ALL"; - - public static final String HDFS_ROOT_FOLDER_PATH_ALT = ""; - public static final String HDFS_ROOT_FOLDER_PATH = "/"; - - public static final String HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP = "xasecure.hive.update.xapolicies.on.grant.revoke"; - public static final boolean HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE = true; - public static final String HIVE_BLOCK_UPDATE_IF_ROWFILTER_COLUMNMASK_SPECIFIED_PROP = "xasecure.hive.block.update.if.rowfilter.columnmask.specified"; - public static final boolean HIVE_BLOCK_UPDATE_IF_ROWFILTER_COLUMNMASK_SPECIFIED_DEFAULT_VALUE = true; - public static final String HIVE_DESCRIBE_TABLE_SHOW_COLUMNS_AUTH_OPTION_PROP = "xasecure.hive.describetable.showcolumns.authorization.option"; - public static final String HIVE_DESCRIBE_TABLE_SHOW_COLUMNS_AUTH_OPTION_PROP_DEFAULT_VALUE = "NONE"; - - public static final String HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP = "xasecure.hbase.update.xapolicies.on.grant.revoke"; - public static final boolean HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE = true; - - public static final String KNOX_ACCESS_VERIFIER_CLASS_NAME_PROP = "knox.authorization.verifier.classname"; - public static final String KNOX_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE = "org.apache.atlas.pdp.knox.RangerAuthorizer"; - - public static final String STORM_ACCESS_VERIFIER_CLASS_NAME_PROP = "storm.authorization.verifier.classname"; - public static final String STORM_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE = "org.apache.atlas.pdp.storm.RangerAuthorizer"; - - public static final String RANGER_ADD_YARN_PERMISSION_PROP = "ranger.add-yarn-authorization"; - public static final boolean RANGER_ADD_YARN_PERMISSION_DEFAULT = true; - - // - // Logging constants - // - public static final String AUDITLOG_FIELD_DELIMITER_PROP = "xasecure.auditlog.fieldDelimiterString"; - public static final String AUDITLOG_RANGER_MODULE_ACL_NAME_PROP = "xasecure.auditlog.xasecureAcl.name"; - public static final String AUDITLOG_HADOOP_MODULE_ACL_NAME_PROP = "xasecure.auditlog.hadoopAcl.name"; - public static final String AUDITLOG_YARN_MODULE_ACL_NAME_PROP = "ranger.auditlog.yarnAcl.name"; - - public static final String DEFAULT_LOG_FIELD_DELIMITOR = "|"; - public static final String DEFAULT_XASECURE_MODULE_ACL_NAME = "xasecure-acl"; - public static final String DEFAULT_RANGER_MODULE_ACL_NAME = "ranger-acl"; - public static final String DEFAULT_HADOOP_MODULE_ACL_NAME = "hadoop-acl"; - public static final String DEFAULT_YARN_MODULE_ACL_NAME = "yarn-acl"; - - - public static final String AUDITLOG_FIELDINFO_VISIBLE_PROP = 
"xasecure.auditlog.fieldInfoVisible"; - public static final boolean DEFAULT_AUDITLOG_FIELDINFO_VISIBLE = false; - - public static final String AUDITLOG_ACCESS_GRANTED_TEXT_PROP = "xasecure.auditlog.accessgranted.text"; - public static final String AUDITLOG_ACCESS_DENIED_TEXT_PROP = "xasecure.auditlog.accessdenied.text"; - - public static final String DEFAULT_ACCESS_GRANTED_TEXT = "granted"; - public static final String DEFAULT_ACCESS_DENIED_TEXT = "denied"; - - public static final String AUDITLOG_EMPTY_STRING = ""; - - public static final String AUDITLOG_HDFS_EXCLUDE_LIST_PROP = "xasecure.auditlog.hdfs.excludeusers"; - public static final String AUDITLOG_REPOSITORY_NAME_PROP = "xasecure.audit.repository.name"; - public static final String AUDITLOG_IS_ENABLED_PROP = "xasecure.audit.is.enabled"; - - public static final String KEYMGR_URL_PROP = "hdfs.keymanager.url"; -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/JsonUtils.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/JsonUtils.java deleted file mode 100644 index ddc492908a..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/JsonUtils.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.authorization.utils; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.reflect.TypeToken; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.model.AuditFilter; -import org.apache.atlas.plugin.model.RangerValidityRecurrence; -import org.apache.atlas.plugin.model.RangerValiditySchedule; - -import java.lang.reflect.Type; -import java.util.List; -import java.util.Map; - -public class JsonUtils { - private static final Log LOG = LogFactory.getLog(JsonUtils.class); - - private static final ThreadLocal gson = new ThreadLocal() { - @Override - protected Gson initialValue() { - return new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").create(); - } - }; - - public static String mapToJson(Map map) { - String ret = null; - if (MapUtils.isNotEmpty(map)) { - try { - ret = gson.get().toJson(map); - } catch (Exception e) { - LOG.error("Invalid input data: ", e); - } - } - return ret; - } - - public static String listToJson(List list) { - String ret = null; - if (CollectionUtils.isNotEmpty(list)) { - try { - ret = gson.get().toJson(list); - } catch (Exception e) { - LOG.error("Invalid input data: ", e); - } - } - return ret; - } - - public static String objectToJson(Object object) { - String ret = null; - - if(object != null) { - try { - ret = gson.get().toJson(object); - } catch(Exception excp) { - LOG.warn("objectToJson() failed to convert object to Json", excp); - } - } - - return ret; - } - - public static T jsonToObject(String jsonStr, Class clz) { - T ret = null; - - if(StringUtils.isNotEmpty(jsonStr)) { - try { - ret = gson.get().fromJson(jsonStr, clz); - } catch(Exception excp) { - LOG.warn("jsonToObject() failed to convert json to object: " + jsonStr, excp); - } - } - - return ret; - } - - public static Map jsonToMapStringString(String jsonStr) { - Map ret = null; - - if(StringUtils.isNotEmpty(jsonStr)) { - try { - Type mapType = new TypeToken>() {}.getType(); - ret = gson.get().fromJson(jsonStr, mapType); - } catch(Exception excp) { - LOG.warn("jsonToObject() failed to convert json to object: " + jsonStr, excp); - } - } - - return ret; - } - - public static List jsonToRangerValiditySchedule(String jsonStr) { - try { - Type listType = new TypeToken>() {}.getType(); - return gson.get().fromJson(jsonStr, listType); - } catch (Exception e) { - LOG.error("Cannot get List from " + jsonStr, e); - return null; - } - } - - public static List jsonToAuditFilterList(String jsonStr) { - try { - Type listType = new TypeToken>() {}.getType(); - return gson.get().fromJson(jsonStr, listType); - } catch (Exception e) { - LOG.error("failed to create audit filters from: " + jsonStr, e); - return null; - } - } - - public static List jsonToRangerValidityRecurringSchedule(String jsonStr) { - try { - Type listType = new TypeToken>() { - }.getType(); - return gson.get().fromJson(jsonStr, listType); - } catch (Exception e) { - LOG.error("Cannot get List from " + jsonStr, e); - return null; - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerAtlasConstants.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerAtlasConstants.java new file mode 100644 index 0000000000..eaa1b70996 --- /dev/null +++ 
b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerAtlasConstants.java @@ -0,0 +1,28 @@ +package org.apache.atlas.authorization.utils; + +public class RangerAtlasConstants { + + public static final String RESOURCE_SERVICE = "atlas-service"; + public static final String RESOURCE_TYPE_CATEGORY = "type-category"; + public static final String RESOURCE_TYPE_NAME = "type"; + public static final String RESOURCE_ENTITY_TYPE = "entity-type"; + public static final String RESOURCE_ENTITY_CLASSIFICATION = "entity-classification"; + public static final String RESOURCE_CLASSIFICATION = "classification"; + public static final String RESOURCE_ENTITY_ID = "entity"; + public static final String RESOURCE_ENTITY_LABEL = "entity-label"; + public static final String RESOURCE_ENTITY_BUSINESS_METADATA = "entity-business-metadata"; + public static final String RESOURCE_ENTITY_OWNER = "owner"; + public static final String RESOURCE_RELATIONSHIP_TYPE = "relationship-type"; + public static final String RESOURCE_END_ONE_ENTITY_TYPE = "end-one-entity-type"; + public static final String RESOURCE_END_ONE_ENTITY_CLASSIFICATION = "end-one-entity-classification"; + public static final String RESOURCE_END_ONE_ENTITY_ID = "end-one-entity"; + public static final String RESOURCE_END_TWO_ENTITY_TYPE = "end-two-entity-type"; + public static final String RESOURCE_END_TWO_ENTITY_CLASSIFICATION = "end-two-entity-classification"; + public static final String RESOURCE_END_TWO_ENTITY_ID = "end-two-entity"; + + public static final String ACCESS_TYPE_TYPE_READ = "type-read"; + + public static final String ENTITY_NOT_CLASSIFIED = "_NOT_CLASSIFIED"; + + public static final String COMPONENT_ACCESSTYPE_SEPARATOR = ":"; +} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerUtil.java new file mode 100644 index 0000000000..3b3d6ae10e --- /dev/null +++ b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/RangerUtil.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + + package org.apache.atlas.authorization.utils; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; +import org.apache.atlas.plugin.model.AuditFilter; +import org.apache.atlas.plugin.model.RangerValiditySchedule; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TimeZone; + +public class RangerUtil { + + private static final Log LOG = LogFactory.getLog(RangerUtil.class); + + private static final TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT+0"); + + private static final ThreadLocal gson = new ThreadLocal() { + @Override + protected Gson initialValue() { + return new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").create(); + } + }; + + public static String objectToJson(Object object) { + String ret = null; + + if(object != null) { + try { + ret = gson.get().toJson(object); + } catch(Exception excp) { + LOG.warn("objectToJson() failed to convert object to Json", excp); + } + } + + return ret; + } + + public static List jsonToRangerValiditySchedule(String jsonStr) { + try { + Type listType = new TypeToken>() {}.getType(); + return gson.get().fromJson(jsonStr, listType); + } catch (Exception e) { + LOG.error("Cannot get List from " + jsonStr, e); + return null; + } + } + + public static List jsonToAuditFilterList(String jsonStr) { + try { + Type listType = new TypeToken>() {}.getType(); + return gson.get().fromJson(jsonStr, listType); + } catch (Exception e) { + LOG.error("failed to create audit filters from: " + jsonStr, e); + return null; + } + } + + + public static String toString(List arr) { + String ret = ""; + + if(arr != null && !arr.isEmpty()) { + ret = arr.get(0); + for(int i = 1; i < arr.size(); i++) { + ret += (", " + arr.get(i)); + } + } + + return ret; + } + + public static boolean isEmpty(String str) { + return str == null || str.trim().isEmpty(); + } + + public static Date getUTCDateForLocalDate(Date date) { + Calendar local = Calendar.getInstance(); + int offset = local.getTimeZone().getOffset(local.getTimeInMillis()); + + GregorianCalendar utc = new GregorianCalendar(gmtTimeZone); + + utc.setTimeInMillis(date.getTime()); + utc.add(Calendar.MILLISECOND, -offset); + + return utc.getTime(); + } + + public static Map toStringObjectMap(Map map) { + Map ret = null; + + if (map != null) { + ret = new HashMap<>(map.size()); + + for (Map.Entry e : map.entrySet()) { + ret.put(e.getKey(), e.getValue()); + } + } + + return ret; + } + + public static Set toSet(String str) { + Set values = new HashSet(); + if (StringUtils.isNotBlank(str)) { + for (String item : str.split(",")) { + if (StringUtils.isNotBlank(item)) { + values.add(StringUtils.trim(item)); + } + } + } + return values; + } + + public static List toList(String str) { + List values; + if (StringUtils.isNotBlank(str)) { + values = new ArrayList<>(); + for (String item : str.split(",")) { + if (StringUtils.isNotBlank(item)) { + values.add(StringUtils.trim(item)); + } + } + } else { + values = Collections.emptyList(); + } + return values; + } + + public static List getURLs(String configURLs) { + List configuredURLs = new 
ArrayList<>(); + if(configURLs!=null) { + String[] urls = configURLs.split(","); + for (String strUrl : urls) { + if (StringUtils.isNotEmpty(StringUtils.trimToEmpty(strUrl))) { + if (strUrl.endsWith("/")) { + strUrl = strUrl.substring(0, strUrl.length() - 1); + } + configuredURLs.add(strUrl); + } + } + } + return configuredURLs; + } +} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/StringUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/StringUtil.java deleted file mode 100644 index e685b96a90..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/authorization/utils/StringUtil.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.atlas.authorization.utils; - -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TimeZone; - -public class StringUtil { - - private static final TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT+0"); - - public static boolean equals(String str1, String str2) { - boolean ret = false; - - if(str1 == null) { - ret = str2 == null; - } else if(str2 == null) { - ret = false; - } else { - ret = str1.equals(str2); - } - - return ret; - } - - public static boolean equalsIgnoreCase(String str1, String str2) { - boolean ret = false; - - if(str1 == null) { - ret = str2 == null; - } else if(str2 == null) { - ret = false; - } else { - ret = str1.equalsIgnoreCase(str2); - } - - return ret; - } - - public static boolean equals(Collection set1, Collection set2) { - boolean ret = false; - - if(set1 == null) { - ret = set2 == null; - } else if(set2 == null) { - ret = false; - } else if(set1.size() == set2.size()) { - ret = set1.containsAll(set2); - } - - return ret; - } - - public static boolean equalsIgnoreCase(Collection set1, Collection set2) { - boolean ret = false; - - if(set1 == null) { - ret = set2 == null; - } else if(set2 == null) { - ret = false; - } else if(set1.size() == set2.size()) { - int numFound = 0; - - for(String str1 : set1) { - boolean str1Found = false; - - for(String str2 : set2) { - if(equalsIgnoreCase(str1, str2)) { - str1Found = true; - - break; - } - } - - if(str1Found) { - numFound++; - } else { - break; - } - } - - ret = numFound == set1.size(); - } - - return ret; - } - - public static boolean matches(String pattern, String str) { - boolean ret = false; - - if(pattern == null || str == null || pattern.isEmpty() || str.isEmpty()) { - ret = true; - } else 
{ - ret = str.matches(pattern); - } - - return ret; - } - - /* - public static boolean matches(Collection patternSet, Collection strSet) { - boolean ret = false; - - if(patternSet == null || strSet == null || patternSet.isEmpty() || strSet.isEmpty()) { - ret = true; - } else { - boolean foundUnmatched = false; - - for(String str : strSet) { - boolean isMatched = false; - for(String pattern : patternSet) { - isMatched = str.matches(pattern); - - if(isMatched) { - break; - } - } - - foundUnmatched = ! isMatched; - - if(foundUnmatched) { - break; - } - } - - ret = !foundUnmatched; - } - - return ret; - } - */ - - public static boolean contains(String str, String strToFind) { - return str != null && strToFind != null && str.contains(strToFind); - } - - public static boolean containsIgnoreCase(String str, String strToFind) { - return str != null && strToFind != null && str.toLowerCase().contains(strToFind.toLowerCase()); - } - - public static boolean contains(String[] strArr, String str) { - boolean ret = false; - - if(strArr != null && strArr.length > 0 && str != null) { - for(String s : strArr) { - ret = equals(s, str); - - if(ret) { - break; - } - } - } - - return ret; - } - - public static boolean containsIgnoreCase(String[] strArr, String str) { - boolean ret = false; - - if(strArr != null && strArr.length > 0 && str != null) { - for(String s : strArr) { - ret = equalsIgnoreCase(s, str); - - if(ret) { - break; - } - } - } - - return ret; - } - - public static String toString(Iterable iterable) { - String ret = ""; - - if(iterable != null) { - int count = 0; - for(String str : iterable) { - if(count == 0) - ret = str; - else - ret += (", " + str); - count++; - } - } - - return ret; - } - - public static String toString(String[] arr) { - String ret = ""; - - if(arr != null && arr.length > 0) { - ret = arr[0]; - for(int i = 1; i < arr.length; i++) { - ret += (", " + arr[i]); - } - } - - return ret; - } - - public static String toString(List arr) { - String ret = ""; - - if(arr != null && !arr.isEmpty()) { - ret = arr.get(0); - for(int i = 1; i < arr.size(); i++) { - ret += (", " + arr.get(i)); - } - } - - return ret; - } - - public static boolean isEmpty(String str) { - return str == null || str.trim().isEmpty(); - } - - public static boolean isEmpty(Collection set) { - return set == null || set.isEmpty(); - } - - public static String toLower(String str) { - return str == null ? null : str.toLowerCase(); - } - - public static byte[] getBytes(String str) { - return str == null ? 
null : str.getBytes(); - } - - public static Date getUTCDate() { - Calendar local = Calendar.getInstance(); - int offset = local.getTimeZone().getOffset(local.getTimeInMillis()); - - GregorianCalendar utc = new GregorianCalendar(gmtTimeZone); - - utc.setTimeInMillis(local.getTimeInMillis()); - utc.add(Calendar.MILLISECOND, -offset); - - return utc.getTime(); - } - - public static Date getUTCDateForLocalDate(Date date) { - Calendar local = Calendar.getInstance(); - int offset = local.getTimeZone().getOffset(local.getTimeInMillis()); - - GregorianCalendar utc = new GregorianCalendar(gmtTimeZone); - - utc.setTimeInMillis(date.getTime()); - utc.add(Calendar.MILLISECOND, -offset); - - return utc.getTime(); - } - - public static Map toStringObjectMap(Map map) { - Map ret = null; - - if (map != null) { - ret = new HashMap<>(map.size()); - - for (Map.Entry e : map.entrySet()) { - ret.put(e.getKey(), e.getValue()); - } - } - - return ret; - } - - public static Set toSet(String str) { - Set values = new HashSet(); - if (StringUtils.isNotBlank(str)) { - for (String item : str.split(",")) { - if (StringUtils.isNotBlank(item)) { - values.add(StringUtils.trim(item)); - } - } - } - return values; - } - - public static List toList(String str) { - List values; - if (StringUtils.isNotBlank(str)) { - values = new ArrayList<>(); - for (String item : str.split(",")) { - if (StringUtils.isNotBlank(item)) { - values.add(StringUtils.trim(item)); - } - } - } else { - values = Collections.emptyList(); - } - return values; - } - - public static List getURLs(String configURLs) { - List configuredURLs = new ArrayList<>(); - if(configURLs!=null) { - String[] urls = configURLs.split(","); - for (String strUrl : urls) { - if (StringUtils.isNotEmpty(StringUtils.trimToEmpty(strUrl))) { - if (strUrl.endsWith("/")) { - strUrl = strUrl.substring(0, strUrl.length() - 1); - } - configuredURLs.add(strUrl); - } - } - } - return configuredURLs; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthAdminClient.java b/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthAdminClient.java index a79407ba9c..888254f674 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthAdminClient.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthAdminClient.java @@ -1,6 +1,6 @@ package org.apache.atlas.authz.admin.client; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.util.RangerRoles; import org.apache.atlas.plugin.util.RangerUserStore; import org.apache.atlas.plugin.util.ServicePolicies; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthRESTClient.java b/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthRESTClient.java index 10c97008c3..006e8c019c 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthRESTClient.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/authz/admin/client/AtlasAuthRESTClient.java @@ -3,8 +3,8 @@ import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.authorization.utils.StringUtil; +import org.apache.atlas.authorization.config.RangerPluginConfig; +import org.apache.atlas.authorization.utils.RangerUtil; import 
org.apache.atlas.plugin.util.RangerRoles; import org.apache.atlas.plugin.util.RangerUserStore; import org.apache.atlas.plugin.util.ServicePolicies; @@ -128,7 +128,7 @@ private String getAdminUrl(RangerPluginConfig config) { String url = ""; String tmpUrl = config.get(config.getPropertyPrefix() + ".authz.rest.url"); - if (!StringUtil.isEmpty(tmpUrl)) { + if (!RangerUtil.isEmpty(tmpUrl)) { url = tmpUrl.trim(); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerDefaultAuditHandler.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerDefaultAuditHandler.java index 4f760ee28e..2def5eb6e2 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerDefaultAuditHandler.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerDefaultAuditHandler.java @@ -19,21 +19,20 @@ package org.apache.atlas.plugin.audit; +import org.apache.atlas.type.AtlasType; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.atlas.audit.model.AuthzAuditEvent; import org.apache.atlas.audit.provider.AuditHandler; import org.apache.atlas.audit.provider.MiscUtil; -import org.apache.atlas.authorization.hadoop.constants.RangerHadoopConstants; import org.apache.atlas.plugin.contextenricher.RangerTagForEval; import org.apache.atlas.plugin.policyengine.*; import org.apache.atlas.plugin.service.RangerBasePlugin; -import org.apache.atlas.plugin.util.JsonUtilsV2; import org.apache.atlas.plugin.util.RangerAccessRequestUtil; import org.apache.atlas.plugin.util.RangerRESTUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.Serializable; import java.util.ArrayList; @@ -46,14 +45,14 @@ public class RangerDefaultAuditHandler implements RangerAccessResultProcessor { - private static final Log LOG = LogFactory.getLog(RangerDefaultAuditHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(RangerDefaultAuditHandler.class); private static final String CONF_AUDIT_ID_STRICT_UUID = "xasecure.audit.auditid.strict.uuid"; private static final boolean DEFAULT_AUDIT_ID_STRICT_UUID = false; private final boolean auditIdStrictUUID; - protected final String moduleName; + protected final String moduleName = null; private final RangerRESTUtils restUtils = new RangerRESTUtils(); private long sequenceNumber = 0; private String UUID = MiscUtil.generateUniqueId(); @@ -63,12 +62,10 @@ public class RangerDefaultAuditHandler implements RangerAccessResultProcessor { public RangerDefaultAuditHandler() { auditIdStrictUUID = DEFAULT_AUDIT_ID_STRICT_UUID; - moduleName = RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME; } public RangerDefaultAuditHandler(Configuration config) { auditIdStrictUUID = config.getBoolean(CONF_AUDIT_ID_STRICT_UUID, DEFAULT_AUDIT_ID_STRICT_UUID); - moduleName = config.get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME); } @Override @@ -304,7 +301,7 @@ private String generateNextAuditEventId() { private String writeObjectAsString(Serializable obj) { String jsonStr = StringUtils.EMPTY; try { - jsonStr = JsonUtilsV2.objToJson(obj); + jsonStr = AtlasType.toJson(obj); } catch (Exception e) { LOG.error("Cannot create JSON string for object:[" + obj + "]", e); } diff --git 
a/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerMultiResourceAuditHandler.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerMultiResourceAuditHandler.java deleted file mode 100644 index 47809cf99e..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/audit/RangerMultiResourceAuditHandler.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.audit; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.model.AuthzAuditEvent; - -import java.util.ArrayList; -import java.util.Collection; - -public class RangerMultiResourceAuditHandler extends RangerDefaultAuditHandler { - private static final Log LOG = LogFactory.getLog(RangerMultiResourceAuditHandler.class); - - Collection auditEvents = new ArrayList<>(); - - public RangerMultiResourceAuditHandler() { - } - - - @Override - public void logAuthzAudit(AuthzAuditEvent auditEvent) { - auditEvents.add(auditEvent); - } - - @Override - public void logAuthzAudits(Collection auditEvents) { - this.auditEvents.addAll(auditEvents); - } - - public void flushAudit() { - try { - boolean deniedExists = false; - // First iterate to see if there are any denied - for (AuthzAuditEvent auditEvent : auditEvents) { - if (auditEvent.getAccessResult() == 0) { - deniedExists = true; - break; - } - } - - for (AuthzAuditEvent auditEvent : auditEvents) { - if (deniedExists && auditEvent.getAccessResult() != 0) { - continue; - } - - super.logAuthzAudit(auditEvent); - } - } catch (Throwable t) { - LOG.error("Error occured while writing audit log... 
", t); - } finally { - // reset auditEvents once audits are logged - auditEvents = new ArrayList<>(); - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/classloader/RangerPluginClassLoader.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/classloader/RangerPluginClassLoader.java index d55983d66b..0fcd1003d3 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/classloader/RangerPluginClassLoader.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/classloader/RangerPluginClassLoader.java @@ -28,9 +28,6 @@ import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.security.PrivilegedExceptionAction; import java.util.Enumeration; import java.util.List; @@ -44,13 +41,7 @@ public class RangerPluginClassLoader extends URLClassLoader { public RangerPluginClassLoader(String pluginType, Class pluginClass ) throws Exception { super(RangerPluginClassLoaderUtil.getInstance().getPluginFilesForServiceTypeAndPluginclass(pluginType, pluginClass), null); - componentClassLoader = AccessController.doPrivileged( - new PrivilegedAction() { - public MyClassLoader run() { - return new MyClassLoader(Thread.currentThread().getContextClassLoader()); - } - } - ); + componentClassLoader = new MyClassLoader(Thread.currentThread().getContextClassLoader()); } public static RangerPluginClassLoader getInstance(final String pluginType, final Class pluginClass ) throws Exception { @@ -59,13 +50,7 @@ public static RangerPluginClassLoader getInstance(final String pluginType, final synchronized(RangerPluginClassLoader.class) { ret = me; if (ret == null && pluginClass != null) { - me = ret = AccessController.doPrivileged( - new PrivilegedExceptionAction(){ - public RangerPluginClassLoader run() throws Exception { - return new RangerPluginClassLoader(pluginType,pluginClass); - } - } - ); + me = ret = new RangerPluginClassLoader(pluginType,pluginClass); } } } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/BaseClient.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/BaseClient.java deleted file mode 100644 index 3047c05e55..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/BaseClient.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.atlas.plugin.client; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.SecureClientLogin; -import org.apache.atlas.plugin.util.PasswordUtils; - -import javax.security.auth.Subject; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public abstract class BaseClient { - private static final Log LOG = LogFactory.getLog(BaseClient.class); - - - private static final String DEFAULT_NAME_RULE = "DEFAULT"; - protected static final String DEFAULT_ERROR_MESSAGE = " You can still save the repository and start creating " - + "policies, but you would not be able to use autocomplete for " - + "resource names. Check ranger_admin.log for more info."; - - - private String serviceName; - private String defaultConfigFile; - private Subject loginSubject; - private HadoopConfigHolder configHolder; - - protected Map connectionProperties; - - public BaseClient(String svcName, Map connectionProperties) { - this(svcName, connectionProperties, null); - } - - public BaseClient(String serivceName, Map connectionProperties, String defaultConfigFile) { - this.serviceName = serivceName; - this.connectionProperties = connectionProperties; - this.defaultConfigFile = defaultConfigFile; - init(); - login(); - } - - - private void init() { - if (connectionProperties == null) { - configHolder = HadoopConfigHolder.getInstance(serviceName); - } - else { - configHolder = HadoopConfigHolder.getInstance(serviceName,connectionProperties, defaultConfigFile); - } - } - - - protected void login() { - ClassLoader prevCl = Thread.currentThread().getContextClassLoader(); - try { - //Thread.currentThread().setContextClassLoader(configHolder.getClassLoader()); - String lookupPrincipal = SecureClientLogin.getPrincipal(configHolder.getLookupPrincipal(), java.net.InetAddress.getLocalHost().getCanonicalHostName()); - String lookupKeytab = configHolder.getLookupKeytab(); - String nameRules = configHolder.getNameRules(); - if(StringUtils.isEmpty(nameRules)){ - if(LOG.isDebugEnabled()){ - LOG.debug("Name Rule is empty. 
Setting Name Rule as 'DEFAULT'"); - } - nameRules = DEFAULT_NAME_RULE; - } - String userName = configHolder.getUserName(); - if(StringUtils.isEmpty(lookupPrincipal) || StringUtils.isEmpty(lookupKeytab)){ - if (userName == null) { - throw createException("Unable to find login username for hadoop environment, [" + serviceName + "]", null); - } - String keyTabFile = configHolder.getKeyTabFile(); - if (keyTabFile != null) { - if ( configHolder.isKerberosAuthentication() ) { - LOG.info("Init Login: security enabled, using username/keytab"); - loginSubject = SecureClientLogin.loginUserFromKeytab(userName, keyTabFile, nameRules); - } - else { - LOG.info("Init Login: using username"); - loginSubject = SecureClientLogin.login(userName); - } - } - else { - String encryptedPwd = configHolder.getPassword(); - String password = null; - if (encryptedPwd != null) { - try { - password = PasswordUtils.decryptPassword(encryptedPwd); - } catch(Exception ex) { - LOG.info("Password decryption failed; trying connection with received password string"); - password = null; - } finally { - if (password == null) { - password = encryptedPwd; - } - } - } else { - LOG.info("Password decryption failed: no password was configured"); - } - if ( configHolder.isKerberosAuthentication() ) { - LOG.info("Init Login: using username/password"); - loginSubject = SecureClientLogin.loginUserWithPassword(userName, password); - } - else { - LOG.info("Init Login: security not enabled, using username"); - loginSubject = SecureClientLogin.login(userName); - } - } - }else{ - if ( configHolder.isKerberosAuthentication() ) { - LOG.info("Init Lookup Login: security enabled, using lookupPrincipal/lookupKeytab"); - loginSubject = SecureClientLogin.loginUserFromKeytab(lookupPrincipal, lookupKeytab, nameRules); - }else{ - LOG.info("Init Login: security not enabled, using username"); - loginSubject = SecureClientLogin.login(userName); - } - } - } catch (IOException ioe) { - throw createException(ioe); - } catch (SecurityException se) { - throw createException(se); - } finally { - Thread.currentThread().setContextClassLoader(prevCl); - } - } - - private HadoopException createException(Exception exp) { - return createException("Unable to login to Hadoop environment [" + serviceName + "]", exp); - } - - private HadoopException createException(String msgDesc, Exception exp) { - HadoopException hdpException = new HadoopException(msgDesc, exp); - final String fullDescription = exp != null ? getMessage(exp) : msgDesc; - hdpException.generateResponseDataMap(false, fullDescription + DEFAULT_ERROR_MESSAGE, - msgDesc + DEFAULT_ERROR_MESSAGE, null, null); - return hdpException; - } - - public String getSerivceName() { - return serviceName; - } - - protected Subject getLoginSubject() { - return loginSubject; - } - - protected HadoopConfigHolder getConfigHolder() { - return configHolder; - } - - public static void generateResponseDataMap(boolean connectivityStatus, - String message, String description, Long objectId, - String fieldName, Map responseData) { - responseData.put("connectivityStatus", connectivityStatus); - responseData.put("message", message); - responseData.put("description", description); - responseData.put("objectId", objectId); - responseData.put("fieldName", fieldName); - } - - public static String getMessage(Throwable excp) { - List errList = new ArrayList<>(); - while (excp != null) { - String message = excp.getMessage(); - if (StringUtils.isNotEmpty(message) && !errList.contains(message + ". \n")) { - errList.add(message + ". 
\n"); - } - excp = excp.getCause(); - } - return StringUtils.join(errList, ""); - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopConfigHolder.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopConfigHolder.java deleted file mode 100644 index 26c585d3dc..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopConfigHolder.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.atlas.plugin.client; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.SecureClientLogin; - -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - -public class HadoopConfigHolder { - private static final Log LOG = LogFactory.getLog(HadoopConfigHolder.class); - public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties"; - public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties"; - public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties"; - public static final String DEFAULT_RESOURCE_NAME = "core-site.xml"; - public static final String RANGER_SECTION_NAME = "xalogin.xml"; - public static final String RANGER_LOGIN_USER_NAME_PROP = "username"; - public static final String RANGER_LOGIN_KEYTAB_FILE_PROP = "keytabfile"; - public static final String RANGER_LOGIN_PASSWORD = "password"; - public static final String RANGER_LOOKUP_PRINCIPAL = "lookupprincipal"; - public static final String RANGER_LOOKUP_KEYTAB = "lookupkeytab"; - public static final String RANGER_PRINCIPAL = "rangerprincipal"; - public static final String RANGER_KEYTAB = "rangerkeytab"; - public static final String RANGER_NAME_RULES = "namerules"; - public static final String RANGER_AUTH_TYPE = "authtype"; - public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; - public static final String HADOOP_NAME_RULES = "hadoop.security.auth_to_local"; - public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos"; - public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection"; - - public static final String ENABLE_HIVE_METASTORE_LOOKUP = "enable.hive.metastore.lookup"; - public static final String HIVE_SITE_FILE_PATH = "hive.site.file.path"; - - private static boolean initialized; - private static Map> dataSource2ResourceListMap = new HashMap<>(); - private static Map dataSource2HadoopConfigHolder = new HashMap<>(); - private static Properties globalLoginProp = new 
Properties(); - private static Properties resourcemapProperties; - - private String datasourceName; - private String defaultConfigFile; - private String userName; - private String keyTabFile; - private String password; - private String lookupPrincipal; - private String lookupKeytab; - private String nameRules; - private String authType; - private String hiveSiteFilePath; - private boolean isKerberosAuth; - private boolean enableHiveMetastoreLookup; - - private Map connectionProperties; - - private static Set rangerInternalPropertyKeys = new HashSet<>(); - - public static HadoopConfigHolder getInstance(String aDatasourceName) { - HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName); - if (ret == null) { - synchronized (HadoopConfigHolder.class) { - HadoopConfigHolder temp = ret; - if (temp == null) { - ret = new HadoopConfigHolder(aDatasourceName); - dataSource2HadoopConfigHolder.put(aDatasourceName, ret); - } - } - } - return ret; - } - - public static HadoopConfigHolder getInstance(String aDatasourceName, Map connectionProperties, - String defaultConfigFile) { - HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName); - if (ret == null) { - synchronized (HadoopConfigHolder.class) { - HadoopConfigHolder temp = ret; - if (temp == null) { - ret = new HadoopConfigHolder(aDatasourceName,connectionProperties, defaultConfigFile); - dataSource2HadoopConfigHolder.put(aDatasourceName, ret); - } - } - } - else { - if (connectionProperties !=null && !connectionProperties.equals(ret.connectionProperties)) { - ret = new HadoopConfigHolder(aDatasourceName,connectionProperties); - dataSource2HadoopConfigHolder.remove(aDatasourceName); - dataSource2HadoopConfigHolder.put(aDatasourceName, ret); - } - } - - return ret; - } - - - - private HadoopConfigHolder(String aDatasourceName) { - datasourceName = aDatasourceName; - if (!initialized) { - init(); - } - initLoginInfo(); - } - - private HadoopConfigHolder(String aDatasourceName, - Map connectionProperties) { - this(aDatasourceName, connectionProperties, null); - } - - private HadoopConfigHolder(String aDatasourceName, Map connectionProperties, - String defaultConfigFile) { - datasourceName = aDatasourceName; - this.connectionProperties = connectionProperties; - this.defaultConfigFile = defaultConfigFile; - initConnectionProp(); - initLoginInfo(); - } - - private void initConnectionProp() { - if (!connectionProperties.containsKey(ENABLE_HIVE_METASTORE_LOOKUP)) { - connectionProperties.put(ENABLE_HIVE_METASTORE_LOOKUP, "false"); - } - if (!connectionProperties.containsKey(HIVE_SITE_FILE_PATH)) { - connectionProperties.put(HIVE_SITE_FILE_PATH, ""); - } - - for (Map.Entry entry : connectionProperties.entrySet()) { - String key = entry.getKey(); - String resourceName = getResourceName(key); - - if (resourceName == null) { - resourceName = RANGER_SECTION_NAME; - } - String val = entry.getValue(); - addConfiguration(datasourceName, resourceName, key, val ); - } - } - - private String getResourceName(String key) { - - if (resourcemapProperties == null) { - initResourceMap(); - } - - if (resourcemapProperties != null) { - String rn = resourcemapProperties.getProperty(key); - return ( rn != null) ? 
rn : defaultConfigFile; - } else { - return defaultConfigFile; - } - } - - private static void initResourceMap() { - Properties props = new Properties(); - InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE); - if (in != null) { - try { - props.load(in); - for (Map.Entry entry : props - .entrySet()) { - String value = (String) entry.getValue(); - if (RANGER_SECTION_NAME.equals(value)) { - String key = (String) entry.getKey(); - rangerInternalPropertyKeys.add(key); - } - } - resourcemapProperties = props; - } catch (IOException e) { - throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e); - } - finally { - if (in != null) { - try { - in.close(); - } catch (IOException ioe) { - // Ignore IOException during close of stream - } - } - } - } else { - throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path."); - } - } - - - - private static synchronized void init() { - - if (initialized) { - return; - } - - try { - InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE); - if (in != null) { - Properties prop = new Properties(); - try { - prop.load(in); - } catch (IOException e) { - throw new HadoopException("Unable to get configuration information for Hadoop environments", e); - } - finally { - try { - in.close(); - } catch (IOException e) { - // Ignored exception when the stream is closed. - } - } - - if (prop.isEmpty()) { - return; - } - - for (Object keyobj : prop.keySet()) { - String key = (String)keyobj; - String val = prop.getProperty(key); - - int dotLocatedAt = key.indexOf("."); - - if (dotLocatedAt == -1) { - continue; - } - - String dataSource = key.substring(0,dotLocatedAt); - - String propKey = key.substring(dotLocatedAt+1); - int resourceFoundAt = propKey.indexOf("."); - if (resourceFoundAt > -1) { - String resourceName = propKey.substring(0, resourceFoundAt) + ".xml"; - propKey = propKey.substring(resourceFoundAt+1); - addConfiguration(dataSource, resourceName, propKey, val); - } - - } - } - - in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE); - if (in != null) { - Properties tempLoginProp = new Properties(); - try { - tempLoginProp.load(in); - } catch (IOException e) { - throw new HadoopException("Unable to get login configuration information for Hadoop environments from file: [" + GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e); - } - finally { - try { - in.close(); - } catch (IOException e) { - // Ignored exception when the stream is closed. 
- } - } - globalLoginProp = tempLoginProp; - } - } - finally { - initialized = true; - } - } - - - private void initLoginInfo() { - Properties prop = this.getRangerSection(); - if (prop != null) { - userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP); - keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP); - if (StringUtils.trimToNull(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP)) != null) { - try { - enableHiveMetastoreLookup = Boolean.valueOf(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP,"false").trim()); - } catch (Exception e) { - enableHiveMetastoreLookup = false; - LOG.error("Error while getting " + ENABLE_HIVE_METASTORE_LOOKUP + " : " + e.getMessage()); - } - } - if (StringUtils.trimToNull(prop.getProperty(HIVE_SITE_FILE_PATH)) != null) { - hiveSiteFilePath = prop.getProperty(HIVE_SITE_FILE_PATH).trim(); - } else { - hiveSiteFilePath = null; - } - - password = prop.getProperty(RANGER_LOGIN_PASSWORD); - lookupPrincipal = prop.getProperty(RANGER_LOOKUP_PRINCIPAL); - lookupKeytab = prop.getProperty(RANGER_LOOKUP_KEYTAB); - nameRules = prop.getProperty(RANGER_NAME_RULES); - authType = prop.getProperty(RANGER_AUTH_TYPE, "simple"); - - String hadoopSecurityAuthentication = getHadoopSecurityAuthentication(); - - if (hadoopSecurityAuthentication != null) { - isKerberosAuth = HADOOP_SECURITY_AUTHENTICATION_METHOD.equalsIgnoreCase(hadoopSecurityAuthentication); - } else { - isKerberosAuth = (((userName != null) && (userName.indexOf("@") > -1)) || (SecureClientLogin.isKerberosCredentialExists(lookupPrincipal, lookupKeytab))); - } - } - } - - - public Properties getRangerSection() { - Properties prop = this.getProperties(RANGER_SECTION_NAME); - if (prop == null) { - prop = globalLoginProp; - } - return prop; - } - - - - private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) { - - if (dataSource == null || dataSource.isEmpty()) { - return; - } - - if (propertyName == null || propertyName.isEmpty()) { - return; - } - - if (resourceName == null) { - resourceName = DEFAULT_RESOURCE_NAME; - } - - - HashMap resourceName2PropertiesMap = dataSource2ResourceListMap.get(dataSource); - - if (resourceName2PropertiesMap == null) { - resourceName2PropertiesMap = new HashMap<>(); - dataSource2ResourceListMap.put(dataSource, resourceName2PropertiesMap); - } - - Properties prop = resourceName2PropertiesMap.get(resourceName); - if (prop == null) { - prop = new Properties(); - resourceName2PropertiesMap.put(resourceName, prop); - } - if (value == null) { - prop.remove(propertyName); - } else { - prop.put(propertyName, value); - } - } - - - public String getDatasourceName() { - return datasourceName; - } - - public boolean hasResourceExists(String aResourceName) { // dilli - HashMap resourceName2PropertiesMap = dataSource2ResourceListMap.get(datasourceName); - return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)); - } - - public Properties getProperties(String aResourceName) { - Properties ret = null; - HashMap resourceName2PropertiesMap = dataSource2ResourceListMap.get(datasourceName); - if (resourceName2PropertiesMap != null) { - ret = resourceName2PropertiesMap.get(aResourceName); - } - return ret; - } - - public String getHadoopSecurityAuthentication() { - String ret = null; - String sectionName = RANGER_SECTION_NAME; - - if (defaultConfigFile != null) { - sectionName = defaultConfigFile; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("==> HadoopConfigHolder.getHadoopSecurityAuthentication( " 
+ " DataSource : " + sectionName + " Property : " + HADOOP_SECURITY_AUTHENTICATION + ")" ); - } - - ret = getProperties(sectionName,HADOOP_SECURITY_AUTHENTICATION); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HadoopConfigHolder.getHadoopSecurityAuthentication(" + " DataSource : " + sectionName + " Property : " + HADOOP_SECURITY_AUTHENTICATION + " Value : " + ret + ")" ); - } - - return ret; - } - - public String getUserName() { - return userName; - } - - public String getKeyTabFile() { - return keyTabFile; - } - - public String getPassword() { - return password; - } - - public boolean isKerberosAuthentication() { - return isKerberosAuth; - } - - public String getLookupPrincipal() { - return lookupPrincipal; - } - - public String getLookupKeytab() { - return lookupKeytab; - } - - public String getNameRules() { - return nameRules; - } - - public String getAuthType() { - return authType; - } - - public boolean isEnableHiveMetastoreLookup() { - return enableHiveMetastoreLookup; - } - - public String getHiveSiteFilePath() { - return hiveSiteFilePath; - } - - public Set getRangerInternalPropertyKeys() { - return rangerInternalPropertyKeys; - } - - private String getProperties(String sectionName, String property) { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " + property + ")" ); - } - - Properties repoParam = null; - String ret = null; - - HashMap resourceName2PropertiesMap = dataSource2ResourceListMap.get(this.getDatasourceName()); - - if (resourceName2PropertiesMap != null) { - repoParam=resourceName2PropertiesMap.get(sectionName); - } - - if (repoParam != null) { - ret = (String)repoParam.get(property); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " + property + " Value : " + ret); - } - - return ret; - } - - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopException.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopException.java deleted file mode 100644 index 7f8cc52d9e..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/client/HadoopException.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.atlas.plugin.client; - -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -public class HadoopException extends RuntimeException { - private static final long serialVersionUID = 8872734935128535649L; - - public HashMap responseData; - - public HadoopException() { - super(); - } - - public HadoopException(String message, Throwable cause) { - super(message, cause); - } - - public HadoopException(String message) { - super(message); - } - - public HadoopException(Throwable cause) { - super(cause); - } - - public void generateResponseDataMap(boolean connectivityStatus, - String message, String description, Long objectId, String fieldName) { - responseData = new HashMap<>(); - responseData.put("connectivityStatus", connectivityStatus); - responseData.put("message", message); - responseData.put("description", description); - responseData.put("objectId", objectId); - responseData.put("fieldName", fieldName); - } - - public String getMessage(Throwable excp) { - List errList = new ArrayList<>(); - while (excp != null) { - if (!errList.contains(excp.getMessage() + ". \n") && !errList.contains(excp.toString() + ". \n")) { - if (excp.getMessage() != null && !(excp.getMessage().equalsIgnoreCase(""))) { - errList.add(excp.getMessage() + ". \n"); - } - } - excp = excp.getCause(); - } - return StringUtils.join(errList, ""); - } - - public HashMap getResponseData() { - return responseData; - } - - public void setReponseData(HashMap responseData) { - this.responseData = responseData; - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesAccessedTogetherCondition.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesAccessedTogetherCondition.java deleted file mode 100644 index b1849dfac3..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesAccessedTogetherCondition.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.conditionevaluator; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.policyengine.RangerAccessRequest; -import org.apache.atlas.plugin.policyresourcematcher.RangerDefaultPolicyResourceMatcher; -import org.apache.atlas.plugin.policyresourcematcher.RangerPolicyResourceMatcher; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; -import org.apache.atlas.plugin.util.RangerAccessRequestUtil; -import org.apache.atlas.plugin.util.RangerRequestedResources; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class RangerHiveResourcesAccessedTogetherCondition extends RangerAbstractConditionEvaluator { - private static final Log LOG = LogFactory.getLog(RangerHiveResourcesAccessedTogetherCondition.class); - - private List matchers = new ArrayList<>(); - private boolean isInitialized; - - @Override - public void init() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesAccessedTogetherCondition.init(" + condition + ")"); - } - - super.init(); - - if (serviceDef != null) { - doInitialize(); - } else { - LOG.error("RangerHiveResourcesAccessedTogetherCondition.init() - ServiceDef not set ... ERROR .."); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesAccessedTogetherCondition.init(" + condition + ")"); - } - } - - @Override - public boolean isMatched(final RangerAccessRequest request) { - boolean ret = true; - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesAccessedTogetherCondition.isMatched(" + request + ")"); - } - - if (isInitialized && CollectionUtils.isNotEmpty(matchers)) { - RangerRequestedResources resources = RangerAccessRequestUtil.getRequestedResourcesFromContext(request.getContext()); - - ret = resources != null && !resources.isMutuallyExcluded(matchers, request.getContext()); - } else { - LOG.error("RangerHiveResourcesAccessedTogetherCondition.isMatched() - condition is not initialized correctly and will NOT be enforced"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesAccessedTogetherCondition.isMatched(" + request + ")" + ", result=" + ret); - } - - return ret; - } - - private void doInitialize() { - List mutuallyExclusiveResources = condition.getValues(); - - if (CollectionUtils.isNotEmpty(mutuallyExclusiveResources)) { - initializeMatchers(mutuallyExclusiveResources); - - if (CollectionUtils.isEmpty(matchers)) { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesAccessedTogetherCondition.doInitialize() - Cannot create matchers from values in MutualExclustionEnforcer"); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesAccessedTogetherCondition.doInitialize() - Created " + matchers.size() + " matchers from values in MutualExclustionEnforcer"); - } - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesAccessedTogetherCondition.doInitialize() - No values in MutualExclustionEnforcer"); - } - } - - isInitialized = true; - } - - private void initializeMatchers(List mutuallyExclusiveResources) { - - for (String s : mutuallyExclusiveResources) { - - String policyResourceSpec = s.trim(); - - RangerPolicyResourceMatcher matcher = buildMatcher(policyResourceSpec); - - if (matcher != null) { - matchers.add(matcher); - } - } - 
} - - private RangerPolicyResourceMatcher buildMatcher(String policyResourceSpec) { - - RangerPolicyResourceMatcher matcher = null; - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesAccessedTogetherCondition.buildMatcher(" + policyResourceSpec + ")"); - } - - // Works only for Hive serviceDef for now - if (serviceDef != null && EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_HIVE_NAME.equals(serviceDef.getName())) { - //Parse policyResourceSpec - char separator = '.'; - String any = "*"; - - Map policyResources = new HashMap<>(); - - String[] elements = StringUtils.split(policyResourceSpec, separator); - - RangerPolicy.RangerPolicyResource policyResource; - - if (elements.length > 0 && elements.length < 4) { - if (elements.length == 3) { - policyResource = new RangerPolicy.RangerPolicyResource(elements[2]); - } else { - policyResource = new RangerPolicy.RangerPolicyResource(any); - } - policyResources.put("column", policyResource); - - if (elements.length >= 2) { - policyResource = new RangerPolicy.RangerPolicyResource(elements[1]); - } else { - policyResource = new RangerPolicy.RangerPolicyResource(any); - } - policyResources.put("table", policyResource); - - policyResource = new RangerPolicy.RangerPolicyResource(elements[0]); - policyResources.put("database", policyResource); - - matcher = new RangerDefaultPolicyResourceMatcher(); - matcher.setPolicyResources(policyResources); - matcher.setServiceDef(serviceDef); - matcher.init(); - - } else { - LOG.error("RangerHiveResourcesAccessedTogetherCondition.buildMatcher() - Incorrect elements in the hierarchy specified (" - + elements.length + ")"); - } - } else { - LOG.error("RangerHiveResourcesAccessedTogetherCondition.buildMatcher() - ServiceDef not set or ServiceDef is not for Hive"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesAccessedTogetherCondition.buildMatcher(" + policyResourceSpec + ")" + ", matcher=" + matcher); - } - - return matcher; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesNotAccessedTogetherCondition.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesNotAccessedTogetherCondition.java deleted file mode 100644 index aca442567a..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerHiveResourcesNotAccessedTogetherCondition.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.conditionevaluator; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.policyengine.RangerAccessRequest; -import org.apache.atlas.plugin.policyresourcematcher.RangerDefaultPolicyResourceMatcher; -import org.apache.atlas.plugin.policyresourcematcher.RangerPolicyResourceMatcher; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; -import org.apache.atlas.plugin.util.RangerAccessRequestUtil; -import org.apache.atlas.plugin.util.RangerRequestedResources; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class RangerHiveResourcesNotAccessedTogetherCondition extends RangerAbstractConditionEvaluator { - private static final Log LOG = LogFactory.getLog(RangerHiveResourcesNotAccessedTogetherCondition.class); - - private List matchers = new ArrayList<>(); - private boolean isInitialized; - - @Override - public void init() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesNotAccessedTogetherCondition.init(" + condition + ")"); - } - - super.init(); - - if (serviceDef != null) { - doInitialize(); - } else { - LOG.error("RangerHiveResourcesNotAccessedTogetherCondition.init() - ServiceDef not set ... ERROR .."); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesNotAccessedTogetherCondition.init(" + condition + ")"); - } - } - - @Override - public boolean isMatched(final RangerAccessRequest request) { - boolean ret = true; - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesNotAccessedTogetherCondition.isMatched(" + request + ")"); - } - - if (isInitialized && CollectionUtils.isNotEmpty(matchers)) { - RangerRequestedResources resources = RangerAccessRequestUtil.getRequestedResourcesFromContext(request.getContext()); - - ret = resources == null || resources.isMutuallyExcluded(matchers, request.getContext()); - } else { - LOG.error("RangerHiveResourcesNotAccessedTogetherCondition.isMatched() - Enforcer is not initialized correctly, Mutual Exclusion will NOT be enforced"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesNotAccessedTogetherCondition.isMatched(" + request + ")" + ", result=" + ret); - } - - return ret; - } - - private void doInitialize() { - List mutuallyExclusiveResources = condition.getValues(); - - if (CollectionUtils.isNotEmpty(mutuallyExclusiveResources)) { - initializeMatchers(mutuallyExclusiveResources); - - if (CollectionUtils.isEmpty(matchers)) { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesNotAccessedTogetherCondition.doInitialize() - Cannot create matchers from values in MutualExclustionEnforcer"); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesNotAccessedTogetherCondition.doInitialize() - Created " + matchers.size() + " matchers from values in MutualExclustionEnforcer"); - } - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerHiveResourcesNotAccessedTogetherCondition.doInitialize() - No values in MutualExclustionEnforcer"); - } - } - - isInitialized = true; - } - - private void initializeMatchers(List mutuallyExclusiveResources) { - - for (String s : mutuallyExclusiveResources) { - - String policyResourceSpec = s.trim(); - - RangerPolicyResourceMatcher matcher = buildMatcher(policyResourceSpec); - - if (matcher 
!= null) { - matchers.add(matcher); - } - } - } - - private RangerPolicyResourceMatcher buildMatcher(String policyResourceSpec) { - - RangerPolicyResourceMatcher matcher = null; - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerHiveResourcesNotAccessedTogetherCondition.buildMatcher(" + policyResourceSpec + ")"); - } - - // Works only for Hive serviceDef for now - if (serviceDef != null && EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_HIVE_NAME.equals(serviceDef.getName())) { - //Parse policyResourceSpec - char separator = '.'; - String any = "*"; - - Map policyResources = new HashMap<>(); - - String[] elements = StringUtils.split(policyResourceSpec, separator); - - RangerPolicy.RangerPolicyResource policyResource; - - if (elements.length > 0 && elements.length < 4) { - if (elements.length == 3) { - policyResource = new RangerPolicy.RangerPolicyResource(elements[2]); - } else { - policyResource = new RangerPolicy.RangerPolicyResource(any); - } - policyResources.put("column", policyResource); - - if (elements.length >= 2) { - policyResource = new RangerPolicy.RangerPolicyResource(elements[1]); - } else { - policyResource = new RangerPolicy.RangerPolicyResource(any); - } - policyResources.put("table", policyResource); - - policyResource = new RangerPolicy.RangerPolicyResource(elements[0]); - policyResources.put("database", policyResource); - - matcher = new RangerDefaultPolicyResourceMatcher(); - matcher.setPolicyResources(policyResources); - matcher.setServiceDef(serviceDef); - matcher.init(); - - } else { - LOG.error("RangerHiveResourcesNotAccessedTogetherCondition.buildMatcher() - Incorrect elements in the hierarchy specified (" - + elements.length + ")"); - } - } else { - LOG.error("RangerHiveResourcesNotAccessedTogetherCondition.buildMatcher() - ServiceDef not set or ServiceDef is not for Hive"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerHiveResourcesNotAccessedTogetherCondition.buildMatcher(" + policyResourceSpec + ")" + ", matcher=" + matcher); - } - - return matcher; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerScriptExecutionContext.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerScriptExecutionContext.java index 2b39df3b05..3fb1f4bafd 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerScriptExecutionContext.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/conditionevaluator/RangerScriptExecutionContext.java @@ -24,8 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.utils.JsonUtils; -import org.apache.atlas.authorization.utils.StringUtil; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.atlas.plugin.contextenricher.RangerTagForEval; import org.apache.atlas.plugin.policyengine.RangerAccessRequest; import org.apache.atlas.plugin.policyengine.RangerAccessResource; @@ -204,7 +203,7 @@ public String toJson() { } } - String strRet = JsonUtils.objectToJson(ret); + String strRet = RangerUtil.objectToJson(ret); RangerPerfTracer.log(perf); @@ -463,7 +462,7 @@ public Date getAsDate(String value) { if (ret == null) { logError("RangerScriptExecutionContext.getAsDate() - Could not convert [" + value + "] to Date using any of the Format-Strings: " + Arrays.toString(dateFormatStrings)); } else { - ret = StringUtil.getUTCDateForLocalDate(ret); + ret = 
RangerUtil.getUTCDateForLocalDate(ret); } return ret; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractContextEnricher.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractContextEnricher.java index 3470ed1c7b..4e39cf5735 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractContextEnricher.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractContextEnricher.java @@ -23,7 +23,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.model.RangerServiceDef; import org.apache.atlas.plugin.model.RangerServiceDef.RangerContextEnricherDef; import org.apache.atlas.plugin.policyengine.RangerAccessRequest; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractGeolocationProvider.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractGeolocationProvider.java deleted file mode 100644 index 26c2595a85..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAbstractGeolocationProvider.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.contextenricher; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.geo.RangerGeolocationData; -import org.apache.atlas.plugin.geo.RangerGeolocationDatabase; -import org.apache.atlas.plugin.policyengine.RangerAccessRequest; -import org.apache.atlas.plugin.store.GeolocationStore; - -import java.util.Map; - -public abstract class RangerAbstractGeolocationProvider extends RangerAbstractContextEnricher { - - private static final Log LOG = LogFactory.getLog(RangerAbstractGeolocationProvider.class); - - public static final String ENRICHER_OPTION_GEOLOCATION_META_PREFIX = "geolocation.meta.prefix"; - - public static final String KEY_CONTEXT_GEOLOCATION_PREFIX = "LOCATION_"; - private GeolocationStore store; - private String geoMetaPrefix; - - abstract public String getGeoSourceLoader(); - - @Override - public void init() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAbstractGeolocationProvider.init()"); - } - - super.init(); - - geoMetaPrefix = getOption(ENRICHER_OPTION_GEOLOCATION_META_PREFIX); - if (geoMetaPrefix == null) { - geoMetaPrefix = ""; - } - - String geoSourceLoader = getGeoSourceLoader(); - - GeolocationStore geoStore = null; - Map context = enricherDef.getEnricherOptions(); - - if (context != null) { - try { - // Get the class definition and ensure it is of the correct type - @SuppressWarnings("unchecked") - Class geoSourceLoaderClass = (Class) Class.forName(geoSourceLoader); - // instantiate the loader class and initialize it with options - geoStore = geoSourceLoaderClass.newInstance(); - } - catch (ClassNotFoundException exception) { - LOG.error("RangerAbstractGeolocationProvider.init() - Class " + geoSourceLoader + " not found, exception=" + exception); - } - catch (ClassCastException exception) { - LOG.error("RangerAbstractGeolocationProvider.init() - Class " + geoSourceLoader + " is not a type of GeolocationStore, exception=" + exception); - } - catch (IllegalAccessException exception) { - LOG.error("RangerAbstractGeolocationProvider.init() - Class " + geoSourceLoader + " could not be instantiated, exception=" + exception); - } - catch (InstantiationException exception) { - LOG.error("RangerAbstractGeolocationProvider.init() - Class " + geoSourceLoader + " could not be instantiated, exception=" + exception); - } - - if (geoStore != null) { - try { - geoStore.init(context); - store = geoStore; - } catch (Exception exception) { - LOG.error("RangerAbstractGeolocationProvider.init() - geoLocation Store cannot be initialized, exception=" + exception); - } - } - } - - if (store == null) { - LOG.error("RangerAbstractGeolocationProvider.init() - is not initialized correctly."); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAbstractGeolocationProvider.init()"); - } - } - - @Override - public void enrich(RangerAccessRequest request) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAbstractGeolocationProvider.enrich(" + request + ")"); - } - - RangerGeolocationData geolocation = null; - - String clientIPAddress = request.getClientIPAddress(); - - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAbstractGeolocationProvider.enrich() - clientIPAddress=" + clientIPAddress); - } - - if (StringUtils.isNotBlank(clientIPAddress) && store != null) { - geolocation = store.getGeoLocation(clientIPAddress); - - if (geolocation != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAbstractGeolocationProvider.enrich() - 
Country=" + geolocation); - } - Map context = request.getContext(); - - String[] geoAttrValues = geolocation.getLocationData(); - - RangerGeolocationDatabase database = store.getGeoDatabase(); - String[] attributeNames = database.getMetadata().getLocationDataItemNames(); - - for (int i = 0; i < geoAttrValues.length && i < attributeNames.length; i++) { - String contextName = KEY_CONTEXT_GEOLOCATION_PREFIX + geoMetaPrefix + attributeNames[i]; - context.put(contextName, geoAttrValues[i]); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAbstractGeolocationProvider.enrich() - clientIPAddress '" + clientIPAddress + "' not found."); - } - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAbstractGeolocationProvider.enrich() - clientIPAddress is null or blank, cannot get geolocation"); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerAbstractGeolocationProvider.enrich(" + request + ")"); - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminTagRetriever.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminTagRetriever.java deleted file mode 100644 index 55e909e370..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminTagRetriever.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.contextenricher; - -import org.apache.atlas.authz.admin.client.AtlasAuthAdminClient; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.plugin.policyengine.RangerPluginContext; -import org.apache.atlas.plugin.util.ServiceTags; - -import java.util.Map; - -public class RangerAdminTagRetriever extends RangerTagRetriever { - private static final Log LOG = LogFactory.getLog(RangerAdminTagRetriever.class); - - private AtlasAuthAdminClient atlasAuthAdminClient; - - @Override - public void init(Map options) { - - if (StringUtils.isNotBlank(serviceName) && serviceDef != null && StringUtils.isNotBlank(appId)) { - RangerPluginConfig pluginConfig = super.pluginConfig; - - if (pluginConfig == null) { - pluginConfig = new RangerPluginConfig(serviceDef.getName(), serviceName, appId, null, null, null); - } - - RangerPluginContext pluginContext = getPluginContext(); - this.atlasAuthAdminClient = pluginContext.getAtlasAuthAdminClient(); - - } else { - LOG.error("FATAL: Cannot find service/serviceDef to use for retrieving tags. 
Will NOT be able to retrieve tags."); - } - } - - @Override - public ServiceTags retrieveTags(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - - ServiceTags serviceTags = null; - - /*if (adminClient != null) { - try { - serviceTags = adminClient.getServiceTagsIfUpdated(lastKnownVersion, lastActivationTimeInMillis); - } catch (ClosedByInterruptException closedByInterruptException) { - LOG.error("Tag-retriever thread was interrupted while blocked on I/O"); - throw new InterruptedException(); - } catch (Exception e) { - LOG.error("Tag-retriever encounterd exception, exception=", e); - LOG.error("Returning null service tags"); - } - }*/ - return serviceTags; - } - -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminUserStoreRetriever.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminUserStoreRetriever.java deleted file mode 100644 index 237c9fc960..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerAdminUserStoreRetriever.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.contextenricher; - -import org.apache.atlas.authz.admin.client.AtlasAuthAdminClient; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.plugin.policyengine.RangerPluginContext; -import org.apache.atlas.plugin.util.RangerUserStore; - -import java.nio.channels.ClosedByInterruptException; -import java.util.Map; - -public class RangerAdminUserStoreRetriever extends RangerUserStoreRetriever { - private static final Log LOG = LogFactory.getLog(RangerAdminUserStoreRetriever.class); - - private AtlasAuthAdminClient atlasAuthAdminClient; - - @Override - public void init(Map options) { - - if (StringUtils.isNotBlank(serviceName) && serviceDef != null && StringUtils.isNotBlank(appId)) { - RangerPluginConfig pluginConfig = super.pluginConfig; - - if (pluginConfig == null) { - pluginConfig = new RangerPluginConfig(serviceDef.getName(), serviceName, appId, null, null, null); - } - - RangerPluginContext pluginContext = getPluginContext(); - this.atlasAuthAdminClient = pluginContext.getAtlasAuthAdminClient(); - - } else { - LOG.error("FATAL: Cannot find service/serviceDef to use for retrieving userstore. 
Will NOT be able to retrieve userstore."); - } - } - - @Override - public RangerUserStore retrieveUserStoreInfo(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - - RangerUserStore rangerUserStore = null; - - if (atlasAuthAdminClient != null) { - try { - rangerUserStore = atlasAuthAdminClient.getUserStoreIfUpdated(lastActivationTimeInMillis); - } catch (ClosedByInterruptException closedByInterruptException) { - LOG.error("UserStore-retriever thread was interrupted while blocked on I/O"); - throw new InterruptedException(); - } catch (Exception e) { - LOG.error("UserStore-retriever encounterd exception, exception=", e); - LOG.error("Returning null userstore info"); - } - } - return rangerUserStore; - } - -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedGeolocationProvider.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedGeolocationProvider.java deleted file mode 100644 index c71119731b..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedGeolocationProvider.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.contextenricher; - -public class RangerFileBasedGeolocationProvider extends RangerAbstractGeolocationProvider { - - public static final String GEOLOCATION_SOURCE_LOADER_FILELOADER = "org.apache.atlas.plugin.store.file.GeolocationFileStore"; - - @Override - public String getGeoSourceLoader() { - return GEOLOCATION_SOURCE_LOADER_FILELOADER; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedTagRetriever.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedTagRetriever.java deleted file mode 100644 index 79a4ea956b..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerFileBasedTagRetriever.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.contextenricher; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.util.ServiceTags; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.charset.Charset; -import java.util.Map; - -public class RangerFileBasedTagRetriever extends RangerTagRetriever { - private static final Log LOG = LogFactory.getLog(RangerFileBasedTagRetriever.class); - - - private URL serviceTagsFileURL; - private String serviceTagsFileName; - private Gson gsonBuilder; - - @Override - public void init(Map options) { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> init()" ); - } - - gsonBuilder = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z") - .setPrettyPrinting() - .create(); - - String serviceTagsFileNameProperty = "serviceTagsFileName"; - String serviceTagsDefaultFileName = "/testdata/test_servicetags_hive.json"; - - if (StringUtils.isNotBlank(serviceName) && serviceDef != null && StringUtils.isNotBlank(appId)) { - InputStream serviceTagsFileStream = null; - - - // Open specified file from options- it should contain service-tags - - serviceTagsFileName = options != null? options.get(serviceTagsFileNameProperty) : null; - - serviceTagsFileName = serviceTagsFileName == null ? serviceTagsDefaultFileName : serviceTagsFileName; - - File f = new File(serviceTagsFileName); - - if (f.exists() && f.isFile() && f.canRead()) { - try { - serviceTagsFileStream = new FileInputStream(f); - serviceTagsFileURL = f.toURI().toURL(); - } catch (FileNotFoundException exception) { - LOG.error("Error processing input file:" + serviceTagsFileName + " or no privilege for reading file " + serviceTagsFileName, exception); - } catch (MalformedURLException malformedException) { - LOG.error("Error processing input file:" + serviceTagsFileName + " cannot be converted to URL " + serviceTagsFileName, malformedException); - } - } else { - URL fileURL = getClass().getResource(serviceTagsFileName); - if (fileURL == null && !serviceTagsFileName.startsWith("/")) { - fileURL = getClass().getResource("/" + serviceTagsFileName); - } - - if (fileURL == null) { - fileURL = ClassLoader.getSystemClassLoader().getResource(serviceTagsFileName); - if (fileURL == null && !serviceTagsFileName.startsWith("/")) { - fileURL = ClassLoader.getSystemClassLoader().getResource("/" + serviceTagsFileName); - } - } - - if (fileURL != null) { - try { - serviceTagsFileStream = fileURL.openStream(); - serviceTagsFileURL = fileURL; - } catch (Exception exception) { - LOG.error(serviceTagsFileName + " is not a file", exception); - } - } else { - LOG.warn("Error processing input file: URL not found for " + serviceTagsFileName + " or no privilege for reading file " + serviceTagsFileName); - } - } - - if (serviceTagsFileStream != null) { - try { - serviceTagsFileStream.close(); - } catch (Exception e) { - // Ignore - } - } - - } else { - LOG.error("FATAL: Cannot find service/serviceDef/serviceTagsFile to use for retrieving tags. 
Will NOT be able to retrieve tags."); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== init() : serviceTagsFileName=" + serviceTagsFileName); - } - } - - @Override - public ServiceTags retrieveTags(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> retrieveTags(lastKnownVersion=" + lastKnownVersion + ", lastActivationTimeInMillis=" + lastActivationTimeInMillis + ", serviceTagsFilePath=" + serviceTagsFileName); - } - - ServiceTags serviceTags = null; - - if (serviceTagsFileURL != null) { - try ( - InputStream serviceTagsFileStream = serviceTagsFileURL.openStream(); - Reader reader = new InputStreamReader(serviceTagsFileStream, Charset.forName("UTF-8")) - ) { - - serviceTags = gsonBuilder.fromJson(reader, ServiceTags.class); - - if (serviceTags.getTagVersion() <= lastKnownVersion) { - // No change in serviceTags - serviceTags = null; - } - } catch (IOException e) { - LOG.warn("Error processing input file: or no privilege for reading file " + serviceTagsFileName); - throw e; - } - } else { - LOG.error("Error reading file: " + serviceTagsFileName); - throw new Exception("serviceTagsFileURL is null!"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== retrieveTags(lastKnownVersion=" + lastKnownVersion + ", lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - - return serviceTags; - } - -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagEnricher.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagEnricher.java index bdc1a2793e..488fd3461e 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagEnricher.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagEnricher.java @@ -26,7 +26,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.model.RangerPolicy; import org.apache.atlas.plugin.model.RangerServiceDef; import org.apache.atlas.plugin.model.RangerServiceResource; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagForEval.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagForEval.java index 59ba56175e..3cec7dd8bc 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagForEval.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagForEval.java @@ -19,11 +19,11 @@ package org.apache.atlas.plugin.contextenricher; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.utils.JsonUtils; import org.apache.atlas.plugin.model.RangerTag; import org.apache.atlas.plugin.model.RangerValiditySchedule; import org.apache.atlas.plugin.policyevaluator.RangerValidityScheduleEvaluator; @@ -101,7 +101,7 @@ public boolean isApplicable(Date accessTime) { Object value = getOption(RangerTag.OPTION_TAG_VALIDITY_PERIODS); if (value != null && value instanceof String) { - this.validityPeriods = JsonUtils.jsonToRangerValiditySchedule((String) 
value); + this.validityPeriods = RangerUtil.jsonToRangerValiditySchedule((String) value); validityPeriodEvaluators = createValidityPeriodEvaluators(); } else { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagRetriever.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagRetriever.java index 880e820feb..7f549ffcad 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagRetriever.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerTagRetriever.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.contextenricher; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.model.RangerServiceDef; import org.apache.atlas.plugin.policyengine.RangerPluginContext; import org.apache.atlas.plugin.util.ServiceTags; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreEnricher.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreEnricher.java deleted file mode 100644 index 87eebf0f8f..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreEnricher.java +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.contextenricher; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.policyengine.RangerAccessRequest; -import org.apache.atlas.plugin.service.RangerAuthContext; -import org.apache.atlas.plugin.util.DownloadTrigger; -import org.apache.atlas.plugin.util.DownloaderTask; -import org.apache.atlas.plugin.util.RangerAccessRequestUtil; -import org.apache.atlas.plugin.util.RangerPerfTracer; -import org.apache.atlas.plugin.util.RangerUserStore; - -import java.io.File; -import java.util.Timer; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; - -public class RangerUserStoreEnricher extends RangerAbstractContextEnricher { - private static final Log LOG = LogFactory.getLog(RangerUserStoreEnricher.class); - - private static final Log PERF_SET_USERSTORE_LOG = RangerPerfTracer.getPerfLogger("userstoreenricher.setuserstore"); - - - private static final String USERSTORE_REFRESHER_POLLINGINTERVAL_OPTION = "userStoreRefresherPollingInterval"; - private static final String USERSTORE_RETRIEVER_CLASSNAME_OPTION = "userStoreRetrieverClassName"; - - private RangerUserStoreRefresher userStoreRefresher; - private RangerUserStoreRetriever userStoreRetriever; - private RangerUserStore rangerUserStore; - private boolean disableCacheIfServiceNotFound = true; - - private final BlockingQueue userStoreDownloadQueue = new LinkedBlockingQueue<>(); - private Timer userStoreDownloadTimer; - - @Override - public void init() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreEnricher.init()"); - } - - super.init(); - - String userStoreRetrieverClassName = getOption(USERSTORE_RETRIEVER_CLASSNAME_OPTION); - - long pollingIntervalMs = getLongOption(USERSTORE_REFRESHER_POLLINGINTERVAL_OPTION, 3600 * 1000); - - if (StringUtils.isNotBlank(userStoreRetrieverClassName)) { - - try { - @SuppressWarnings("unchecked") - Class userStoreRetriverClass = (Class) Class.forName(userStoreRetrieverClassName); - - userStoreRetriever = userStoreRetriverClass.newInstance(); - - } catch (ClassNotFoundException exception) { - LOG.error("Class " + userStoreRetrieverClassName + " not found, exception=" + exception); - } catch (ClassCastException exception) { - LOG.error("Class " + userStoreRetrieverClassName + " is not a type of RangerUserStoreRetriever, exception=" + exception); - } catch (IllegalAccessException exception) { - LOG.error("Class " + userStoreRetrieverClassName + " illegally accessed, exception=" + exception); - } catch (InstantiationException exception) { - LOG.error("Class " + userStoreRetrieverClassName + " could not be instantiated, exception=" + exception); - } - - if (userStoreRetriever != null) { - String propertyPrefix = "ranger.plugin." + serviceDef.getName(); - disableCacheIfServiceNotFound = getBooleanConfig(propertyPrefix + ".disable.cache.if.servicenotfound", true); - String cacheDir = getConfig(propertyPrefix + ".policy.cache.dir", null); - String cacheFilename = String.format("%s_%s_userstore.json", appId, serviceName); - - cacheFilename = cacheFilename.replace(File.separatorChar, '_'); - cacheFilename = cacheFilename.replace(File.pathSeparatorChar, '_'); - - String cacheFile = cacheDir == null ? 
null : (cacheDir + File.separator + cacheFilename); - - userStoreRetriever.setServiceName(serviceName); - userStoreRetriever.setServiceDef(serviceDef); - userStoreRetriever.setAppId(appId); - userStoreRetriever.setPluginConfig(getPluginConfig()); - userStoreRetriever.setPluginContext(getPluginContext()); - userStoreRetriever.init(enricherDef.getEnricherOptions()); - - userStoreRefresher = new RangerUserStoreRefresher(userStoreRetriever, this, null, -1L, userStoreDownloadQueue, cacheFile); - LOG.info("Created Thread(RangerUserStoreRefresher(" + getName() + ")"); - - try { - userStoreRefresher.populateUserStoreInfo(); - } catch (Throwable exception) { - LOG.error("Exception when retrieving userstore information for this enricher", exception); - } - - userStoreRefresher.setDaemon(true); - userStoreRefresher.startRefresher(); - - userStoreDownloadTimer = new Timer("userStoreDownloadTimer", true); - - try { - userStoreDownloadTimer.schedule(new DownloaderTask(userStoreDownloadQueue), pollingIntervalMs, pollingIntervalMs); - if (LOG.isDebugEnabled()) { - LOG.debug("Scheduled userStoreDownloadRefresher to download userstore every " + pollingIntervalMs + " milliseconds"); - } - } catch (IllegalStateException exception) { - LOG.error("Error scheduling userStoreDownloadTimer:", exception); - LOG.error("*** UserStore information will NOT be downloaded every " + pollingIntervalMs + " milliseconds ***"); - userStoreDownloadTimer = null; - } - } - } else { - LOG.error("No value specified for " + USERSTORE_RETRIEVER_CLASSNAME_OPTION + " in the RangerUserStoreEnricher options"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreEnricher.init()"); - } - } - - @Override - public void enrich(RangerAccessRequest request) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreEnricher.enrich(" + request + ")"); - } - - enrich(request, null); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreEnricher.enrich(" + request + ")"); - } - } - - @Override - public void enrich(RangerAccessRequest request, Object dataStore) { - - // Unused by Solr plugin as document level authorization gets RangerUserStore from AuthContext - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreEnricher.enrich(" + request + ") with dataStore:[" + dataStore + "]"); - } - final RangerUserStore rangerUserStore; - - if (dataStore instanceof RangerUserStore) { - rangerUserStore = (RangerUserStore) dataStore; - } else { - rangerUserStore = this.rangerUserStore; - - if (dataStore != null) { - LOG.warn("Incorrect type of dataStore :[" + dataStore.getClass().getName() + "], falling back to original enrich"); - } - } - - RangerAccessRequestUtil.setRequestUserStoreInContext(request.getContext(), rangerUserStore); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreEnricher.enrich(" + request + ") with dataStore:[" + dataStore + "])"); - } - } - - public boolean isDisableCacheIfServiceNotFound() { - return disableCacheIfServiceNotFound; - } - - public RangerUserStore getRangerUserStore() {return this.rangerUserStore;} - - public void setRangerUserStore(final RangerUserStore rangerUserStore) { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreEnricher.setRangerUserStore(rangerUserStore=" + rangerUserStore + ")"); - } - - if (rangerUserStore == null) { - LOG.info("UserStore information is null for service " + serviceName); - this.rangerUserStore = null; - } else { - RangerPerfTracer perf = null; - - if(RangerPerfTracer.isPerfTraceEnabled(PERF_SET_USERSTORE_LOG)) { - perf = 
RangerPerfTracer.getPerfTracer(PERF_SET_USERSTORE_LOG, "RangerUserStoreEnricher.setRangerUserStore(newUserStoreVersion=" + rangerUserStore.getUserStoreVersion() + ")"); - } - - this.rangerUserStore = rangerUserStore; - RangerPerfTracer.logAlways(perf); - } - - setRangerUserStoreInPlugin(); - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreEnricher.setRangerUserStore(rangerUserStore=" + rangerUserStore + ")"); - } - - } - - @Override - public boolean preCleanup() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreEnricher.preCleanup()"); - } - - super.preCleanup(); - - if (userStoreDownloadTimer != null) { - userStoreDownloadTimer.cancel(); - userStoreDownloadTimer = null; - } - - if (userStoreRefresher != null) { - userStoreRefresher.cleanup(); - userStoreRefresher = null; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreEnricher.preCleanup() : result=" + true); - } - return true; - } - - private void setRangerUserStoreInPlugin() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> setRangerUserStoreInPlugin()"); - } - - RangerAuthContext authContext = getAuthContext(); - - if (authContext != null) { - authContext.addOrReplaceRequestContextEnricher(this, rangerUserStore); - - notifyAuthContextChanged(); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== setRangerUserStoreInPlugin()"); - } - } - -} \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRefresher.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRefresher.java deleted file mode 100644 index e60e0a788c..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRefresher.java +++ /dev/null @@ -1,443 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
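The init() method of the deleted RangerUserStoreEnricher above instantiates its retriever purely from a configured class name, logging each reflection failure mode separately. A minimal sketch of that pattern, assuming a hypothetical Retriever interface and using getDeclaredConstructor().newInstance() in place of the older Class.newInstance():

public class RetrieverLoaderSketch {
    // Hypothetical SPI that the configured class is expected to implement.
    public interface Retriever {
        void init();
    }

    // Instantiates the configured class, reporting each failure mode separately
    // the way the enricher's init() does.
    public static Retriever load(String className) {
        if (className == null || className.trim().isEmpty()) {
            return null;
        }
        try {
            Class<? extends Retriever> clazz = Class.forName(className).asSubclass(Retriever.class);
            return clazz.getDeclaredConstructor().newInstance();
        } catch (ClassNotFoundException e) {
            System.err.println("Class " + className + " not found: " + e);
        } catch (ClassCastException e) {
            System.err.println("Class " + className + " is not a Retriever: " + e);
        } catch (ReflectiveOperationException e) {
            System.err.println("Class " + className + " could not be instantiated: " + e);
        }
        return null;
    }
}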
- */ -package org.apache.atlas.plugin.contextenricher; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.sun.jersey.api.client.ClientResponse; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.atlas.admin.client.datatype.RESTResponse; -import org.apache.atlas.audit.provider.MiscUtil; -import org.apache.atlas.plugin.util.DownloadTrigger; -import org.apache.atlas.plugin.util.RangerPerfTracer; -import org.apache.atlas.plugin.util.RangerRESTClient; -import org.apache.atlas.plugin.util.RangerRESTUtils; -import org.apache.atlas.plugin.util.RangerServiceNotFoundException; -import org.apache.atlas.plugin.util.RangerUserStore; - -import javax.servlet.http.HttpServletResponse; -import java.io.File; -import java.io.FileReader; -import java.io.FileWriter; -import java.io.Reader; -import java.io.Writer; -import java.nio.channels.ClosedByInterruptException; -import java.security.PrivilegedAction; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.BlockingQueue; - -public class RangerUserStoreRefresher extends Thread { - private static final Log LOG = LogFactory.getLog(RangerUserStoreRefresher.class); - private static final Log PERF_REFRESHER_INIT_LOG = RangerPerfTracer.getPerfLogger("userstore.init"); - - private final RangerUserStoreRetriever userStoreRetriever; - private final RangerUserStoreEnricher userStoreEnricher; - private long lastKnownVersion; - private final BlockingQueue userStoreDownloadQueue; - private long lastActivationTimeInMillis; - - private final String cacheFile; - private boolean hasProvidedUserStoreToReceiver; - private Gson gson; - private RangerRESTClient rangerRESTClient; - - public RangerUserStoreRefresher(RangerUserStoreRetriever userStoreRetriever, RangerUserStoreEnricher userStoreEnricher, - RangerRESTClient restClient, long lastKnownVersion, - BlockingQueue userStoreDownloadQueue, String cacheFile) { - this.userStoreRetriever = userStoreRetriever; - this.userStoreEnricher = userStoreEnricher; - this.rangerRESTClient = restClient; - this.lastKnownVersion = lastKnownVersion; - this.userStoreDownloadQueue = userStoreDownloadQueue; - this.cacheFile = cacheFile; - try { - gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").create(); - } catch(Throwable excp) { - LOG.fatal("failed to create GsonBuilder object", excp); - } - setName("RangerUserStoreRefresher(serviceName=" + userStoreRetriever.getServiceName() + ")-" + getId()); - } - - public long getLastActivationTimeInMillis() { - return lastActivationTimeInMillis; - } - - public void setLastActivationTimeInMillis(long lastActivationTimeInMillis) { - this.lastActivationTimeInMillis = lastActivationTimeInMillis; - } - - @Override - public void run() { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefresher().run()"); - } - - while (true) { - DownloadTrigger trigger = null; - - try { - RangerPerfTracer perf = null; - - if(RangerPerfTracer.isPerfTraceEnabled(PERF_REFRESHER_INIT_LOG)) { - perf = RangerPerfTracer.getPerfTracer(PERF_REFRESHER_INIT_LOG, - "RangerUserStoreRefresher.run(lastKnownVersion=" + lastKnownVersion + ")"); - } - trigger = userStoreDownloadQueue.take(); - populateUserStoreInfo(); - - RangerPerfTracer.log(perf); - - } catch (InterruptedException excp) { - LOG.debug("RangerUserStoreRefresher().run() : interrupted! 
Exiting thread", excp); - break; - } finally { - if (trigger != null) { - trigger.signalCompletion(); - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefresher().run()"); - } - } - - public RangerUserStore populateUserStoreInfo() throws InterruptedException { - - RangerUserStore rangerUserStore = null; - if (userStoreEnricher != null && userStoreRetriever != null) { - try { - rangerUserStore = userStoreRetriever.retrieveUserStoreInfo(lastKnownVersion, lastActivationTimeInMillis); - - if (rangerUserStore == null) { - if (!hasProvidedUserStoreToReceiver) { - rangerUserStore = loadFromCache(); - } - } - - if (rangerUserStore != null) { - userStoreEnricher.setRangerUserStore(rangerUserStore); - if (rangerUserStore.getUserStoreVersion() != -1L) { - saveToCache(rangerUserStore); - } - LOG.info("RangerUserStoreRefresher.populateUserStoreInfo() - Updated userstore-cache to new version, lastKnownVersion=" + lastKnownVersion + "; newVersion=" - + (rangerUserStore.getUserStoreVersion() == null ? -1L : rangerUserStore.getUserStoreVersion())); - hasProvidedUserStoreToReceiver = true; - lastKnownVersion = rangerUserStore.getUserStoreVersion() == null ? -1L : rangerUserStore.getUserStoreVersion(); - setLastActivationTimeInMillis(System.currentTimeMillis()); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerUserStoreRefresher.populateUserStoreInfo() - No need to update userstore-cache. lastKnownVersion=" + lastKnownVersion); - } - } - } catch (RangerServiceNotFoundException snfe) { - LOG.error("Caught ServiceNotFound exception :", snfe); - - // Need to clean up local userstore cache - if (userStoreEnricher.isDisableCacheIfServiceNotFound()) { - disableCache(); - setLastActivationTimeInMillis(System.currentTimeMillis()); - lastKnownVersion = -1L; - } - } catch (InterruptedException interruptedException) { - throw interruptedException; - } catch (Exception e) { - LOG.error("Encountered unexpected exception. Ignoring", e); - } - } else if (rangerRESTClient != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerUserStoreRefresher.populateUserStoreInfo() for Ranger Raz"); - } - try { - rangerUserStore = retrieveUserStoreInfo(); - - if (rangerUserStore == null) { - if (!hasProvidedUserStoreToReceiver) { - rangerUserStore = loadFromCache(); - } - } - - if (rangerUserStore != null) { - - if (rangerUserStore.getUserStoreVersion() != -1L) { - saveToCache(rangerUserStore); - } - LOG.info("RangerUserStoreRefresher.populateUserStoreInfo() - Updated userstore-cache for raz to new version, lastKnownVersion=" + lastKnownVersion + "; newVersion=" - + (rangerUserStore.getUserStoreVersion() == null ? -1L : rangerUserStore.getUserStoreVersion())); - hasProvidedUserStoreToReceiver = true; - lastKnownVersion = rangerUserStore.getUserStoreVersion() == null ? -1L : rangerUserStore.getUserStoreVersion(); - setLastActivationTimeInMillis(System.currentTimeMillis()); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerUserStoreRefresher.populateUserStoreInfo() - No need to update userstore-cache for raz. lastKnownVersion=" + lastKnownVersion); - } - } - }catch (InterruptedException interruptedException) { - throw interruptedException; - } catch (Exception e) { - LOG.error("Encountered unexpected exception. 
Ignoring", e); - } - } - else { - LOG.error("RangerUserStoreRefresher.populateUserStoreInfo() - no userstore receiver to update userstore-cache"); - } - return rangerUserStore; - } - - public void cleanup() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefresher.cleanup()"); - } - - stopRefresher(); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefresher.cleanup()"); - } - } - - public void startRefresher() { - try { - super.start(); - } catch (Exception excp) { - LOG.error("RangerUserStoreRefresher.startRetriever() - failed to start, exception=" + excp); - } - } - - public void stopRefresher() { - - if (super.isAlive()) { - super.interrupt(); - - boolean setInterrupted = false; - boolean isJoined = false; - - while (!isJoined) { - try { - super.join(); - isJoined = true; - } catch (InterruptedException excp) { - LOG.warn("RangerUserStoreRefresher(): Error while waiting for thread to exit", excp); - LOG.warn("Retrying Thread.join(). Current thread will be marked as 'interrupted' after Thread.join() returns"); - setInterrupted = true; - } - } - if (setInterrupted) { - Thread.currentThread().interrupt(); - } - } - } - - - private RangerUserStore loadFromCache() { - RangerUserStore rangerUserStore = null; - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefreher.loadFromCache()"); - } - - File cacheFile = StringUtils.isEmpty(this.cacheFile) ? null : new File(this.cacheFile); - - if (cacheFile != null && cacheFile.isFile() && cacheFile.canRead()) { - Reader reader = null; - - try { - reader = new FileReader(cacheFile); - - rangerUserStore = gson.fromJson(reader, RangerUserStore.class); - - } catch (Exception excp) { - LOG.error("failed to load userstore information from cache file " + cacheFile.getAbsolutePath(), excp); - } finally { - if (reader != null) { - try { - reader.close(); - } catch (Exception excp) { - LOG.error("error while closing opened cache file " + cacheFile.getAbsolutePath(), excp); - } - } - } - } else { - LOG.warn("cache file does not exist or not readable '" + (cacheFile == null ? null : cacheFile.getAbsolutePath()) + "'"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefreher.loadFromCache()"); - } - - return rangerUserStore; - } - - public void saveToCache(RangerUserStore rangerUserStore) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefreher.saveToCache()"); - } - - if (rangerUserStore != null) { - File cacheFile = StringUtils.isEmpty(this.cacheFile) ? null : new File(this.cacheFile); - - if (cacheFile != null) { - Writer writer = null; - - try { - writer = new FileWriter(cacheFile); - - gson.toJson(rangerUserStore, writer); - } catch (Exception excp) { - LOG.error("failed to save userstore information to cache file '" + cacheFile.getAbsolutePath() + "'", excp); - } finally { - if (writer != null) { - try { - writer.close(); - } catch (Exception excp) { - LOG.error("error while closing opened cache file '" + cacheFile.getAbsolutePath() + "'", excp); - } - } - } - } - } else { - LOG.info("userstore information is null. Nothing to save in cache"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefreher.saveToCache()"); - } - } - - private void disableCache() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefreher.disableCache()"); - } - - File cacheFile = StringUtils.isEmpty(this.cacheFile) ? 
null : new File(this.cacheFile); - if (cacheFile != null && cacheFile.isFile() && cacheFile.canRead()) { - LOG.warn("Cleaning up local userstore cache"); - String renamedCacheFile = cacheFile.getAbsolutePath() + "_" + System.currentTimeMillis(); - if (!cacheFile.renameTo(new File(renamedCacheFile))) { - LOG.error("Failed to move " + cacheFile.getAbsolutePath() + " to " + renamedCacheFile); - } else { - LOG.warn("moved " + cacheFile.getAbsolutePath() + " to " + renamedCacheFile); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("No local userstore cache found. No need to disable it!"); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefreher.disableCache()"); - } - } - - private RangerUserStore retrieveUserStoreInfo() throws Exception { - - RangerUserStore rangerUserStore = null; - - try { - rangerUserStore = getUserStoreIfUpdated(lastKnownVersion, lastActivationTimeInMillis); - } catch (ClosedByInterruptException closedByInterruptException) { - LOG.error("UserStore-retriever for raz thread was interrupted while blocked on I/O"); - throw new InterruptedException(); - } catch (Exception e) { - LOG.error("UserStore-retriever for raz encounterd exception, exception=", e); - LOG.error("Returning null userstore info"); - } - return rangerUserStore; - } - - private RangerUserStore getUserStoreIfUpdated(long lastKnownUserStoreVersion, long lastActivationTimeInMillis) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerUserStoreRefreher.getUserStoreIfUpdated(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + ")"); - } - - final RangerUserStore ret; - final UserGroupInformation user = MiscUtil.getUGILoginUser(); - final boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled(); - final ClientResponse response; - - Map queryParams = new HashMap(); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_KNOWN_USERSTORE_VERSION, Long.toString(lastKnownUserStoreVersion)); - queryParams.put(RangerRESTUtils.REST_PARAM_LAST_ACTIVATION_TIME, Long.toString(lastActivationTimeInMillis)); - - if (isSecureMode) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking UserStore updated as user : " + user); - } - PrivilegedAction action = new PrivilegedAction() { - public ClientResponse run() { - ClientResponse clientRes = null; - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_SERCURE_GET_USERSTORE; - try { - clientRes = rangerRESTClient.get(relativeURL, queryParams); - } catch (Exception e) { - LOG.error("Failed to get response, Error is : "+e.getMessage()); - } - return clientRes; - } - }; - response = user.doAs(action); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking UserStore updated as user : " + user); - } - String relativeURL = RangerRESTUtils.REST_URL_SERVICE_SERCURE_GET_USERSTORE; - response = rangerRESTClient.get(relativeURL, queryParams); - } - - if (response == null || response.getStatus() == HttpServletResponse.SC_NOT_MODIFIED) { - if (response == null) { - LOG.error("Error getting UserStore; Received NULL response!!. secureMode=" + isSecureMode + ", user=" + user); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - if (LOG.isDebugEnabled()) { - LOG.debug("No change in UserStore. 
secureMode=" + isSecureMode + ", user=" + user - + ", response=" + resp - + ", " + "lastKnownUserStoreVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - } - } - ret = null; - } else if (response.getStatus() == HttpServletResponse.SC_OK) { - ret = response.getEntity(RangerUserStore.class); - } else if (response.getStatus() == HttpServletResponse.SC_NOT_FOUND) { - ret = null; - LOG.error("Error getting UserStore; service not found. secureMode=" + isSecureMode + ", user=" + user - + ", response=" + response.getStatus() - + ", " + "lastKnownUserStoreVersion=" + lastKnownUserStoreVersion - + ", " + "lastActivationTimeInMillis=" + lastActivationTimeInMillis); - String exceptionMsg = response.hasEntity() ? response.getEntity(String.class) : null; - LOG.warn("Received 404 error code with body:[" + exceptionMsg + "], Ignoring"); - } else { - RESTResponse resp = RESTResponse.fromClientResponse(response); - LOG.warn("Error getting UserStore. secureMode=" + isSecureMode + ", user=" + user + ", response=" + resp); - ret = null; - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerUserStoreRefreher.getUserStoreIfUpdated(" + lastKnownUserStoreVersion + ", " + lastActivationTimeInMillis + "): "); - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRetriever.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRetriever.java deleted file mode 100644 index 94d4d25041..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/contextenricher/RangerUserStoreRetriever.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
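The RangerUserStoreRefresher removed above follows a common shape: a daemon thread blocks on a trigger queue, performs one download per trigger, always signals completion in a finally block, and is shut down by interrupt plus join. A compact, self-contained sketch of that loop, with a hypothetical Trigger class standing in for DownloadTrigger:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

public class RefresherSketch extends Thread {
    // Hypothetical stand-in for DownloadTrigger: lets the producer wait for one refresh.
    public static class Trigger {
        private final CountDownLatch done = new CountDownLatch(1);
        void signalCompletion() { done.countDown(); }
        void waitForCompletion() throws InterruptedException { done.await(); }
    }

    private final BlockingQueue<Trigger> queue = new LinkedBlockingQueue<>();

    public RefresherSketch() {
        setDaemon(true);
    }

    // Producer side: ask for a refresh and wait until the refresher has handled it.
    public void requestRefresh() throws InterruptedException {
        Trigger t = new Trigger();
        queue.put(t);
        t.waitForCompletion();
    }

    @Override
    public void run() {
        while (true) {
            Trigger trigger = null;
            try {
                trigger = queue.take();   // block until someone asks for a refresh
                refreshOnce();
            } catch (InterruptedException e) {
                break;                    // stopRefresher() interrupted us: exit the loop
            } finally {
                if (trigger != null) {
                    trigger.signalCompletion();
                }
            }
        }
    }

    private void refreshOnce() {
        // placeholder for "download the userstore and hand it to the enricher"
    }

    // Mirrors stopRefresher(): interrupt, then join until the thread has exited.
    public void stopRefresher() throws InterruptedException {
        if (isAlive()) {
            interrupt();
            join();
        }
    }
}

Signalling inside the finally block keeps a caller of requestRefresh() from hanging when the download throws, which is the same reason the removed class signals its DownloadTrigger there.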
- */ - -package org.apache.atlas.plugin.contextenricher; - -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.policyengine.RangerPluginContext; -import org.apache.atlas.plugin.util.RangerUserStore; - -import java.util.Map; - -public abstract class RangerUserStoreRetriever { - - protected String serviceName; - protected RangerServiceDef serviceDef; - protected String appId; - protected RangerPluginConfig pluginConfig; - protected RangerPluginContext pluginContext; - - public abstract void init(Map options); - - public abstract RangerUserStore retrieveUserStoreInfo(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception; - - public String getServiceName() { - return serviceName; - } - - public void setServiceName(String serviceName) { - this.serviceName = serviceName; - } - - public RangerServiceDef getServiceDef() { - return serviceDef; - } - - public void setServiceDef(RangerServiceDef serviceDef) { - this.serviceDef = serviceDef; - } - - public String getAppId() { - return appId; - } - - public void setAppId(String appId) { - this.appId = appId; - } - - public void setPluginConfig(RangerPluginConfig pluginConfig) { this.pluginConfig = pluginConfig; } - - public RangerPluginContext getPluginContext() { - return pluginContext; - } - - public void setPluginContext(RangerPluginContext pluginContext) { - this.pluginContext = pluginContext; - } -} \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/AuditFilter.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/AuditFilter.java index 4a1d9ecb76..97281e1c51 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/AuditFilter.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/AuditFilter.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; import javax.xml.bind.annotation.XmlAccessType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java index ea5630744e..8ab39fe7b2 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/GroupInfo.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.util.RangerUserStoreUtil; import javax.xml.bind.annotation.XmlAccessType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java index aba6661426..ef29f1f2e6 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerBaseModelObject.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import 
javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerMetrics.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerMetrics.java index c6210df5e1..d9ac642fc4 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerMetrics.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerMetrics.java @@ -20,7 +20,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPluginInfo.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPluginInfo.java deleted file mode 100644 index 0e67119453..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPluginInfo.java +++ /dev/null @@ -1,452 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
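Several model classes touched above (AuditFilter, GroupInfo, RangerBaseModelObject, RangerMetrics) only swap the shaded org.apache.htrace.shaded.fasterxml JsonInclude import for the standard com.fasterxml one; the annotation usage itself is unchanged. A minimal illustration of its effect, assuming Jackson 2.x databind on the classpath and a throwaway Group class:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonIncludeSketch {
    // Same annotation the model classes use, now from the standard Jackson package.
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class Group {
        public String name;
        public String description; // left null below, so it is omitted from the JSON
    }

    public static void main(String[] args) throws Exception {
        Group g = new Group();
        g.name = "data-stewards";
        // prints {"name":"data-stewards"} -- no "description" key
        System.out.println(new ObjectMapper().writeValueAsString(g));
    }
}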
- */ - -package org.apache.atlas.plugin.model; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import org.apache.commons.lang.StringUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.io.Serializable; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -@JsonInclude(JsonInclude.Include.NON_NULL) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class RangerPluginInfo implements Serializable { - private static final long serialVersionUID = 1L; - - public static final int ENTITY_TYPE_POLICIES = 0; - public static final int ENTITY_TYPE_TAGS = 1; - public static final int ENTITY_TYPE_ROLES = 2; - public static final int ENTITY_TYPE_USERSTORE = 3; - - public static final String PLUGIN_INFO_POLICY_DOWNLOAD_TIME = "policyDownloadTime"; - public static final String PLUGIN_INFO_POLICY_DOWNLOADED_VERSION = "policyDownloadedVersion"; - public static final String PLUGIN_INFO_POLICY_ACTIVATION_TIME = "policyActivationTime"; - public static final String PLUGIN_INFO_POLICY_ACTIVE_VERSION = "policyActiveVersion"; - public static final String PLUGIN_INFO_TAG_DOWNLOAD_TIME = "tagDownloadTime"; - public static final String PLUGIN_INFO_TAG_DOWNLOADED_VERSION = "tagDownloadedVersion"; - public static final String PLUGIN_INFO_TAG_ACTIVATION_TIME = "tagActivationTime"; - public static final String PLUGIN_INFO_TAG_ACTIVE_VERSION = "tagActiveVersion"; - - public static final String PLUGIN_INFO_ROLE_DOWNLOAD_TIME = "roleDownloadTime"; - public static final String PLUGIN_INFO_ROLE_DOWNLOADED_VERSION = "roleDownloadedVersion"; - public static final String PLUGIN_INFO_ROLE_ACTIVATION_TIME = "roleActivationTime"; - public static final String PLUGIN_INFO_ROLE_ACTIVE_VERSION = "roleActiveVersion"; - - public static final String PLUGIN_INFO_USERSTORE_DOWNLOAD_TIME = "userstoreDownloadTime"; - public static final String PLUGIN_INFO_USERSTORE_DOWNLOADED_VERSION = "userstoreDownloadedVersion"; - public static final String PLUGIN_INFO_USERSTORE_ACTIVATION_TIME = "userstoreActivationTime"; - public static final String PLUGIN_INFO_USERSTORE_ACTIVE_VERSION = "userstoreActiveVersion"; - - public static final String RANGER_ADMIN_LAST_POLICY_UPDATE_TIME = "lastPolicyUpdateTime"; - public static final String RANGER_ADMIN_LATEST_POLICY_VERSION = "latestPolicyVersion"; - public static final String RANGER_ADMIN_LAST_TAG_UPDATE_TIME = "lastTagUpdateTime"; - public static final String RANGER_ADMIN_LATEST_TAG_VERSION = "latestTagVersion"; - - public static final String RANGER_ADMIN_CAPABILITIES = "adminCapabilities"; - public static final String PLUGIN_INFO_CAPABILITIES = "pluginCapabilities"; - - private Long id; - private Date createTime; - private Date updateTime; - - private String serviceName; - private String serviceDisplayName; - private String serviceType; - private String serviceTypeDisplayName; - private String hostName; - private String appType; - private String ipAddress; - private Map info; - - //FIXME UNUSED - public RangerPluginInfo(Long id, Date createTime, Date updateTime, String serviceName, String appType, String hostName, String ipAddress, Map info) { - super(); - - setId(id); - setCreateTime(createTime); - setUpdateTime(updateTime); - setServiceName(serviceName); - setAppType(appType); - setHostName(hostName); - setIpAddress(ipAddress); - setInfo(info); - } - - //FIXME UNUSED - 
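The @JsonIgnore accessors that follow keep every download/activation time and version in the info map as a String and parse it back on read. A stripped-down sketch of that round-trip, using a hypothetical PluginInfoMapSketch class:

import java.util.HashMap;
import java.util.Map;

public class PluginInfoMapSketch {
    // All metrics live in one string map, as in RangerPluginInfo.getInfo().
    private final Map<String, String> info = new HashMap<>();

    // Stores a Long as its decimal string, or null when absent, mirroring the setters above.
    public void setPolicyDownloadTime(Long time) {
        info.put("policyDownloadTime", time == null ? null : Long.toString(time));
    }

    // Parses the value back, returning null when absent or blank, mirroring the getters above.
    public Long getPolicyDownloadTime() {
        String s = info.get("policyDownloadTime");
        return (s == null || s.trim().isEmpty()) ? null : Long.valueOf(s);
    }
}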
public RangerPluginInfo() { - this(null, null, null, null, null, null, null, null); - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getServiceType() { - return serviceType; - } - - public void setServiceType(String serviceType) { - this.serviceType = serviceType; - } - - public String getServiceTypeDisplayName() { - return serviceTypeDisplayName; - } - - public void setServiceTypeDisplayName(String serviceTypeDisplayName) { - this.serviceTypeDisplayName = serviceTypeDisplayName; - } - - public String getServiceName() { - return serviceName; - } - - public void setServiceName(String serviceName) { - this.serviceName = serviceName; - } - - public String getServiceDisplayName() { - return serviceDisplayName; - } - - public void setServiceDisplayName(String serviceDisplayName) { - this.serviceDisplayName = serviceDisplayName; - } - - public String getHostName() { - return hostName; - } - - public void setHostName(String hostName) { - this.hostName = hostName; - } - - public String getAppType() { - return appType; - } - - public void setAppType(String appType) { - this.appType = appType; - } - - public String getIpAddress() { - return ipAddress; - } - - public void setIpAddress(String ipAddress) { - this.ipAddress = ipAddress; - } - - public Map getInfo() { - return info; - } - - public void setInfo(Map info) { - this.info = info == null ? new HashMap() : info; - } - - @JsonIgnore - public void setPolicyDownloadTime(Long policyDownloadTime) { - getInfo().put(PLUGIN_INFO_POLICY_DOWNLOAD_TIME, policyDownloadTime == null ? null : Long.toString(policyDownloadTime)); - } - - @JsonIgnore - public Long getPolicyDownloadTime() { - String downloadTimeString = getInfo().get(PLUGIN_INFO_POLICY_DOWNLOAD_TIME); - return StringUtils.isNotBlank(downloadTimeString) ? Long.valueOf(downloadTimeString) : null; - } - - @JsonIgnore - public void setPolicyDownloadedVersion(Long policyDownloadedVersion) { - getInfo().put(PLUGIN_INFO_POLICY_DOWNLOADED_VERSION, policyDownloadedVersion == null ? null : Long.toString(policyDownloadedVersion)); - } - - @JsonIgnore - public Long getPolicyDownloadedVersion() { - String downloadedVersionString = getInfo().get(PLUGIN_INFO_POLICY_DOWNLOADED_VERSION); - return StringUtils.isNotBlank(downloadedVersionString) ? Long.valueOf(downloadedVersionString) : null; - } - - @JsonIgnore - public void setPolicyActivationTime(Long policyActivationTime) { - getInfo().put(PLUGIN_INFO_POLICY_ACTIVATION_TIME, policyActivationTime == null ? null : Long.toString(policyActivationTime)); - } - - @JsonIgnore - public Long getPolicyActivationTime() { - String activationTimeString = getInfo().get(PLUGIN_INFO_POLICY_ACTIVATION_TIME); - return StringUtils.isNotBlank(activationTimeString) ? Long.valueOf(activationTimeString) : null; - } - - @JsonIgnore - public void setPolicyActiveVersion(Long policyActiveVersion) { - getInfo().put(PLUGIN_INFO_POLICY_ACTIVE_VERSION, policyActiveVersion == null ? null : Long.toString(policyActiveVersion)); - } - - @JsonIgnore - public Long getPolicyActiveVersion() { - String activeVersionString = getInfo().get(PLUGIN_INFO_POLICY_ACTIVE_VERSION); - return StringUtils.isNotBlank(activeVersionString) ? 
Long.valueOf(activeVersionString) : null; - } - - @JsonIgnore - public void setTagDownloadTime(Long tagDownloadTime) { - getInfo().put(PLUGIN_INFO_TAG_DOWNLOAD_TIME, tagDownloadTime == null ? null : Long.toString(tagDownloadTime)); - } - - @JsonIgnore - public Long getTagDownloadTime() { - String downloadTimeString = getInfo().get(PLUGIN_INFO_TAG_DOWNLOAD_TIME); - return StringUtils.isNotBlank(downloadTimeString) ? Long.valueOf(downloadTimeString) : null; - } - - @JsonIgnore - public void setTagDownloadedVersion(Long tagDownloadedVersion) { - getInfo().put(PLUGIN_INFO_TAG_DOWNLOADED_VERSION, tagDownloadedVersion == null ? null : Long.toString(tagDownloadedVersion)); - } - - @JsonIgnore - public Long getTagDownloadedVersion() { - String downloadedVersion = getInfo().get(PLUGIN_INFO_TAG_DOWNLOADED_VERSION); - return StringUtils.isNotBlank(downloadedVersion) ? Long.valueOf(downloadedVersion) : null; - } - - @JsonIgnore - public void setTagActivationTime(Long tagActivationTime) { - getInfo().put(PLUGIN_INFO_TAG_ACTIVATION_TIME, tagActivationTime == null ? null : Long.toString(tagActivationTime)); - } - - @JsonIgnore - public Long getTagActivationTime() { - String activationTimeString = getInfo().get(PLUGIN_INFO_TAG_ACTIVATION_TIME); - return StringUtils.isNotBlank(activationTimeString) ? Long.valueOf(activationTimeString) : null; - } - - @JsonIgnore - public void setTagActiveVersion(Long tagActiveVersion) { - getInfo().put(PLUGIN_INFO_TAG_ACTIVE_VERSION, tagActiveVersion == null ? null : Long.toString(tagActiveVersion)); - } - - @JsonIgnore - public Long getTagActiveVersion() { - String activeVersionString = getInfo().get(PLUGIN_INFO_TAG_ACTIVE_VERSION); - return StringUtils.isNotBlank(activeVersionString) ? Long.valueOf(activeVersionString) : null; - } - - @JsonIgnore - public Long getLatestPolicyVersion() { - String latestPolicyVersionString = getInfo().get(RANGER_ADMIN_LATEST_POLICY_VERSION); - return StringUtils.isNotBlank(latestPolicyVersionString) ? Long.valueOf(latestPolicyVersionString) : null; - } - - @JsonIgnore - public Long getLastPolicyUpdateTime() { - String updateTimeString = getInfo().get(RANGER_ADMIN_LAST_POLICY_UPDATE_TIME); - return StringUtils.isNotBlank(updateTimeString) ? Long.valueOf(updateTimeString) : null; - } - - @JsonIgnore - public Long getLatestTagVersion() { - String latestTagVersionString = getInfo().get(RANGER_ADMIN_LATEST_TAG_VERSION); - return StringUtils.isNotBlank(latestTagVersionString) ? Long.valueOf(latestTagVersionString) : null; - } - - @JsonIgnore - public Long getLastTagUpdateTime() { - String updateTimeString = getInfo().get(RANGER_ADMIN_LAST_TAG_UPDATE_TIME); - return StringUtils.isNotBlank(updateTimeString) ? Long.valueOf(updateTimeString) : null; - } - - @JsonIgnore - public void setRoleDownloadTime(Long roleDownloadTime) { - getInfo().put(PLUGIN_INFO_ROLE_DOWNLOAD_TIME, roleDownloadTime == null ? null : Long.toString(roleDownloadTime)); - } - - @JsonIgnore - public Long getRoleDownloadTime() { - String downloadTimeString = getInfo().get(PLUGIN_INFO_ROLE_DOWNLOAD_TIME); - return StringUtils.isNotBlank(downloadTimeString) ? Long.valueOf(downloadTimeString) : null; - } - - @JsonIgnore - public void setRoleDownloadedVersion(Long roleDownloadedVersion) { - getInfo().put(PLUGIN_INFO_ROLE_DOWNLOADED_VERSION, roleDownloadedVersion == null ? 
null : Long.toString(roleDownloadedVersion)); - } - - @JsonIgnore - public Long getRoleDownloadedVersion() { - String downloadedVersionString = getInfo().get(PLUGIN_INFO_ROLE_DOWNLOADED_VERSION); - return StringUtils.isNotBlank(downloadedVersionString) ? Long.valueOf(downloadedVersionString) : null; - } - - @JsonIgnore - public void setRoleActivationTime(Long roleActivationTime) { - getInfo().put(PLUGIN_INFO_ROLE_ACTIVATION_TIME, roleActivationTime == null ? null : Long.toString(roleActivationTime)); - } - - @JsonIgnore - public Long getRoleActivationTime() { - String activationTimeString = getInfo().get(PLUGIN_INFO_ROLE_ACTIVATION_TIME); - return StringUtils.isNotBlank(activationTimeString) ? Long.valueOf(activationTimeString) : null; - } - - @JsonIgnore - public void setRoleActiveVersion(Long roleActiveVersion) { - getInfo().put(PLUGIN_INFO_ROLE_ACTIVE_VERSION, roleActiveVersion == null ? null : Long.toString(roleActiveVersion)); - } - - @JsonIgnore - public Long getRoleActiveVersion() { - String activeVersionString = getInfo().get(PLUGIN_INFO_POLICY_ACTIVE_VERSION); - return StringUtils.isNotBlank(activeVersionString) ? Long.valueOf(activeVersionString) : null; - } - - @JsonIgnore - public void setUserStoreDownloadTime(Long userstoreDownloadTime) { - getInfo().put(PLUGIN_INFO_USERSTORE_DOWNLOAD_TIME, userstoreDownloadTime == null ? null : Long.toString(userstoreDownloadTime)); - } - - @JsonIgnore - public Long getUserStoreDownloadTime() { - String downloadTimeString = getInfo().get(PLUGIN_INFO_USERSTORE_DOWNLOAD_TIME); - return StringUtils.isNotBlank(downloadTimeString) ? Long.valueOf(downloadTimeString) : null; - } - - @JsonIgnore - public void setUserStoreDownloadedVersion(Long userstoreDownloadedVersion) { - getInfo().put(PLUGIN_INFO_USERSTORE_DOWNLOADED_VERSION, userstoreDownloadedVersion == null ? null : Long.toString(userstoreDownloadedVersion)); - } - - @JsonIgnore - public Long getUserStoreDownloadedVersion() { - String downloadedVersionString = getInfo().get(PLUGIN_INFO_USERSTORE_DOWNLOADED_VERSION); - return StringUtils.isNotBlank(downloadedVersionString) ? Long.valueOf(downloadedVersionString) : null; - } - - @JsonIgnore - public void setUserStoreActivationTime(Long userstoreActivationTime) { - getInfo().put(PLUGIN_INFO_USERSTORE_ACTIVATION_TIME, userstoreActivationTime == null ? null : Long.toString(userstoreActivationTime)); - } - - @JsonIgnore - public Long getUserStoreActivationTime() { - String activationTimeString = getInfo().get(PLUGIN_INFO_USERSTORE_ACTIVATION_TIME); - return StringUtils.isNotBlank(activationTimeString) ? Long.valueOf(activationTimeString) : null; - } - - @JsonIgnore - public void setUserStoreActiveVersion(Long userstoreActiveVersion) { - getInfo().put(PLUGIN_INFO_USERSTORE_ACTIVE_VERSION, userstoreActiveVersion == null ? null : Long.toString(userstoreActiveVersion)); - } - - @JsonIgnore - public Long getUserStoreActiveVersion() { - String activeVersionString = getInfo().get(PLUGIN_INFO_USERSTORE_ACTIVE_VERSION); - return StringUtils.isNotBlank(activeVersionString) ? 
Long.valueOf(activeVersionString) : null; - } - - @JsonIgnore - public void setPluginCapabilities(String capabilities) { - setCapabilities(PLUGIN_INFO_CAPABILITIES, capabilities); - } - - @JsonIgnore - public String getPluginCapabilities() { - return getCapabilities(PLUGIN_INFO_CAPABILITIES); - } - - @JsonIgnore - public void setAdminCapabilities(String capabilities) { - setCapabilities(RANGER_ADMIN_CAPABILITIES, capabilities); - } - - @JsonIgnore - public String getAdminCapabilities() { - return getCapabilities(RANGER_ADMIN_CAPABILITIES); - } - - @JsonIgnore - private void setCapabilities(String optionName, String capabilities) { - getInfo().put(optionName, capabilities == null ? null : capabilities); - } - - @JsonIgnore - private String getCapabilities(String optionName) { - String capabilitiesString = getInfo().get(optionName); - return StringUtils.isNotBlank(capabilitiesString) ? capabilitiesString : null; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - - toString(sb); - - return sb.toString(); - } - - public StringBuilder toString(StringBuilder sb) { - sb.append("RangerPluginInfo={"); - - sb.append("id={").append(id).append("} "); - sb.append("createTime={").append(createTime).append("} "); - sb.append("updateTime={").append(updateTime).append("} "); - sb.append("serviceName={").append(serviceName).append("} "); - sb.append("serviceType={").append(serviceType).append("} "); - sb.append("serviceTypeDisplayName{").append(serviceTypeDisplayName).append("} "); - sb.append("serviceDisplayName={").append(serviceDisplayName).append("} "); - sb.append("hostName={").append(hostName).append("} "); - sb.append("appType={").append(appType).append("} "); - sb.append("ipAddress={").append(ipAddress).append("} "); - sb.append("info={").append(info).append("} "); - - sb.append(" }"); - - return sb; - } -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java index f81f8e2854..ba2f210b97 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicy.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.model; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.commons.collections.CollectionUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -32,7 +32,7 @@ import java.util.Map; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerPolicy extends RangerBaseModelObject implements java.io.Serializable { @@ -678,7 +678,7 @@ public int compare(RangerPolicy me, RangerPolicy other) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyResource implements java.io.Serializable { @@ -844,7 +844,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyItem implements java.io.Serializable { @@ -1135,7 +1135,7 @@ public boolean equals(Object obj) { 
} } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerDataMaskPolicyItem extends RangerPolicyItem implements java.io.Serializable { @@ -1220,7 +1220,7 @@ public StringBuilder toString(StringBuilder sb) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerRowFilterPolicyItem extends RangerPolicyItem implements java.io.Serializable { @@ -1305,7 +1305,7 @@ public StringBuilder toString(StringBuilder sb) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyItemAccess implements java.io.Serializable { @@ -1407,7 +1407,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyItemCondition implements java.io.Serializable { @@ -1523,7 +1523,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyItemDataMaskInfo implements java.io.Serializable { @@ -1630,7 +1630,7 @@ public StringBuilder toString(StringBuilder sb) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyItemRowFilterInfo implements java.io.Serializable { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyDelta.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyDelta.java index adfe24af1d..c5eba66a5b 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyDelta.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyDelta.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonIgnore; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -34,12 +34,6 @@ public class RangerPolicyDelta implements java.io.Serializable { public static final int CHANGE_TYPE_POLICY_CREATE = 0; public static final int CHANGE_TYPE_POLICY_UPDATE = 1; public static final int CHANGE_TYPE_POLICY_DELETE = 2; - public static final int CHANGE_TYPE_SERVICE_CHANGE = 3; - public static final int CHANGE_TYPE_SERVICE_DEF_CHANGE = 4; - public static final int CHANGE_TYPE_RANGER_ADMIN_START = 5; - public static final int CHANGE_TYPE_LOG_ERROR = 6; - public static final int CHANGE_TYPE_INVALIDATE_POLICY_DELTAS = 7; - public static final int CHANGE_TYPE_ROLE_UPDATE = 8; private static String[] changeTypeNames = { "POLICY_CREATE", "POLICY_UPDATE", "POLICY_DELETE", "SERVICE_CHANGE", "SERVICE_DEF_CHANGE", "RANGER_ADMIN_START", "LOG_ERROR", "INVALIDATE_POLICY_DELTAS", "ROLE_UPDATE" }; diff --git 
a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyResourceSignature.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyResourceSignature.java index 5cfcdd1fed..10f220e43f 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyResourceSignature.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerPolicyResourceSignature.java @@ -24,7 +24,7 @@ import org.apache.commons.collections.MapUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerAdminConfig; +import org.apache.atlas.authorization.config.RangerAdminConfig; import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItemCondition; import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; import org.apache.commons.lang.StringUtils; @@ -57,24 +57,6 @@ public RangerPolicyResourceSignature(RangerPolicy policy) { } } - /** - * Only added for testability. Do not make public - * @param string - */ - RangerPolicyResourceSignature(String string) { - _policy = null; - if (string == null) { - _string = ""; - } else { - _string = string; - } - if (RangerAdminConfig.getInstance().isFipsEnabled()) { - _hash = DigestUtils.sha384Hex(_string); - } else { - _hash = DigestUtils.sha256Hex(_string); - } - } - String asString() { return _string; } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java index 1ae6970dbe..8e47316240 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerRole.java @@ -20,7 +20,7 @@ package org.apache.atlas.plugin.model; import org.apache.commons.collections.MapUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerService.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerService.java index 99e6de6e7d..ed64ed5686 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerService.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerService.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java index 4c5d8988d0..f7d331f44a 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceDef.java @@ -19,7 +19,8 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; + +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -33,7 +34,7 @@ import 
java.util.Set; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerServiceDef extends RangerBaseModelObject implements java.io.Serializable { @@ -529,7 +530,7 @@ public StringBuilder toString(StringBuilder sb) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerEnumDef implements java.io.Serializable { @@ -695,7 +696,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerEnumElementDef implements java.io.Serializable { @@ -842,7 +843,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerServiceConfigDef implements java.io.Serializable { @@ -1228,7 +1229,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerResourceDef implements java.io.Serializable { @@ -1801,7 +1802,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerAccessTypeDef implements java.io.Serializable { @@ -2001,7 +2002,7 @@ public boolean equals(Object obj) { } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerPolicyConditionDef implements java.io.Serializable { @@ -2354,7 +2355,7 @@ public boolean equals(Object obj) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerContextEnricherDef implements java.io.Serializable { @@ -2503,7 +2504,7 @@ public boolean equals(Object obj) { - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerDataMaskDef implements java.io.Serializable { @@ -2678,7 +2679,7 @@ public boolean equals(Object obj) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerDataMaskTypeDef implements java.io.Serializable { @@ -2927,7 +2928,7 @@ public boolean equals(Object obj) { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RangerRowFilterDef implements java.io.Serializable { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceResource.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceResource.java index 4adf098fec..80886ff614 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceResource.java +++ 
b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerServiceResource.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTag.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTag.java index 252d21d2d2..2acd4f0e29 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTag.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTag.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagDef.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagDef.java index 849fac1a0a..f8c587cc2a 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagDef.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagDef.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagResourceMap.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagResourceMap.java index a68e33e11b..8d2b2f4897 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagResourceMap.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerTagResourceMap.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java index 449a3d356f..eb4a6c6542 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/RangerValidityRecurrence.java @@ -19,20 +19,20 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import java.io.Serializable; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerValidityRecurrence implements Serializable { - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) 
@XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class ValidityInterval { @@ -74,7 +74,7 @@ public String toString() { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class RecurrenceSchedule { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/ServiceDeleteResponse.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/ServiceDeleteResponse.java deleted file mode 100644 index ee574f49d9..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/ServiceDeleteResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.model; - -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; - -@JsonInclude(JsonInclude.Include.NON_NULL) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class ServiceDeleteResponse implements java.io.Serializable { - /** - * - */ - private static final long serialVersionUID = 1L; - - private String serviceName; - private Long serviceId; - private Boolean isDeleted; - private String errorMsg; - - public ServiceDeleteResponse(Long serviceId) { - this.serviceId = serviceId; - } - - public String getServiceName() { - return serviceName; - } - public void setServiceName(String serviceName) { - this.serviceName = serviceName; - } - public Long getServiceId() { - return serviceId; - } - public void setServiceId(Long serviceId) { - this.serviceId = serviceId; - } - public Boolean getIsDeleted() { - return isDeleted; - } - public void setIsDeleted(Boolean isDeleted) { - this.isDeleted = isDeleted; - } - public String getErrorMsg() { - return errorMsg; - } - public void setErrorMsg(String errorMsg) { - this.errorMsg = errorMsg; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java index 4a690a0a29..c8859bff4f 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/UserInfo.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.model; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.util.RangerUserStoreUtil; import javax.xml.bind.annotation.XmlAccessType; diff --git 
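Several of the model classes above move from the htrace-shaded copy of the Jackson annotations to com.fasterxml.jackson directly. The practical difference is that jackson-databind only recognises its own annotation types; an annotation with the same simple name from another package is silently ignored at serialization time. A self-contained sketch of that effect, using a locally declared stand-in annotation (names are illustrative; jackson-databind 2.x assumed):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    public class ShadedAnnotationSketch {
        // Stand-in for the shaded org.apache.htrace...JsonInclude: same simple name,
        // different package, so the ObjectMapper does not recognise it.
        @Retention(RetentionPolicy.RUNTIME)
        @Target(ElementType.TYPE)
        @interface JsonInclude { }

        @JsonInclude // ignored by jackson-databind: null fields are still written
        static class WithForeignAnnotation {
            public String name = "role";
            public String description; // null
        }

        @com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)
        static class WithRealAnnotation {
            public String name = "role";
            public String description; // null, omitted from the output
        }

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // First line still contains "description":null, second line drops it
            System.out.println(mapper.writeValueAsString(new WithForeignAnnotation()));
            System.out.println(mapper.writeValueAsString(new WithRealAnnotation()));
        }
    }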
a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerPolicyValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerPolicyValidator.java deleted file mode 100644 index a0594aa127..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerPolicyValidator.java +++ /dev/null @@ -1,1062 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.model.validation; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerPolicy.RangerDataMaskPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItemAccess; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; -import org.apache.atlas.plugin.model.RangerPolicy.RangerRowFilterPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicyResourceSignature; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerAccessTypeDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; -import org.apache.atlas.plugin.model.RangerValiditySchedule; -import org.apache.atlas.plugin.store.ServiceStore; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class RangerPolicyValidator extends RangerValidator { - - private static final Log LOG = LogFactory.getLog(RangerPolicyValidator.class); - - public RangerPolicyValidator(ServiceStore store) { - super(store); - } - - public void validate(RangerPolicy policy, Action action, boolean isAdmin) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validate(%s, %s, %s)", policy, action, isAdmin)); - } - - List failures = new ArrayList<>(); - boolean valid = isValid(policy, action, isAdmin, failures); - String message = ""; - try { - if (!valid) { - message = serializeFailures(failures); - throw new Exception(message); - } - } finally { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validate(%s, %s, %s): %s, reason[%s]", policy, action, isAdmin, valid, message)); - } - } - } - - @Override - boolean isValid(Long id, Action action, List failures) { - if(LOG.isDebugEnabled()) { - 
LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", id, action, failures)); - } - - boolean valid = true; - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_UNSUPPORTED_ACTION; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (id == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .becauseOf("policy id was null/missing") - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage("id")) - .build()); - valid = false; - } else if (policyExists(id)) { - if (LOG.isDebugEnabled()) { - LOG.debug("No policy found for id[" + id + "]! ok!"); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s): %s", id, action, failures, valid)); - } - return valid; - } - - boolean isValid(RangerPolicy policy, Action action, boolean isAdmin, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s, %s)", policy, action, isAdmin, failures)); - } - - if (!(action == Action.CREATE || action == Action.UPDATE)) { - throw new IllegalArgumentException("isValid(RangerPolicy, ...) is only supported for create/update"); - } - boolean valid = true; - if (policy == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_NULL_POLICY_OBJECT; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy") - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - Integer priority = policy.getPolicyPriority(); - if (priority != null) { - if (priority < RangerPolicy.POLICY_PRIORITY_NORMAL || priority > RangerPolicy.POLICY_PRIORITY_OVERRIDE) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_INVALID_PRIORITY; - failures.add(new ValidationFailureDetailsBuilder() - .field("policyPriority") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage("out of range")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - Long id = policy.getId(); - RangerPolicy existingPolicy = null; - - if (action == Action.UPDATE) { // id is ignored for CREATE - if (id == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .becauseOf(error.getMessage("id")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - - existingPolicy = getPolicy(id); - - if (existingPolicy == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_POLICY_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(id)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - String policyName = policy.getName(); - String serviceName = policy.getService(); - String policyServicetype = policy.getServiceType(); - String zoneName = policy.getZoneName(); - - RangerService service = null; - RangerSecurityZone zone = null; - boolean serviceNameValid = false; - if (StringUtils.isBlank(serviceName)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new 
ValidationFailureDetailsBuilder() - .field("service name") - .isMissing() - .becauseOf(error.getMessage("service name")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - service = getService(serviceName); - if (service == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_SERVICE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("service name") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(serviceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - serviceNameValid = true; - - String serviceType = service.getType(); - - if (StringUtils.isNotEmpty(serviceType) && StringUtils.isNotEmpty(policyServicetype)) { - if (!serviceType.equalsIgnoreCase(policyServicetype)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_SERVICE_TYPE; - - failures.add(new ValidationFailureDetailsBuilder() - .field("service type") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(policyServicetype,serviceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - - if (StringUtils.isNotEmpty(zoneName)) { - zone = getSecurityZone(zoneName); - if (zone == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_NONEXISTANT_ZONE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("zoneName") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(id, zoneName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - List tagSvcList = zone.getTagServices(); - Set svcNameSet = zone.getServices().keySet(); - if(!svcNameSet.contains(serviceName) && !tagSvcList.contains(serviceName)){ - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_SERVICE_NOT_ASSOCIATED_TO_ZONE; - failures.add(new ValidationFailureDetailsBuilder() - .field("zoneName") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(serviceName, zoneName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - - if (StringUtils.isBlank(policyName)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isMissing() - .becauseOf(error.getMessage("name")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - if (service != null && (StringUtils.isEmpty(zoneName) || zone != null)) { - Long zoneId = zone != null ? 
zone.getId() : RangerSecurityZone.RANGER_UNZONED_SECURITY_ZONE_ID; - Long policyId = getPolicyId(service.getId(), policyName, zoneId); - - if (policyId != null) { - if (action == Action.CREATE) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_NAME_CONFLICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy name") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(policyId, serviceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (!policyId.equals(id)) { // action == UPDATE - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_NAME_CONFLICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("id/name") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(policyId, serviceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - - if(existingPolicy != null) { - if (!StringUtils.equalsIgnoreCase(existingPolicy.getService(), policy.getService())) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_UPDATE_MOVE_SERVICE_NOT_ALLOWED; - failures.add(new ValidationFailureDetailsBuilder() - .field("service name") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(policy.getId(), existingPolicy.getService(), policy.getService())) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - - String existingPolicyType = StringUtils.isEmpty(existingPolicy.getPolicyType()) ? RangerPolicy.POLICY_TYPE_ACCESS : existingPolicy.getPolicyType(); - String policyType = StringUtils.isEmpty(policy.getPolicyType()) ? RangerPolicy.POLICY_TYPE_ACCESS : policy.getPolicyType(); - - if (existingPolicyType != policyType) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_TYPE_CHANGE_NOT_ALLOWED; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy type") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(policy.getId(), existingPolicyType, policyType)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - - String existingZoneName = existingPolicy.getZoneName(); - - if (StringUtils.isNotEmpty(zoneName) || StringUtils.isNotEmpty(existingZoneName)) { - if (!StringUtils.equals(existingZoneName, zoneName)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_UPDATE_ZONE_NAME_NOT_ALLOWED; - failures.add(new ValidationFailureDetailsBuilder() - .field("zoneName") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(existingZoneName, zoneName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - - boolean isAuditEnabled = getIsAuditEnabled(policy); - String serviceDefName = null; - RangerServiceDef serviceDef = null; - int policyItemsCount = 0; - - String policyType = StringUtils.isEmpty(policy.getPolicyType()) ? 
RangerPolicy.POLICY_TYPE_ACCESS : policy.getPolicyType(); - switch (policyType) { - case RangerPolicy.POLICY_TYPE_DATAMASK: - if (CollectionUtils.isNotEmpty(policy.getDataMaskPolicyItems())) { - policyItemsCount += policy.getDataMaskPolicyItems().size(); - } - break; - case RangerPolicy.POLICY_TYPE_ROWFILTER: - if (CollectionUtils.isNotEmpty(policy.getRowFilterPolicyItems())) { - policyItemsCount += policy.getRowFilterPolicyItems().size(); - } - break; - default: - if (CollectionUtils.isNotEmpty(policy.getPolicyItems())){ - policyItemsCount += policy.getPolicyItems().size(); - } - if(CollectionUtils.isNotEmpty(policy.getDenyPolicyItems())) { - policyItemsCount += policy.getDenyPolicyItems().size(); - } - break; - } - - if (policyItemsCount == 0 && !isAuditEnabled) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_POLICY_ITEMS; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy items") - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (service != null) { - serviceDefName = service.getType(); - serviceDef = getServiceDef(serviceDefName); - if (serviceDef == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_SERVICE_DEF; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy service def") - .isAnInternalError() - .becauseOf(error.getMessage(serviceDefName, serviceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - if (Boolean.TRUE.equals(policy.getIsDenyAllElse())) { - if (CollectionUtils.isNotEmpty(policy.getDenyPolicyItems()) || CollectionUtils.isNotEmpty(policy.getDenyExceptions())) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_UNSUPPORTED_POLICY_ITEM_TYPE; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy items") - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - valid = isValidPolicyItems(policy.getPolicyItems(), failures, serviceDef) && valid; - valid = isValidPolicyItems(policy.getDenyPolicyItems(), failures, serviceDef) && valid; - valid = isValidPolicyItems(policy.getAllowExceptions(), failures, serviceDef) && valid; - valid = isValidPolicyItems(policy.getDenyExceptions(), failures, serviceDef) && valid; - } - } - - if (serviceNameValid) { // resource checks can't be done meaningfully otherwise - valid = isValidValiditySchedule(policy, failures, action) && valid; - valid = isValidResources(policy, failures, action, isAdmin, serviceDef) && valid; - valid = isValidAccessTypeDef(policy, failures, action, isAdmin, serviceDef) && valid; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s, %s): %s", policy, action, isAdmin, failures, valid)); - } - return valid; - } - - boolean isValidAccessTypeDef(RangerPolicy policy, final List failures, Action action,boolean isAdmin, final RangerServiceDef serviceDef) { - boolean valid = true; - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidAccessTypeDef(%s, %s, %s,%s,%s)", policy, failures, action,isAdmin,serviceDef)); - } - String policyType = StringUtils.isEmpty(policy.getPolicyType()) ? 
RangerPolicy.POLICY_TYPE_ACCESS : policy.getPolicyType(); - //row filter policy - if (policyType==RangerPolicy.POLICY_TYPE_ROWFILTER){ - List rowFilterAccessTypeDefNames=new ArrayList(); - if(serviceDef!=null && serviceDef.getRowFilterDef()!=null){ - if(!CollectionUtils.isEmpty(serviceDef.getRowFilterDef().getAccessTypes())){ - for(RangerAccessTypeDef rangerAccessTypeDef:serviceDef.getRowFilterDef().getAccessTypes()){ - rowFilterAccessTypeDefNames.add(rangerAccessTypeDef.getName().toLowerCase()); - } - } - } - - if(!CollectionUtils.isEmpty(policy.getRowFilterPolicyItems())){ - for(RangerRowFilterPolicyItem rangerRowFilterPolicyItem:policy.getRowFilterPolicyItems()){ - if(!CollectionUtils.isEmpty(rangerRowFilterPolicyItem.getAccesses())){ - for(RangerPolicyItemAccess rangerPolicyItemAccess : rangerRowFilterPolicyItem.getAccesses()){ - if(!rowFilterAccessTypeDefNames.contains(rangerPolicyItemAccess.getType().toLowerCase())){ - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_ITEM_ACCESS_TYPE_INVALID; - failures.add(new ValidationFailureDetailsBuilder() - .field("row filter policy item access type") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(rangerPolicyItemAccess.getType(), rowFilterAccessTypeDefNames)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - } - } - //data mask policy - if (policyType==RangerPolicy.POLICY_TYPE_DATAMASK){ - List dataMaskAccessTypeDefNames=new ArrayList(); - if(serviceDef!=null && serviceDef.getDataMaskDef()!=null){ - if(!CollectionUtils.isEmpty(serviceDef.getDataMaskDef().getAccessTypes())){ - for(RangerAccessTypeDef rangerAccessTypeDef:serviceDef.getDataMaskDef().getAccessTypes()){ - dataMaskAccessTypeDefNames.add(rangerAccessTypeDef.getName().toLowerCase()); - } - } - } - - if(!CollectionUtils.isEmpty(policy.getDataMaskPolicyItems())){ - for(RangerDataMaskPolicyItem rangerDataMaskPolicyItem:policy.getDataMaskPolicyItems()){ - if(!CollectionUtils.isEmpty(rangerDataMaskPolicyItem.getAccesses())){ - for(RangerPolicyItemAccess rangerPolicyItemAccess : rangerDataMaskPolicyItem.getAccesses()){ - if(!dataMaskAccessTypeDefNames.contains(rangerPolicyItemAccess.getType().toLowerCase())){ - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_ITEM_ACCESS_TYPE_INVALID; - failures.add(new ValidationFailureDetailsBuilder() - .field("data masking policy item access type") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(rangerPolicyItemAccess.getType(), dataMaskAccessTypeDefNames)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidAccessTypeDef(%s, %s, %s,%s,%s)", policy, failures, action,isAdmin,serviceDef)); - } - return valid; - } - - boolean isValidResources(RangerPolicy policy, final List failures, Action action, - boolean isAdmin, final RangerServiceDef serviceDef) { - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidResources(%s, %s, %s, %s, %s)", policy, failures, action, isAdmin, serviceDef)); - } - - boolean valid = true; - Map resourceMap = policy.getResources(); - if (resourceMap != null) { // following checks can't be done meaningfully otherwise - valid = isPolicyResourceUnique(policy, failures, action) && valid; - if (serviceDef != null) { // following checks can't be done meaningfully otherwise - valid = isValidResourceNames(policy, failures, serviceDef) && valid; - valid = 
isValidResourceValues(resourceMap, failures, serviceDef) && valid; - valid = isValidResourceFlags(resourceMap, failures, serviceDef.getResources(), serviceDef.getName(), policy.getName(), isAdmin) && valid; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidResources(%s, %s, %s, %s, %s): %s", policy, failures, action, isAdmin, serviceDef, valid)); - } - return valid; - } - - boolean isValidValiditySchedule(RangerPolicy policy, final List failures, Action action) { - - boolean valid = true; - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidValiditySchedule(%s, %s, %s)", policy, failures, action)); - } - List validitySchedules = policy.getValiditySchedules(); - List normalizedValiditySchedules = null; - - for (RangerValiditySchedule entry : validitySchedules) { - RangerValidityScheduleValidator validator = new RangerValidityScheduleValidator(entry); - - RangerValiditySchedule normalizedValiditySchedule = validator.validate(failures); - if (normalizedValiditySchedule == null) { - valid = false; - if (LOG.isDebugEnabled()) { - LOG.debug("Invalid Validity-Schedule:[" + entry +"]"); - } - } else { - if (normalizedValiditySchedules == null) { - normalizedValiditySchedules = new ArrayList<>(); - } - normalizedValiditySchedules.add(normalizedValiditySchedule); - } - } - if (valid && CollectionUtils.isNotEmpty(normalizedValiditySchedules)) { - policy.setValiditySchedules(normalizedValiditySchedules); - } - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidValiditySchedule(%s, %s, %s): %s", policy, failures, action, valid)); - } - return valid; - } - - boolean isPolicyResourceUnique(RangerPolicy policy, final List failures, Action action) { - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isPolicyResourceUnique(%s, %s, %s)", policy, failures, action)); - } - - boolean valid = true; - if (!Boolean.TRUE.equals(policy.getIsEnabled())) { - LOG.debug("Policy is disabled. Skipping resource uniqueness validation."); - } else { - RangerPolicyResourceSignature policySignature = _factory.createPolicyResourceSignature(policy); - String signature = policySignature.getSignature(); - List policies = getPoliciesForResourceSignature(policy.getService(), signature); - if (CollectionUtils.isNotEmpty(policies)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_DUPLICATE_POLICY_RESOURCE; - RangerPolicy matchedPolicy = policies.iterator().next(); - // there shouldn't be a matching policy for create. 
During update only match should be to itself - if (action == Action.CREATE || (action == Action.UPDATE && (policies.size() > 1 || !matchedPolicy.getId().equals(policy.getId())))) { - failures.add(new ValidationFailureDetailsBuilder() - .field("resources") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(matchedPolicy.getName(), policy.getService())) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isPolicyResourceUnique(%s, %s, %s): %s", policy, failures, action, valid)); - } - return valid; - } - - boolean isValidResourceNames(final RangerPolicy policy, final List failures, final RangerServiceDef serviceDef) { - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidResourceNames(%s, %s, %s)", policy, failures, serviceDef)); - } - - boolean valid = true; - convertPolicyResourceNamesToLower(policy); - Set policyResources = policy.getResources().keySet(); - - RangerServiceDefHelper defHelper = new RangerServiceDefHelper(serviceDef); - Set> hierarchies = defHelper.getResourceHierarchies(policy.getPolicyType()); // this can be empty but not null! - if (hierarchies.isEmpty()) { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerPolicyValidator.isValidResourceNames: serviceDef does not have any resource hierarchies, possibly due to invalid service def!!"); - } - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_NO_COMPATIBLE_HIERARCHY; - failures.add(new ValidationFailureDetailsBuilder() - .field("service def resource hierarchies") - .subField("incompatible") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(serviceDef.getName(), " does not have any resource hierarchies")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - /* - * A policy is for a single hierarchy however, it doesn't specify which one. So we have to guess which hierarchy(s) it possibly be for. First, see if the policy could be for - * any of the known hierarchies? A candidate hierarchy is one whose resource levels are a superset of those in the policy. - * Why? What we want to catch at this stage is policies that straddles multiple hierarchies, e.g. db, udf and column for a hive policy. - * This has the side effect of catch spurious levels specified on the policy, e.g. having a level "blah" on a hive policy. 
- */ - Set> candidateHierarchies = filterHierarchies_hierarchyHasAllPolicyResources(policyResources, hierarchies, defHelper); - if (candidateHierarchies.isEmpty()) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("No compatible resource hierarchies found: resource[%s], service-def[%s], valid-resource-hierarchies[%s]", - policyResources.toString(), serviceDef.getName(), toStringHierarchies_all(hierarchies, defHelper))); - } - ValidationErrorCode error; - if (hierarchies.size() == 1) { // we can give a simpler message for single hierarchy service-defs which is the majority of cases - error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_NO_COMPATIBLE_HIERARCHY_SINGLE; - } else { - error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_NO_COMPATIBLE_HIERARCHY; - } - failures.add(new ValidationFailureDetailsBuilder() - .field("policy resources") - .subField("incompatible") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(serviceDef.getName(), toStringHierarchies_all(hierarchies, defHelper))) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("isValidResourceNames: Found [" + candidateHierarchies.size() + "] compatible hierarchies: " + toStringHierarchies_all(candidateHierarchies, defHelper)); - } - /* - * Among the candidate hierarchies there should be at least one for which policy specifies all of the mandatory resources. Note that there could be multiple - * hierarchies that meet that criteria, e.g. a hive policy that specified only DB. It is not clear if it belongs to DB->UDF or DB->TBL->COL hierarchy. - * However, if both UDF and TBL were required then we can detect that policy does not specify mandatory levels for any of the candidate hierarchies. - */ - Set> validHierarchies = filterHierarchies_mandatoryResourcesSpecifiedInPolicy(policyResources, candidateHierarchies, defHelper); - if (validHierarchies.isEmpty()) { - ValidationErrorCode error; - if (candidateHierarchies.size() == 1) { // we can provide better message if there is a single candidate hierarchy - error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_MISSING_MANDATORY_SINGLE; - } else { - error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_MISSING_MANDATORY; - } - failures.add(new ValidationFailureDetailsBuilder() - .field("policy resources") - .subField("missing mandatory") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(serviceDef.getName(), toStringHierarchies_mandatory(candidateHierarchies, defHelper))) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("isValidResourceNames: Found hierarchies with all mandatory fields specified: " + toStringHierarchies_mandatory(validHierarchies, defHelper)); - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidResourceNames(%s, %s, %s): %s", policy, failures, serviceDef, valid)); - } - return valid; - } - - /** - * String representation of mandatory resources of all the hierarchies suitable of showing to user. Mandatory resources within a hierarchy are not ordered per the hierarchy. 
- * @param hierarchies - * @param defHelper - * @return - */ - String toStringHierarchies_mandatory(Set> hierarchies, RangerServiceDefHelper defHelper) { - - // helper function skipping sanity checks of getting null arguments passed - StringBuilder builder = new StringBuilder(); - for (List aHierarchy : hierarchies) { - builder.append(defHelper.getMandatoryResourceNames(aHierarchy)); - builder.append(" "); - } - return builder.toString(); - } - - /** - * String representation of all resources of all hierarchies. Resources within a hierarchy are ordered per the hierarchy. - * @param hierarchies - * @param defHelper - * @return - */ - String toStringHierarchies_all(Set> hierarchies, RangerServiceDefHelper defHelper) { - - // helper function skipping sanity checks of getting null arguments passed - StringBuilder builder = new StringBuilder(); - for (List aHierarchy : hierarchies) { - builder.append(defHelper.getAllResourceNamesOrdered(aHierarchy)); - builder.append(" "); - } - return builder.toString(); - } - /** - * Returns the subset of all hierarchies that are a superset of the policy's resources. - * @param policyResources - * @param hierarchies - * @return - */ - Set> filterHierarchies_hierarchyHasAllPolicyResources(Set policyResources, Set> hierarchies, RangerServiceDefHelper defHelper) { - - // helper function skipping sanity checks of getting null arguments passed - Set> result = new HashSet>(hierarchies.size()); - for (List aHierarchy : hierarchies) { - if (defHelper.hierarchyHasAllResources(aHierarchy, policyResources)) { - result.add(aHierarchy); - } - } - return result; - } - - /** - * Returns the subset of hierarchies all of whose mandatory resources were found in policy's resource set. candidate hierarchies are expected to have passed - * filterHierarchies_hierarchyHasAllPolicyResources check first. 
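The comments in the removed RangerPolicyValidator describe a two-step filter over resource hierarchies: first keep the hierarchies whose resource levels are a superset of the resources named in the policy, then keep only those whose mandatory levels are all present in the policy. A small standalone illustration of that idea using plain collections; the hive-style hierarchies and the choice of mandatory level are invented for the example:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class HierarchyFilterSketch {
        public static void main(String[] args) {
            // Two resource hierarchies of a hive-like service def
            List<String> dbTblCol = Arrays.asList("database", "table", "column");
            List<String> dbUdf    = Arrays.asList("database", "udf");
            List<List<String>> hierarchies = Arrays.asList(dbTblCol, dbUdf);
            Set<String> mandatory = new HashSet<>(Arrays.asList("database")); // assumed mandatory level

            // Resources actually named in the policy
            Set<String> policyResources = new HashSet<>(Arrays.asList("database", "table"));

            // Step 1: candidate hierarchies are those whose levels cover all policy resources
            List<List<String>> candidates = new ArrayList<>();
            for (List<String> h : hierarchies) {
                if (h.containsAll(policyResources)) {
                    candidates.add(h);
                }
            }
            // -> only [database, table, column]; [database, udf] is dropped because it has no "table" level

            // Step 2: keep candidates whose mandatory levels all appear in the policy
            List<List<String>> valid = new ArrayList<>();
            for (List<String> h : candidates) {
                Set<String> required = new HashSet<>(h);
                required.retainAll(mandatory);
                if (policyResources.containsAll(required)) {
                    valid.add(h);
                }
            }
            System.out.println("candidates=" + candidates + ", valid=" + valid);
        }
    }

If the second step leaves nothing, the validator above reports the "missing mandatory" error; if the first step already leaves nothing, it reports the "no compatible hierarchy" error.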
- * @param policyResources - * @param hierarchies - * @param defHelper - * @return - */ - Set> filterHierarchies_mandatoryResourcesSpecifiedInPolicy(Set policyResources, Set> hierarchies, RangerServiceDefHelper defHelper) { - - // helper function skipping sanity checks of getting null arguments passed - Set> result = new HashSet>(hierarchies.size()); - for (List aHierarchy : hierarchies) { - Set mandatoryResources = defHelper.getMandatoryResourceNames(aHierarchy); - if (policyResources.containsAll(mandatoryResources)) { - result.add(aHierarchy); - } - } - return result; - } - - boolean isValidResourceFlags(final Map inputPolicyResources, final List failures, - final List resourceDefs, final String serviceDefName, final String policyName, boolean isAdmin) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidResourceFlags(%s, %s, %s, %s, %s, %s)", inputPolicyResources, failures, resourceDefs, serviceDefName, policyName, isAdmin)); - } - - boolean valid = true; - if (resourceDefs == null) { - LOG.debug("isValidResourceFlags: service Def is null"); - } else { - Map policyResources = getPolicyResourceWithLowerCaseKeys(inputPolicyResources); - for (RangerResourceDef resourceDef : resourceDefs) { - if (resourceDef == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_NULL_RESOURCE_DEF; - failures.add(new ValidationFailureDetailsBuilder() - .field("resource-def") - .isAnInternalError() - .becauseOf(error.getMessage(serviceDefName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (StringUtils.isBlank(resourceDef.getName())) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_RESOURCE_DEF_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("resource-def-name") - .isAnInternalError() - .becauseOf(error.getMessage(serviceDefName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - String resourceName = resourceDef.getName().toLowerCase(); - RangerPolicyResource policyResource = policyResources.get(resourceName); - if (policyResource == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("a policy-resource object for resource[" + resourceName + "] on policy [" + policyName + "] was null"); - } - } else { - boolean excludesSupported = Boolean.TRUE.equals(resourceDef.getExcludesSupported()); // could be null - boolean policyResourceIsExcludes = Boolean.TRUE.equals(policyResource.getIsExcludes()); // could be null - if (policyResourceIsExcludes && !excludesSupported) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_EXCLUDES_NOT_SUPPORTED; - failures.add(new ValidationFailureDetailsBuilder() - .field("isExcludes") - .subField(resourceName) - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(resourceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - if (policyResourceIsExcludes && !isAdmin) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_EXCLUDES_REQUIRES_ADMIN; - failures.add(new ValidationFailureDetailsBuilder() - .field("isExcludes") - .subField("isAdmin") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - boolean recursiveSupported = Boolean.TRUE.equals(resourceDef.getRecursiveSupported()); - boolean policyIsRecursive = Boolean.TRUE.equals(policyResource.getIsRecursive()); - if (policyIsRecursive && !recursiveSupported) { - ValidationErrorCode error = 
ValidationErrorCode.POLICY_VALIDATION_ERR_RECURSIVE_NOT_SUPPORTED; - failures.add(new ValidationFailureDetailsBuilder() - .field("isRecursive") - .subField(resourceName) - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(resourceName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidResourceFlags(%s, %s, %s, %s, %s, %s): %s", inputPolicyResources, failures, resourceDefs, serviceDefName, policyName, isAdmin, valid)); - } - return valid; - } - - boolean isValidResourceValues(Map resourceMap, List failures, RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidResourceValues(%s, %s, %s)", resourceMap, failures, serviceDef)); - } - - boolean valid = true; - Map validationRegExMap = getValidationRegExes(serviceDef); - for (Map.Entry entry : resourceMap.entrySet()) { - String name = entry.getKey(); - RangerPolicyResource policyResource = entry.getValue(); - if(policyResource != null) { - if(CollectionUtils.isNotEmpty(policyResource.getValues())) { - Set resources = new HashSet<>(policyResource.getValues()); - for (String aValue : resources) { - if (StringUtils.isBlank(aValue)) { - policyResource.getValues().remove(aValue); - } - } - } - - if(CollectionUtils.isEmpty(policyResource.getValues())){ - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_RESOURCE_LIST; - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("Resource list was empty or contains null: value[%s], resource-name[%s], service-def-name[%s]", policyResource.getValues(), name, serviceDef.getName())); - } - failures.add(new ValidationFailureDetailsBuilder() - .field("resource-values") - .subField(name) - .isMissing() - .becauseOf(error.getMessage(name)) - .errorCode(error.getErrorCode()) - .build()); - valid=false; - } - - if (validationRegExMap.containsKey(name) && CollectionUtils.isNotEmpty(policyResource.getValues())) { - String regEx = validationRegExMap.get(name); - for (String aValue : policyResource.getValues()) { - if (!aValue.matches(regEx)) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Resource failed regex check: value[%s], resource-name[%s], regEx[%s], service-def-name[%s]", aValue, name, regEx, serviceDef.getName())); - } - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_VALUE_REGEX; - failures.add(new ValidationFailureDetailsBuilder() - .field("resource-values") - .subField(name) - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(aValue, name)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidResourceValues(%s, %s, %s): %s", resourceMap, failures, serviceDef, valid)); - } - return valid; - } - - boolean isValidPolicyItems(List policyItems, List failures, RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", policyItems, failures, serviceDef)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(policyItems)) { - LOG.debug("policy items collection was null/empty"); - } else { - for (RangerPolicyItem policyItem : policyItems) { - if (policyItem == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_NULL_POLICY_ITEM; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item") - 
.isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - // we want to go through all elements even though one may be bad so all failures are captured - valid = isValidPolicyItem(policyItem, failures, serviceDef) && valid; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s): %s", policyItems, failures, serviceDef, valid)); - } - return valid; - } - - boolean isValidPolicyItem(RangerPolicyItem policyItem, List failures, RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", policyItem, failures, serviceDef)); - } - - boolean valid = true; - if (policyItem == null) { - LOG.debug("policy item was null!"); - } else { - // access items collection can't be empty (unless delegated admin is true) and should be otherwise valid - if (CollectionUtils.isEmpty(policyItem.getAccesses())) { - if (!Boolean.TRUE.equals(policyItem.getDelegateAdmin())) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item accesses") - .isMissing() - .becauseOf(error.getMessage("policy item accesses")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - LOG.debug("policy item collection was null but delegated admin is true. Ok"); - } - } else { - valid = isValidItemAccesses(policyItem.getAccesses(), failures, serviceDef) && valid; - } - // both users and user-groups collections can't be empty - if (CollectionUtils.isEmpty(policyItem.getUsers()) && CollectionUtils.isEmpty(policyItem.getGroups()) && CollectionUtils.isEmpty(policyItem.getRoles())) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_USER_AND_GROUPS; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item users/user-groups/roles") - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s): %s", policyItem, failures, serviceDef, valid)); - } - return valid; - } - - boolean isValidItemAccesses(List accesses, List failures, RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", accesses, failures, serviceDef)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(accesses)) { - LOG.debug("policy item accesses collection was null/empty!"); - } else { - Set accessTypes = getAccessTypes(serviceDef); - for (RangerPolicyItemAccess access : accesses) { - if (access == null) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_NULL_POLICY_ITEM_ACCESS; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item access") - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - // we want to go through all elements even though one may be bad so all failures are captured - valid = isValidPolicyItemAccess(access, failures, accessTypes) && valid; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s): %b", accesses, failures, serviceDef, valid)); - } - return valid; - } - - boolean isValidPolicyItemAccess(RangerPolicyItemAccess access, List failures, Set accessTypes) { - 
if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValidPolicyItemAccess(%s, %s, %s)", access, failures, accessTypes)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(accessTypes)) { // caller should firewall this argument! - LOG.debug("isValidPolicyItemAccess: accessTypes was null!"); - } else if (access == null) { - LOG.debug("isValidPolicyItemAccess: policy item access was null!"); - } else { - String accessType = access.getType(); - if (StringUtils.isBlank(accessType)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item access type") - .isMissing() - .becauseOf(error.getMessage("policy item access type")) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - String matchedAccessType = getMatchedAccessType(accessType, accessTypes); - if (StringUtils.isEmpty(matchedAccessType)) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_ITEM_ACCESS_TYPE_INVALID; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item access type") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(accessType, accessTypes)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - access.setType(matchedAccessType); - } - } - Boolean isAllowed = access.getIsAllowed(); - // it can be null (which is treated as allowed) but not false - if (isAllowed != null && isAllowed == false) { - ValidationErrorCode error = ValidationErrorCode.POLICY_VALIDATION_ERR_POLICY_ITEM_ACCESS_TYPE_DENY; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy item access type allowed") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValidPolicyItemAccess(%s, %s, %s): %s", access, failures, accessTypes, valid)); - } - return valid; - } - - String getMatchedAccessType(String accessType, Set validAccessTypes) { - String ret = null; - for (String validType : validAccessTypes) { - if (StringUtils.equalsIgnoreCase(accessType, validType)) { - ret = validType; - break; - } - } - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerRoleValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerRoleValidator.java deleted file mode 100644 index de9dfd5589..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerRoleValidator.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
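isValidPolicyItemAccess above normalises the requested access type against the service def's known types case-insensitively, and treats a null isAllowed as allowed while rejecting an explicit false. A compact sketch of just that matching rule, detached from the validator plumbing; the method name and the sample access types are illustrative only:

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    public class AccessTypeMatchSketch {
        // Case-insensitive lookup that returns the canonical spelling from the service def,
        // or null when the requested type is unknown.
        static String matchAccessType(String requested, Set<String> validAccessTypes) {
            for (String validType : validAccessTypes) {
                if (validType.equalsIgnoreCase(requested)) {
                    return validType;
                }
            }
            return null;
        }

        public static void main(String[] args) {
            Set<String> defTypes = new LinkedHashSet<>(Arrays.asList("entity-read", "entity-create"));

            System.out.println(matchAccessType("Entity-Read", defTypes));  // "entity-read" (normalised)
            System.out.println(matchAccessType("entity-purge", defTypes)); // null -> validation failure

            Boolean isAllowed = null;
            // null is treated as allowed; only an explicit Boolean.FALSE is rejected
            boolean rejected = Boolean.FALSE.equals(isAllowed);
            System.out.println("rejected=" + rejected); // false
        }
    }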
- */ - -package org.apache.atlas.plugin.model.validation; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.store.RoleStore; - -import java.util.ArrayList; -import java.util.List; - -public class RangerRoleValidator extends RangerValidator { - private static final Log LOG = LogFactory.getLog(RangerRoleValidator.class); - - public RangerRoleValidator(RoleStore store) { - super(store); - } - - public void validate(RangerRole rangeRole, Action action) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerRoleValidator.validate(%s, %s)", rangeRole, action)); - } - - List failures = new ArrayList<>(); - boolean valid = isValid(rangeRole, action, failures); - String message = ""; - try { - if (!valid) { - message = serializeFailures(failures); - throw new Exception(message); - } - } finally { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerRoleValidator.validate(%s, %s): %s, reason[%s]", rangeRole, action, valid, message)); - } - } - } - - @Override - boolean isValid(Long id, Action action, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerRoleValidator.isValid(%s, %s, %s)", id, action, failures)); - } - - boolean valid = true; - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_UNSUPPORTED_ACTION; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (id == null) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .becauseOf("Role id was null/missing") - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(id)) - .build()); - valid = false; - } else if (!roleExists(id)) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_INVALID_ROLE_ID; - failures.add(new ValidationFailureDetailsBuilder() - .becauseOf("Role with id[{0}] does not exist") - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(id)) - .build()); - valid = false; - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerRoleValidator.isValid(%s, %s, %s): %s", id, action, failures, valid)); - } - return valid; - } - - - @Override - boolean isValid(String name, Action action, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerRoleValidator.isValid(%s, %s, %s)", name, action, failures)); - } - - boolean valid = true; - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_UNSUPPORTED_ACTION; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else if (name == null) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .becauseOf("Role name was null/missing") - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } else if (!roleExists(name)) { - ValidationErrorCode error = 
ValidationErrorCode.ROLE_VALIDATION_ERR_INVALID_ROLE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .becauseOf("Role with name[{0}] does not exist") - .field("name") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerRoleValidator.isValid(%s, %s, %s): %s", name, action, failures, valid)); - } - return valid; - } - - boolean isValid(RangerRole rangerRole, Action action, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerRoleValidator.isValid(%s, %s, %s)", rangerRole, action, failures)); - } - - boolean valid = true; - if (rangerRole == null) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_NULL_RANGER_ROLE_OBJECT; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } else { - String roleName = rangerRole.getName(); - if (StringUtils.isEmpty(roleName)) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_NULL_RANGER_ROLE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isMissing() - .becauseOf(error.getMessage()) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - - Long id = rangerRole.getId(); - RangerRole existingRangerRole = null; - if (null != id) { - existingRangerRole = getRangerRole(id); - } - - if (action == Action.CREATE) { - if (existingRangerRole != null) { - String existingRoleName = existingRangerRole.getName(); - if (roleName.equals(existingRoleName)) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_ROLE_NAME_CONFLICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(existingRoleName)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } else if (action == Action.UPDATE) { // id is ignored for CREATE - if (id == null) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .becauseOf(error.getMessage(id)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - if (existingRangerRole == null) { - ValidationErrorCode error = ValidationErrorCode.ROLE_VALIDATION_ERR_INVALID_ROLE_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isSemanticallyIncorrect() - .becauseOf(error.getMessage(id)) - .errorCode(error.getErrorCode()) - .build()); - valid = false; - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerRoleValidator.isValid(%s, %s, %s): %s", rangerRole, action, failures, valid)); - } - - return valid; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerSecurityZoneValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerSecurityZoneValidator.java deleted file mode 100644 index a9d5df52af..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerSecurityZoneValidator.java +++ /dev/null @@ -1,655 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.model.validation; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.model.RangerSecurityZone.RangerSecurityZoneService; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.policyengine.RangerAccessResourceImpl; -import org.apache.atlas.plugin.policyengine.RangerResourceTrie; -import org.apache.atlas.plugin.policyresourcematcher.RangerDefaultPolicyResourceMatcher; -import org.apache.atlas.plugin.policyresourcematcher.RangerPolicyResourceMatcher; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; -import org.apache.atlas.plugin.store.SecurityZoneStore; -import org.apache.atlas.plugin.store.ServiceStore; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class RangerSecurityZoneValidator extends RangerValidator { - private static final Log LOG = LogFactory.getLog(RangerSecurityZoneValidator.class); - - private final SecurityZoneStore securityZoneStore; - - public RangerSecurityZoneValidator(ServiceStore store, SecurityZoneStore securityZoneStore) { - super(store); - this.securityZoneStore = securityZoneStore; - } - - public void validate(RangerSecurityZone securityZone, Action action) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validate(%s, %s)", securityZone, action)); - } - - List failures = new ArrayList<>(); - - boolean valid = isValid(securityZone, action, failures); - - String message; - try { - if (!valid) { - message = serializeFailures(failures); - throw new Exception(message); - } - - } finally { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validate(%s, %s)", securityZone, action)); - } - } - } - - @Override - boolean isValid(String name, Action action, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", name, action, failures)); - } - - boolean ret = true; - - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_UNSUPPORTED_ACTION; - - failures.add(new ValidationFailureDetailsBuilder().isAnInternalError().becauseOf(error.getMessage()).errorCode(error.getErrorCode()).build()); - ret = false; - } else { - if (StringUtils.isEmpty(name)) { - ValidationErrorCode error = 
ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_FIELD; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone name was null/missing").field("name").isMissing().errorCode(error.getErrorCode()).becauseOf(error.getMessage("name")).build()); - ret = false; - } else { - if (getSecurityZone(name) == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_ZONE_ID; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone does not exist").field("name").errorCode(error.getErrorCode()).becauseOf(error.getMessage(name)).build()); - ret = false; - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s) : %s", name, action, failures, ret)); - } - - return ret; - } - - @Override - boolean isValid(Long id, Action action, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", id, action, failures)); - } - - boolean ret = true; - - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_UNSUPPORTED_ACTION; - - failures.add(new ValidationFailureDetailsBuilder().isAnInternalError().becauseOf(error.getMessage()).errorCode(error.getErrorCode()).build()); - ret = false; - } else if (id == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_FIELD; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone id was null/missing").field("id").isMissing().errorCode(error.getErrorCode()).becauseOf(error.getMessage("id")).build()); - ret = false; - } else if (getSecurityZone(id) == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_ZONE_ID; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone id does not exist").field("id").errorCode(error.getErrorCode()).becauseOf(error.getMessage(id)).build()); - ret = false; - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s) : %s", id, action, failures, ret)); - } - - return ret; - } - - boolean isValid(RangerSecurityZone securityZone, Action action, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isValid(%s, %s, %s)", securityZone, action, failures)); - } - - if (!(action == Action.CREATE || action == Action.UPDATE)) { - throw new IllegalArgumentException("isValid(RangerPolicy, ...) 
is only supported for create/update"); - } - - boolean ret = true; - - RangerSecurityZone existingZone; - final String zoneName = securityZone.getName(); - if (StringUtils.isEmpty(StringUtils.trim(zoneName))) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_FIELD; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone name was null/missing").field("name").isMissing().errorCode(error.getErrorCode()).becauseOf(error.getMessage("name")).build()); - ret = false; - } - - if (action == Action.CREATE) { - securityZone.setId(-1L); - existingZone = getSecurityZone(zoneName); - if (existingZone != null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_ZONE_NAME_CONFLICT; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone name exists").field("name").errorCode(error.getErrorCode()).becauseOf(error.getMessage(existingZone.getId())).build()); - ret = false; - } - } else { - Long zoneId = securityZone.getId(); - existingZone = getSecurityZone(zoneId); - - if (existingZone == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_ZONE_ID; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone with id does not exist").field("id").errorCode(error.getErrorCode()).becauseOf(error.getMessage(zoneId)).build()); - ret = false; - } else if (StringUtils.isNotEmpty(StringUtils.trim(zoneName)) && !StringUtils.equals(zoneName, existingZone.getName())) { - existingZone = getSecurityZone(zoneName); - - if (existingZone != null) { - if (!StringUtils.equals(existingZone.getName(), zoneName)) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_ZONE_NAME_CONFLICT; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone name").field("name").errorCode(error.getErrorCode()).becauseOf(error.getMessage(existingZone.getId())).build()); - ret = false; - } - } - } - } - - ret = ret && validateWithinSecurityZone(securityZone, action, failures); - - ret = ret && validateAgainstAllSecurityZones(securityZone, action, failures); - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isValid(%s, %s, %s) : %s", securityZone, action, failures, ret)); - } - - return ret; - } - - private boolean validateWithinSecurityZone(RangerSecurityZone securityZone, Action action, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validateWithinSecurityZone(%s, %s, %s)", securityZone, action, failures)); - } - - boolean ret = true; - - // Validate each service for existence, not being tag-service and each resource-spec for validity - if (MapUtils.isNotEmpty(securityZone.getServices())) { - for (Map.Entry serviceSpecification : securityZone.getServices().entrySet()) { - String serviceName = serviceSpecification.getKey(); - RangerSecurityZone.RangerSecurityZoneService securityZoneService = serviceSpecification.getValue(); - - ret = ret && validateSecurityZoneService(serviceName, securityZoneService, failures); - } - } else { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_SERVICES; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf("security zone services").isMissing().field("services").errorCode(error.getErrorCode()).becauseOf(error.getMessage(securityZone.getName())).build()); - ret = false; - } - // both admin users and user-groups collections can't be empty - if 
(CollectionUtils.isEmpty(securityZone.getAdminUsers()) && CollectionUtils.isEmpty(securityZone.getAdminUserGroups())) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_USER_AND_GROUPS; - - failures.add(new ValidationFailureDetailsBuilder().field("security zone admin users/user-groups").isMissing().becauseOf(error.getMessage()).errorCode(error.getErrorCode()).build()); - ret = false; - } - // both audit users and user-groups collections can't be empty - if (CollectionUtils.isEmpty(securityZone.getAuditUsers()) && CollectionUtils.isEmpty(securityZone.getAuditUserGroups())) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_USER_AND_GROUPS; - - failures.add(new ValidationFailureDetailsBuilder().field("security zone audit users/user-groups").isMissing().becauseOf(error.getMessage()).errorCode(error.getErrorCode()).build()); - ret = false; - } - - if (securityZone.getServices() != null) { - for (Map.Entry serviceResourceMapEntry : securityZone.getServices() - .entrySet()) { - if (serviceResourceMapEntry.getValue().getResources() != null) { - for (Map> resource : serviceResourceMapEntry.getValue().getResources()) { - if (resource != null) { - for (Map.Entry> entry : resource.entrySet()) { - if (CollectionUtils.isEmpty(entry.getValue())) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_RESOURCES; - failures.add(new ValidationFailureDetailsBuilder().field("security zone resources") - .subField("resources").isMissing() - .becauseOf(error.getMessage(serviceResourceMapEntry.getKey())) - .errorCode(error.getErrorCode()).build()); - ret = false; - } - } - } - } - } - } - } - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validateWithinSecurityZone(%s, %s, %s) : %s", securityZone, action, failures, ret)); - } - return ret; - } - - private boolean validateAgainstAllSecurityZones(RangerSecurityZone securityZone, Action action, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validateAgainstAllSecurityZones(%s, %s, %s)", securityZone, action, failures)); - } - - boolean ret = true; - - final String zoneName; - - if (securityZone.getId() != -1L) { - RangerSecurityZone existingZone = getSecurityZone(securityZone.getId()); - zoneName = existingZone.getName(); - } else { - zoneName = securityZone.getName(); - } - - for (Map.Entry entry: securityZone.getServices().entrySet()) { - String serviceName = entry.getKey(); - RangerSecurityZone.RangerSecurityZoneService serviceResources = entry.getValue(); - - if (CollectionUtils.isNotEmpty(serviceResources.getResources())) { - SearchFilter filter = new SearchFilter(); - List zones = null; - - filter.setParam(SearchFilter.SERVICE_NAME, serviceName); - filter.setParam(SearchFilter.ZONE_NAME, zoneName); - - try { - zones = securityZoneStore.getSecurityZones(filter); - } catch (Exception excp) { - LOG.error("Failed to get Security-Zones", excp); - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INTERNAL_ERROR; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf(error.getMessage(excp.getMessage())).errorCode(error.getErrorCode()).build()); - ret = false; - } - - if (CollectionUtils.isNotEmpty(zones)) { - RangerService service = getService(serviceName); - RangerServiceDef serviceDef = service != null ? 
getServiceDef(service.getType()) : null; - - if (serviceDef == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INTERNAL_ERROR; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf(error.getMessage(serviceName)).errorCode(error.getErrorCode()).build()); - ret = false; - - } else { - zones.add(securityZone); - ret = ret && validateZoneServiceInAllZones(zones, serviceName, serviceDef, failures); - } - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validateAgainstAllSecurityZones(%s, %s, %s) : %s", securityZone, action, failures, ret)); - } - - return ret; - } - - private boolean validateZoneServiceInAllZones(List zones, String serviceName, RangerServiceDef serviceDef, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validateZoneServiceInAllZones(%s, %s, %s, %s)", zones, serviceName, serviceDef, failures)); - } - - boolean ret = true; - - // For each zone, get list-of-resources corresponding to serviceName. - // For each list-of-resources: - // get one resource (this is a map of >); convert it into map of . excludes is always false, recursive true only for HDFS - // build a subclass of RangerPolicyResourceEvaluator with id of zone, zoneName as a member, and RangerDefaultResourceMatcher as matcher. - // add this to list-of-evaluators - - Map> matchersForResourceDef = new HashMap<>(); - - for (RangerSecurityZone zone : zones) { - List>> resources = zone.getServices().get(serviceName).getResources(); - - for (Map> resource : resources) { - Map policyResources = new HashMap<>(); - - for (Map.Entry> entry : resource.entrySet()) { - String resourceDefName = entry.getKey(); - List resourceValues = entry.getValue(); - - RangerPolicy.RangerPolicyResource policyResource = new RangerPolicy.RangerPolicyResource(); - - policyResource.setIsExcludes(false); - policyResource.setIsRecursive(EmbeddedServiceDefsUtil.isRecursiveEnabled(serviceDef, resourceDefName)); - policyResource.setValues(resourceValues); - policyResources.put(resourceDefName, policyResource); - - if (matchersForResourceDef.get(resourceDefName) == null) { - matchersForResourceDef.put(resourceDefName, new ArrayList<>()); - } - } - - RangerZoneResourceMatcher matcher = new RangerZoneResourceMatcher(zone.getName(), policyResources, serviceDef); - - for (String resourceDefName : resource.keySet()) { - matchersForResourceDef.get(resourceDefName).add(matcher); - } - } - } - - // Build a map of trie with list-of-evaluators with one entry corresponds to one resource-def if it exists in the list-of-resources - - Map> trieMap = new HashMap<>(); - List resourceDefs = serviceDef.getResources(); - - for (Map.Entry> entry : matchersForResourceDef.entrySet()) { - String resourceDefName = entry.getKey(); - List matchers = entry.getValue(); - RangerServiceDef.RangerResourceDef resourceDef = null; - - for (RangerServiceDef.RangerResourceDef element : resourceDefs) { - if (StringUtils.equals(element.getName(), resourceDefName)) { - resourceDef = element; - break; - } - } - - trieMap.put(entry.getKey(), new RangerResourceTrie<>(resourceDef, matchers)); - } - - // For each zone, get list-of-resources corresponding to serviceName - // For each list-of-resources: - // get one resource; for each level in the resource, run it through map of trie and get possible evaluators. - // check each evaluator to see if the resource-match actually happens. If yes then add the zone-evaluator to matching evaluators. 
- // flag error if there are more than one matching evaluators with different zone-ids. - // - - RangerServiceDefHelper serviceDefHelper = new RangerServiceDefHelper(serviceDef, true); - - for (RangerSecurityZone zone : zones) { - List>> resources = zone.getServices().get(serviceName).getResources(); - - for (Map> resource : resources) { - - Set smallestList = null; - - List resourceKeys = serviceDefHelper.getOrderedResourceNames(resource.keySet()); - - for (String resourceDefName : resourceKeys) { - List resourceValues = resource.get(resourceDefName); - - RangerResourceTrie trie = trieMap.get(resourceDefName); - - Set zoneMatchersForResource = trie.getEvaluatorsForResource(resourceValues); - Set inheritedZoneMatchers = trie.getInheritedEvaluators(); - - if (LOG.isDebugEnabled()) { - LOG.debug("ResourceDefName:[" + resourceDefName + "], values:[" + resourceValues + "], matched-zones:[" + zoneMatchersForResource + "], inherited-zones:[" + inheritedZoneMatchers + "]"); - } - - if (smallestList != null) { - if (CollectionUtils.isEmpty(inheritedZoneMatchers) && CollectionUtils.isEmpty(zoneMatchersForResource)) { - smallestList = null; - } else if (CollectionUtils.isEmpty(inheritedZoneMatchers)) { - smallestList.retainAll(zoneMatchersForResource); - } else if (CollectionUtils.isEmpty(zoneMatchersForResource)) { - smallestList.retainAll(inheritedZoneMatchers); - } else { - Set smaller, bigger; - if (zoneMatchersForResource.size() < inheritedZoneMatchers.size()) { - smaller = zoneMatchersForResource; - bigger = inheritedZoneMatchers; - } else { - smaller = inheritedZoneMatchers; - bigger = zoneMatchersForResource; - } - Set tmp = new HashSet<>(); - if (smallestList.size() < smaller.size()) { - smallestList.stream().filter(smaller::contains).forEach(tmp::add); - smallestList.stream().filter(bigger::contains).forEach(tmp::add); - } else { - smaller.stream().filter(smallestList::contains).forEach(tmp::add); - if (smallestList.size() < bigger.size()) { - smallestList.stream().filter(bigger::contains).forEach(tmp::add); - } else { - bigger.stream().filter(smallestList::contains).forEach(tmp::add); - } - } - smallestList = tmp; - } - } else { - if (CollectionUtils.isEmpty(inheritedZoneMatchers) || CollectionUtils.isEmpty(zoneMatchersForResource)) { - Set tmp = CollectionUtils.isEmpty(inheritedZoneMatchers) ? zoneMatchersForResource : inheritedZoneMatchers; - smallestList = resourceKeys.size() == 1 || CollectionUtils.isEmpty(tmp) ? tmp : new HashSet<>(tmp); - } else { - smallestList = new HashSet<>(zoneMatchersForResource); - smallestList.addAll(inheritedZoneMatchers); - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Resource:[" + resource +"], matched-zones:[" + smallestList +"]"); - } - - if (CollectionUtils.isEmpty(smallestList) || smallestList.size() == 1) { - continue; - } - - final Set intersection = smallestList; - - RangerAccessResourceImpl accessResource = new RangerAccessResourceImpl(); - - accessResource.setServiceDef(serviceDef); - - for (Map.Entry> entry : resource.entrySet()) { - accessResource.setValue(entry.getKey(), entry.getValue()); - } - - Set matchedZoneNames = new HashSet<>(); - - for (RangerZoneResourceMatcher zoneMatcher : intersection) { - if (LOG.isDebugEnabled()) { - LOG.debug("Trying to match resource:[" + accessResource +"] using zoneMatcher:[" + zoneMatcher + "]"); - } - // These are potential matches. 
Try to really match them - if (zoneMatcher.getPolicyResourceMatcher().isMatch(accessResource, RangerPolicyResourceMatcher.MatchScope.ANY, null)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Matched resource:[" + accessResource +"] using zoneMatcher:[" + zoneMatcher + "]"); - } - // Actual match happened - matchedZoneNames.add(zoneMatcher.getSecurityZoneName()); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Did not match resource:[" + accessResource +"] using zoneMatcher:[" + zoneMatcher + "]"); - } - } - } - LOG.info("The following zone-names matched resource:[" + resource + "]: " + matchedZoneNames); - - if (matchedZoneNames.size() > 1) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_ZONE_RESOURCE_CONFLICT; - - failures.add(new ValidationFailureDetailsBuilder().becauseOf(error.getMessage(matchedZoneNames, resource)).errorCode(error.getErrorCode()).build()); - ret = false; - break; - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validateZoneServiceInAllZones(%s, %s, %s, %s) : %s", zones, serviceName, serviceDef, failures, ret)); - } - return ret; - } - - private boolean validateSecurityZoneService(String serviceName, RangerSecurityZone.RangerSecurityZoneService securityZoneService, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.validateSecurityZoneService(%s, %s, %s)", serviceName, securityZoneService, failures)); - } - - boolean ret = true; - - // Verify service with serviceName exists - get the service-type - RangerService service = getService(serviceName); - - if (service == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_SERVICE_NAME; - - failures.add(new ValidationFailureDetailsBuilder().field("security zone resource service-name").becauseOf(error.getMessage(serviceName)).errorCode(error.getErrorCode()).build()); - ret = false; - } else { - RangerServiceDef serviceDef = getServiceDef(service.getType()); - - if (serviceDef == null) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_SERVICE_TYPE; - failures.add(new ValidationFailureDetailsBuilder().field("security zone resource service-type").becauseOf(error.getMessage(service.getType())).errorCode(error.getErrorCode()).build()); - ret = false; - } else { - String serviceType = serviceDef.getName(); - - if (StringUtils.equals(serviceType, EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME)) { - if (CollectionUtils.isNotEmpty(securityZoneService.getResources())) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_UNEXPECTED_RESOURCES; - failures.add(new ValidationFailureDetailsBuilder().field("security zone resources").becauseOf(error.getMessage(serviceName)).errorCode(error.getErrorCode()).build()); - ret = false; - } - } else { - if (CollectionUtils.isEmpty(securityZoneService.getResources())) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_MISSING_RESOURCES; - failures.add(new ValidationFailureDetailsBuilder().field("security zone resources").isMissing().becauseOf(error.getMessage(serviceName)).errorCode(error.getErrorCode()).build()); - ret = false; - } else { - // For each resource-spec, verify that it forms valid hierarchy for some policy-type - for (Map> resource : securityZoneService.getResources()) { - Set resourceDefNames = resource.keySet(); - RangerServiceDefHelper serviceDefHelper = new RangerServiceDefHelper(serviceDef); - boolean 
isValidHierarchy = false; - - for (String policyType : RangerPolicy.POLICY_TYPES) { - Set> resourceHierarchies = serviceDefHelper.getResourceHierarchies(policyType, resourceDefNames); - - if (LOG.isDebugEnabled()) { - LOG.debug("Size of resourceHierarchies for resourceDefNames:[" + resourceDefNames + ", policyType=" + policyType + "] = " + resourceHierarchies.size()); - } - - for (List resourceHierarchy : resourceHierarchies) { - - if (RangerDefaultPolicyResourceMatcher.isHierarchyValidForResources(resourceHierarchy, resource)) { - isValidHierarchy = true; - break; - } else { - LOG.info("gaps found in resource, skipping hierarchy:[" + resourceHierarchies + "]"); - } - } - } - - if (!isValidHierarchy) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_INVALID_RESOURCE_HIERARCHY; - - failures.add(new ValidationFailureDetailsBuilder().field("security zone resource hierarchy").becauseOf(error.getMessage(serviceName, resourceDefNames)).errorCode(error.getErrorCode()).build()); - ret = false; - } - - /* - * Ignore this check. It should be possible to have all wildcard resource in a zone if zone-admin so desires - * - boolean isValidResourceSpec = isAnyNonWildcardResource(resource, failures); - - if (!isValidResourceSpec) { - ValidationErrorCode error = ValidationErrorCode.SECURITY_ZONE_VALIDATION_ERR_ALL_WILDCARD_RESOURCE_VALUES; - - failures.add(new ValidationFailureDetailsBuilder().field("security zone resource values").becauseOf(error.getMessage(serviceName)).errorCode(error.getErrorCode()).build()); - ret = false; - LOG.warn("RangerPolicyValidator.validateSecurityZoneService() : All wildcard resource-values specified for service :[" + serviceName + "]"); - } - */ - - } - } - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.validateSecurityZoneService(%s, %s, %s) : %s", serviceName, securityZoneService, failures, ret)); - } - - return ret; - } - - /* - private boolean isAnyNonWildcardResource(Map> resource, List failures) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerPolicyValidator.isAnyNonWildcardResource(%s, %s)", resource, failures)); - } - - boolean ret = false; - - for (Map.Entry> resourceDefValue : resource.entrySet()) { - boolean wildCardResourceFound = false; - List resourceValues = resourceDefValue.getValue(); - - for (String resourceValue : resourceValues) { - if (StringUtils.equals(resourceValue, RangerDefaultResourceMatcher.WILDCARD_ASTERISK)) { - wildCardResourceFound = true; - break; - } - } - - if (!wildCardResourceFound) { - ret = true; - break; - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerPolicyValidator.isAnyNonWildcardResource(%s, %s) : %s", resource, failures, ret)); - } - return ret; - } - */ -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefHelper.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefHelper.java index ddc359b3cd..e7cbb28ddf 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefHelper.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefHelper.java @@ -49,74 +49,6 @@ public class RangerServiceDefHelper { static final Map _Cache = new ConcurrentHashMap<>(); final Delegate _delegate; - static public RangerServiceDef getServiceDefForPolicyFiltering(RangerServiceDef serviceDef) { - - List modifiedResourceDefs = new ArrayList(); - - for 
(RangerResourceDef resourceDef : serviceDef.getResources()) { - - final RangerResourceDef modifiedResourceDef; - - String matcherClassName = resourceDef.getMatcher(); - - if (RangerPathResourceMatcher.class.getName().equals(matcherClassName)) { - - Map modifiedMatcherOptions = new HashMap(resourceDef.getMatcherOptions()); - - modifiedMatcherOptions.put(RangerAbstractResourceMatcher.OPTION_WILD_CARD, "false"); - - modifiedResourceDef = new RangerResourceDef(resourceDef); - modifiedResourceDef.setMatcherOptions(modifiedMatcherOptions); - modifiedResourceDef.setRecursiveSupported(false); - - } else { - modifiedResourceDef = resourceDef; - } - - modifiedResourceDefs.add(modifiedResourceDef); - } - - return new RangerServiceDef(serviceDef.getName(), serviceDef.getDisplayName(), serviceDef.getImplClass(), serviceDef.getLabel(), - serviceDef.getDescription(), serviceDef.getOptions(), serviceDef.getConfigs(), modifiedResourceDefs, serviceDef.getAccessTypes(), - serviceDef.getPolicyConditions(), serviceDef.getContextEnrichers(), serviceDef.getEnums()); - } - - public static Map getFilterResourcesForAncestorPolicyFiltering(RangerServiceDef serviceDef, Map filterResources) { - - Map ret = null; - - for (RangerResourceDef resourceDef : serviceDef.getResources()) { - - String matcherClassName = resourceDef.getMatcher(); - - if (RangerPathResourceMatcher.class.getName().equals(matcherClassName)) { - - String resourceDefName = resourceDef.getName(); - - final Map resourceMatcherOptions = resourceDef.getMatcherOptions(); - - String delimiter = resourceMatcherOptions.get(RangerPathResourceMatcher.OPTION_PATH_SEPARATOR); - if (StringUtils.isBlank(delimiter)) { - delimiter = Character.toString(RangerPathResourceMatcher.DEFAULT_PATH_SEPARATOR_CHAR); - } - - String resourceValue = filterResources.get(resourceDefName); - if (StringUtils.isNotBlank(resourceValue)) { - if (!resourceValue.endsWith(delimiter)) { - resourceValue += delimiter; - } - resourceValue += RangerAbstractResourceMatcher.WILDCARD_ASTERISK; - - if (ret == null) { - ret = new HashMap(); - } - ret.put(resourceDefName, resourceValue); - } - } - } - - return ret; - } public RangerServiceDefHelper(RangerServiceDef serviceDef) { this(serviceDef, true, false); @@ -166,10 +98,6 @@ public RangerServiceDef getServiceDef() { return _delegate._serviceDef; } - public void patchServiceDefWithDefaultValues() { - _delegate.patchServiceDefWithDefaultValues(); - } - /** * for a resource definition as follows: * @@ -186,18 +114,6 @@ public Set> getResourceHierarchies(String policyType) { return _delegate.getResourceHierarchies(policyType); } - public Set> filterHierarchies_containsOnlyMandatoryResources(String policyType) { - Set> hierarchies = getResourceHierarchies(policyType); - Set> result = new HashSet>(hierarchies.size()); - for (List aHierarchy : hierarchies) { - Set mandatoryResources = getMandatoryResourceNames(aHierarchy); - if (aHierarchy.size() == mandatoryResources.size()) { - result.add(aHierarchy); - } - } - return result; - } - public Set> getResourceHierarchies(String policyType, Collection keys) { if (LOG.isDebugEnabled()) { LOG.debug("==> getResourceHierarchies(policyType=" + policyType + ", keys=" + StringUtils.join(keys, ",") + ")"); @@ -248,42 +164,6 @@ public boolean hierarchyHasAllResources(List hierarchy, Colle return foundAllResourceKeys; } - public Set getMandatoryResourceNames(List hierarchy) { - Set result = new HashSet(hierarchy.size()); - for (RangerResourceDef resourceDef : hierarchy) { - if 
(Boolean.TRUE.equals(resourceDef.getMandatory())) { - result.add(resourceDef.getName()); - } - } - return result; - } - - /** - * Set view of a hierarchy's resource names for efficient searching - * @param hierarchy - * @return - */ - public Set getAllResourceNames(List hierarchy) { - Set result = new HashSet(hierarchy.size()); - for (RangerResourceDef resourceDef : hierarchy) { - result.add(resourceDef.getName()); - } - return result; - } - - /** - * Resources names matching the order of list of resource defs passed in. - * @param hierarchy - * @return - */ - public List getAllResourceNamesOrdered(List hierarchy) { - List result = new ArrayList(hierarchy.size()); - for (RangerResourceDef resourceDef : hierarchy) { - result.add(resourceDef.getName()); - } - return result; - } - public boolean isResourceGraphValid() { return _delegate.isResourceGraphValid(); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefValidator.java deleted file mode 100644 index 8d7a3f4acd..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceDefValidator.java +++ /dev/null @@ -1,835 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.model.validation; - -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerAccessTypeDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerDataMaskTypeDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerEnumDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerEnumElementDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerPolicyConditionDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerServiceConfigDef; -import org.apache.atlas.plugin.store.ServiceStore; -import org.apache.atlas.plugin.util.ServiceDefUtil; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -public class RangerServiceDefValidator extends RangerValidator { - - private static final Log LOG = LogFactory.getLog(RangerServiceDefValidator.class); - - public RangerServiceDefValidator(ServiceStore store) { - super(store); - } - - public void validate(final RangerServiceDef serviceDef, final Action action) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.validate(%s, %s)", serviceDef, action)); - } - - RangerServiceDef normalizedServiceDef = ServiceDefUtil.normalize(serviceDef); - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("Normalized Service Definition being validated: (%s, %s)", serviceDef, action)); - } - - List failures = new ArrayList<>(); - boolean valid = isValid(normalizedServiceDef, action, failures); - String message = ""; - try { - if (!valid) { - message = serializeFailures(failures); - throw new Exception(message); - } - } finally { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.validate(%s, %s): %s, reason[%s]", normalizedServiceDef, action, valid, message)); - } - } - } - - boolean isValid(final Long id, final Action action, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceDefValidator.isValid(" + id + ")"); - } - - boolean valid = true; - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_UNSUPPORTED_ACTION; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(action)) - .build()); - valid = false; - } else if (id == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage("id")) - .build()); - valid = false; - } else if (getServiceDef(id) == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("No service found for id[" + id + "]! 
ok!"); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceDefValidator.isValid(" + id + "): " + valid); - } - return valid; - } - - boolean isValid(final RangerServiceDef serviceDef, final Action action, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceDefValidator.isValid(" + serviceDef + ")"); - } - - if (!(action == Action.CREATE || action == Action.UPDATE)) { - throw new IllegalArgumentException("isValid(RangerServiceDef, ...) is only supported for CREATE/UPDATE"); - } - boolean valid = true; - if (serviceDef == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_NULL_SERVICE_DEF_OBJECT; - failures.add(new ValidationFailureDetailsBuilder() - .field("service def") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(action)) - .build()); - valid = false; - } else { - Long id = serviceDef.getId(); - valid = isValidServiceDefId(id, action, failures) && valid; - valid = isValidServiceDefName(serviceDef.getName(), id, action, failures) && valid; - valid = isValidServiceDefDisplayName(serviceDef.getDisplayName(), id, action, failures) && valid; - valid = isValidAccessTypes(serviceDef.getId(), serviceDef.getAccessTypes(), failures, action) && valid; - if (isValidResources(serviceDef, failures, action)) { - // Semantic check of resource graph can only be done if resources are "syntactically" valid - valid = isValidResourceGraph(serviceDef, failures) && valid; - } else { - valid = false; - } - List enumDefs = serviceDef.getEnums(); - if (isValidEnums(enumDefs, failures)) { - // config def validation requires valid enums - valid = isValidConfigs(serviceDef.getConfigs(), enumDefs, failures) && valid; - } else { - valid = false; - } - valid = isValidPolicyConditions(serviceDef.getId(), serviceDef.getPolicyConditions(), failures, action) && valid; - valid = isValidDataMaskTypes(serviceDef.getId(), serviceDef.getDataMaskDef().getMaskTypes(), failures, action) && valid; - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceDefValidator.isValid(" + serviceDef + "): " + valid); - } - return valid; - } - - boolean isValidServiceDefId(Long id, final Action action, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidServiceDefId(%s, %s, %s)", id, action, failures)); - } - boolean valid = true; - - if (action == Action.UPDATE) { // id is ignored for CREATE - if (id == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_EMPTY_SERVICE_DEF_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - } else if (getServiceDef(id) == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_INVALID_SERVICE_DEF_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(id)) - .build()); - valid = false; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidServiceDefId(%s, %s, %s): %s", id, action, failures, valid)); - } - return valid; - } - - boolean isValidServiceDefName(String name, Long id, final Action action, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidServiceDefName(%s, %s, %s, %s)", name, id, action, failures)); - } 
- boolean valid = true; - - if (StringUtils.isBlank(name)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_INVALID_SERVICE_DEF_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } else { - RangerServiceDef otherServiceDef = getServiceDef(name); - if (otherServiceDef != null && action == Action.CREATE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } else if (otherServiceDef != null && !Objects.equals(id, otherServiceDef.getId())) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_ID_NAME_CONFLICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("id/name") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name, otherServiceDef.getId())) - .build()); - valid = false; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidServiceDefName(%s, %s, %s, %s): %s", name, id, action, failures, valid)); - } - return valid; - } - - /** - * Performs all validations related to ServiceDef displayName. - * @param displayName - * @param id - * @param action - * @param failures - * @return - */ - boolean isValidServiceDefDisplayName(final String displayName, Long id, final Action action, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidServiceDefDisplayName(%s, %s, %s, %s)", displayName, id, action, failures)); - } - boolean valid = true; - - if (StringUtils.isBlank(displayName)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_INVALID_SERVICE_DEF_DISPLAY_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("displayName") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName)) - .build()); - valid = false; - } else { - RangerServiceDef otherServiceDef = getServiceDefByDisplayName(displayName); - if (otherServiceDef != null && action == Action.CREATE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF__DISPLAY_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("displayName") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName, otherServiceDef.getName())) - .build()); - valid = false; - } else if (otherServiceDef != null && !Objects.equals(id, otherServiceDef.getId())) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF__DISPLAY_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("id/displayName") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName, otherServiceDef.getName())) - .build()); - valid = false; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidServiceDefName(%s, %s, %s, %s): %s", displayName, id, action, failures, valid)); - } - return valid; - } - - boolean isValidAccessTypes(final Long serviceDefId, final List accessTypeDefs, - final List failures, final Action action) { - if(LOG.isDebugEnabled()) { - 
LOG.debug(String.format("==> RangerServiceDefValidator.isValidAccessTypes(%s, %s)", accessTypeDefs, failures)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(accessTypeDefs)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("access types") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage("access types")) - .build()); - valid = false; - } else { - Map existingAccessTypeIDNameMap = new HashMap<>(); - if (action == Action.UPDATE) { - List existingAccessTypes = this.getServiceDef(serviceDefId).getAccessTypes(); - for (RangerAccessTypeDef existingAccessType : existingAccessTypes) { - existingAccessTypeIDNameMap.put(existingAccessType.getItemId(), existingAccessType.getName()); - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("accessType names from db = " + existingAccessTypeIDNameMap.values()); - } - List defsWithImpliedGrants = new ArrayList<>(); - Set accessNames = new HashSet<>(); - Set ids = new HashSet<>(); - for (RangerAccessTypeDef def : accessTypeDefs) { - String name = def.getName(); - Long itemId = def.getItemId(); - valid = isUnique(name, accessNames, "access type name", "access types", failures) && valid; - valid = isUnique(def.getItemId(), ids, "access type itemId", "access types", failures) && valid; - if (action == Action.UPDATE) { - if (existingAccessTypeIDNameMap.get(itemId) != null && !existingAccessTypeIDNameMap.get(itemId).equals(name)) { - ValidationErrorCode error; - error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF_NAME_CONFICT; - failures.add((new ValidationFailureDetailsBuilder()).field("access type name").isSemanticallyIncorrect().errorCode(error.getErrorCode()).becauseOf(String.format("changing %s[%s] in %s is not supported", "access type name", name, "access types")).build()); - valid = false; - } - } - if (CollectionUtils.isNotEmpty(def.getImpliedGrants())) { - defsWithImpliedGrants.add(def); - } - } - // validate implied grants - for (RangerAccessTypeDef def : defsWithImpliedGrants) { - Collection impliedGrants = getImpliedGrants(def); - Set unknownAccessTypes = Sets.difference(Sets.newHashSet(impliedGrants), accessNames); - if (!unknownAccessTypes.isEmpty()) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_IMPLIED_GRANT_UNKNOWN_ACCESS_TYPE; - failures.add(new ValidationFailureDetailsBuilder() - .field("implied grants") - .subField(unknownAccessTypes.iterator().next()) // we return just on item here. Message has all unknow items - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(impliedGrants, unknownAccessTypes)) - .build()); - valid = false; - } - // implied grant should not imply itself! - String name = def.getName(); // note: this name could be null/blank/empty! 
- if (impliedGrants.contains(name)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_IMPLIED_GRANT_IMPLIES_ITSELF; - failures.add(new ValidationFailureDetailsBuilder() - .field("implied grants") - .subField(name) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(impliedGrants, name)) - .build()); - valid = false; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidAccessTypes(%s, %s): %s", accessTypeDefs, failures, valid)); - } - return valid; - } - - boolean isValidPolicyConditions(Long serviceDefId, List policyConditions, - List failures, final Action action) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidPolicyConditions(%s, %s)", policyConditions, failures)); - } - boolean valid = true; - - if (CollectionUtils.isEmpty(policyConditions)) { - LOG.debug("Configs collection was null/empty! ok"); - } else { - Map existingPolicyCondIDNameMap = new HashMap<>(); - if (action == Action.UPDATE) { - List existingPolicyConditions = this.getServiceDef(serviceDefId).getPolicyConditions(); - for (RangerPolicyConditionDef existingPolicyCondition : existingPolicyConditions) { - existingPolicyCondIDNameMap.put(existingPolicyCondition.getItemId(), existingPolicyCondition.getName()); - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("policy condition names from db = " + existingPolicyCondIDNameMap.values()); - } - Set ids = new HashSet<>(); - Set names = new HashSet<>(); - for (RangerPolicyConditionDef conditionDef : policyConditions) { - Long itemId = conditionDef.getItemId(); - valid = isUnique(itemId, ids, "policy condition def itemId", "policy condition defs", failures) && valid; - String name = conditionDef.getName(); - valid = isUnique(name, names, "policy condition def name", "policy condition defs", failures) && valid; - if (action == Action.UPDATE) { - if (existingPolicyCondIDNameMap.get(itemId) != null && !existingPolicyCondIDNameMap.get(itemId).equals(name)) { - ValidationErrorCode error; - error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF_NAME_CONFICT; - failures.add((new ValidationFailureDetailsBuilder()).field("policy condition def name").isSemanticallyIncorrect().errorCode(error.getErrorCode()).becauseOf(String.format("changing %s[%s] in %s is not supported", "policy condition def name", name, "policy condition defs")).build()); - valid = false; - } - } - if (StringUtils.isBlank(conditionDef.getEvaluator())) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_POLICY_CONDITION_NULL_EVALUATOR; - failures.add(new ValidationFailureDetailsBuilder() - .field("policy condition def evaluator") - .subField(name) - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidPolicyConditions(%s, %s): %s", policyConditions, failures, valid)); - } - return valid; - } - - boolean isValidConfigs(List configs, List enumDefs, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidConfigs(%s, %s, %s)", configs, enumDefs, failures)); - } - boolean valid = true; - - if (CollectionUtils.isEmpty(configs)) { - LOG.debug("Configs collection was null/empty! 
ok"); - } else { - Set ids = new HashSet(configs.size()); - Set names = new HashSet(configs.size()); - for (RangerServiceConfigDef aConfig : configs) { - valid = isUnique(aConfig.getItemId(), ids, "config def itemId", "config defs", failures) && valid; - String configName = aConfig.getName(); - valid = isUnique(configName, names, "config def name", "config defs", failures) && valid; - String type = aConfig.getType(); - valid = isValidConfigType(type, configName, failures) && valid; - if ("enum".equals(type)) { - valid = isValidConfigOfEnumType(aConfig, enumDefs, failures) && valid; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidConfigs(%s, %s, %s): %s", configs, enumDefs, failures, valid)); - } - return valid; - } - - boolean isValidConfigOfEnumType(RangerServiceConfigDef configDef, List enumDefs, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidConfigOfEnumType(%s, %s, %s)", configDef, enumDefs, failures)); - } - boolean valid = true; - - if (!"enum".equals(configDef.getType())) { - LOG.debug("ConfigDef wasn't of enum type!"); - } else { - Map enumDefsMap = getEnumDefMap(enumDefs); - Set enumTypes = enumDefsMap.keySet(); - String subType = configDef.getSubType(); - String configName = configDef.getName(); - - if (!enumTypes.contains(subType)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_CONFIG_DEF_UNKNOWN_ENUM; - failures.add(new ValidationFailureDetailsBuilder() - .field("config def subtype") - .subField(configName) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(subType, configName, enumTypes)) - .build()); - valid = false; - } else { - // default value check is possible only if sub-type is correctly configured - String defaultValue = configDef.getDefaultValue(); - if (StringUtils.isNotBlank(defaultValue)) { - RangerEnumDef enumDef = enumDefsMap.get(subType); - Set enumValues = getEnumValues(enumDef); - if (!enumValues.contains(defaultValue)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_CONFIG_DEF_UNKNOWN_ENUM_VALUE; - failures.add(new ValidationFailureDetailsBuilder() - .field("config def default value") - .subField(configName) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(defaultValue, configName, enumValues, subType)) - .build()); - valid = false; - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidConfigOfEnumType(%s, %s, %s): %s", configDef, enumDefs, failures, valid)); - } - return valid; - } - - boolean isValidConfigType(String type, String configName, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidConfigType(%s, %s, %s)", type, configName, failures)); - } - boolean valid = true; - - Set validTypes = ImmutableSet.of("bool", "enum", "int", "string", "password", "path"); - if (StringUtils.isBlank(type)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_CONFIG_DEF_MISSING_TYPE; - failures.add(new ValidationFailureDetailsBuilder() - .field("config def type") - .subField(configName) - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(configName)) - .build()); - valid = false; - } else if (!validTypes.contains(type)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_CONFIG_DEF_INVALID_TYPE; - 
failures.add(new ValidationFailureDetailsBuilder() - .field("config def type") - .subField(configName) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(type, configName, validTypes)) - .build()); - valid = false; - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidConfigType(%s, %s, %s): %s", type, configName, failures, valid)); - } - return valid; - } - - public boolean isValidResources(RangerServiceDef serviceDef, List failures, final Action action) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidResources(%s, %s)", serviceDef, failures)); - } - boolean valid = true; - - List resources = serviceDef.getResources(); - if (CollectionUtils.isEmpty(resources)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("resources") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage("resources")) - .build()); - valid = false; - } else { - Map existingResourceIDNameMap = new HashMap<>(); - if (action == Action.UPDATE) { - List existingResources = this.getServiceDef(serviceDef.getId()).getResources(); - for (RangerResourceDef existingResource : existingResources) { - existingResourceIDNameMap.put(existingResource.getItemId(), existingResource.getName()); - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("resource names from db = " + existingResourceIDNameMap.values()); - } - - Set names = new HashSet(resources.size()); - Set ids = new HashSet(resources.size()); - for (RangerResourceDef resource : resources) { - valid = isValidResourceName(resource.getName(), "resource type name", failures) && valid; - - /* - * While id is the natural key, name is a surrogate key. At several places code expects resource name to be unique within a service. - */ - String name = resource.getName(); - Long itemId = resource.getItemId(); - valid = isUnique(name, names, "resource name", "resources", failures) && valid; - valid = isUnique(itemId, ids, "resource itemId", "resources", failures) && valid; - if (action == Action.UPDATE) { - if (existingResourceIDNameMap.get(itemId) != null && !existingResourceIDNameMap.get(itemId).equals(name)) { - ValidationErrorCode error; - error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF_NAME_CONFICT; - failures.add((new ValidationFailureDetailsBuilder()).field("resource name").isSemanticallyIncorrect().errorCode(error.getErrorCode()).becauseOf(String.format("changing %s[%s] in %s is not supported", "resource name", name, "resources")).build()); - valid = false; - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidResources(%s, %s): %s", serviceDef, failures, valid)); - } - return valid; - } - - boolean isValidResourceGraph(RangerServiceDef serviceDef, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidResourceGraph(%s, %s)", serviceDef, failures)); - } - boolean valid = true; - // We don't want this helper to get into the cache or to use what is in the cache!! 
- RangerServiceDefHelper defHelper = _factory.createServiceDefHelper(serviceDef, false); - if (!defHelper.isResourceGraphValid()) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_RESOURCE_GRAPH_INVALID; - failures.add(new ValidationFailureDetailsBuilder() - .field("resource graph") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - } - // resource level should be unique within a hierarchy - for(String policyType : RangerPolicy.POLICY_TYPES) { - Set> hierarchies = defHelper.getResourceHierarchies(policyType); - for (List aHierarchy : hierarchies) { - Set levels = new HashSet(aHierarchy.size()); - for (RangerResourceDef resourceDef : aHierarchy) { - valid = isUnique(resourceDef.getLevel(), levels, "resource level", "resources", failures) && valid; - } - - // Ensure that aHierarchy contains resource-defs with increasing level values - int lastResourceLevel = Integer.MIN_VALUE; - for (RangerResourceDef resourceDef : aHierarchy) { - Integer resourceDefLevel = resourceDef.getLevel(); - if (resourceDefLevel == null || resourceDefLevel < lastResourceLevel) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_INVALID_SERVICE_RESOURCE_LEVELS; - failures.add(new ValidationFailureDetailsBuilder() - .field("resource level") - .subField(String.valueOf(resourceDefLevel)) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - break; - } else { - lastResourceLevel = resourceDef.getLevel(); - } - } - } - } - // If a resource is not mandatory, then it cannot be non-leaf in any hierarchy (RANGER-2207) - List resources = serviceDef.getResources(); - List resourceNames = new ArrayList<>(resources.size()); - for (RangerResourceDef resourceDef : resources) { - resourceNames.add(resourceDef.getName()); - } - for (String resourceName : resourceNames) { - for (String policyType : RangerPolicy.POLICY_TYPES) { - Set> hierarchies = defHelper.getResourceHierarchies(policyType); - for (List aHierarchy : hierarchies) { - boolean foundOptionalResource = false; - for (RangerResourceDef resourceDef : aHierarchy) { - if (!foundOptionalResource) { - if (resourceDef.getName().equalsIgnoreCase(resourceName) && !Boolean.TRUE.equals(resourceDef.getMandatory())) { - foundOptionalResource = true; - } - } else { - if (Boolean.TRUE.equals(resourceDef.getMandatory())) { - valid = false; - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_INVALID_MANADORY_VALUE_FOR_SERVICE_RESOURCE; - failures.add(new ValidationFailureDetailsBuilder() - .field(resourceDef.getName()) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(resourceDef.getName(), resourceName)) - .build()); - } - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidResourceGraph(%s, %s): %s", serviceDef, failures, valid)); - } - return valid; - } - - boolean isValidEnums(List enumDefs, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidEnums(%s, %s)", enumDefs, failures)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(enumDefs)) { - LOG.debug("enum def collection passed in was null/empty. 
Ok."); - } else { - Set names = new HashSet<>(); - Set ids = new HashSet<>(); - for (RangerEnumDef enumDef : enumDefs) { - if (enumDef == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_ENUM_DEF_NULL_OBJECT; - failures.add(new ValidationFailureDetailsBuilder() - .field("enum def") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - } else { - // enum-names and ids must non-blank and be unique to a service definition - String enumName = enumDef.getName(); - valid = isUnique(enumName, names, "enum def name", "enum defs", failures) && valid; - valid = isUnique(enumDef.getItemId(), ids, "enum def itemId", "enum defs", failures) && valid; - // enum must contain at least one valid value and those values should be non-blank and distinct - if (CollectionUtils.isEmpty(enumDef.getElements())) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_ENUM_DEF_NO_VALUES; - failures.add(new ValidationFailureDetailsBuilder() - .field("enum values") - .subField(enumName) - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(enumName)) - .build()); - valid = false; - } else { - valid = isValidEnumElements(enumDef.getElements(), failures, enumName) && valid; - // default index should be valid - int defaultIndex = getEnumDefaultIndex(enumDef); - if (defaultIndex < 0 || defaultIndex >= enumDef.getElements().size()) { // max index is one less than the size of the elements list - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_ENUM_DEF_INVALID_DEFAULT_INDEX; - failures.add(new ValidationFailureDetailsBuilder() - .field("enum default index") - .subField(enumName) - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(defaultIndex, enumName)) - .build()); - valid = false; - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidEnums(%s, %s): %s", enumDefs, failures, valid)); - } - return valid; - } - - boolean isValidEnumElements(List enumElementsDefs, List failures, String enumName) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidEnumElements(%s, %s)", enumElementsDefs, failures)); - } - - boolean valid = true; - if (CollectionUtils.isEmpty(enumElementsDefs)) { - LOG.debug("Enum elements list passed in was null/empty!"); - } else { - // enum element names should be valid and distinct - Set elementNames = new HashSet<>(); - Set ids = new HashSet<>(); - for (RangerEnumElementDef elementDef : enumElementsDefs) { - if (elementDef == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_ENUM_DEF_NULL_ENUM_ELEMENT; - failures.add(new ValidationFailureDetailsBuilder() - .field("enum element") - .subField(enumName) - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(enumName)) - .build()); - valid = false; - } else { - valid = isUnique(elementDef.getName(), enumName, elementNames, "enum element name", "enum elements", failures) && valid; - valid = isUnique(elementDef.getItemId(), enumName, ids, "enum element itemId", "enum elements", failures) && valid; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidEnumElements(%s, %s): %s", enumElementsDefs, failures, valid)); - } - return valid; - } - - boolean isValidDataMaskTypes(Long serviceDefId, List dataMaskTypes, List failures, final Action 
action) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValidDataMaskTypes(%s, %s)", dataMaskTypes, failures)); - } - boolean valid = true; - - if (CollectionUtils.isEmpty(dataMaskTypes)) { - LOG.debug("Configs collection was null/empty! ok"); - } else { - Map existingDataMaskTypeIDNameMap = new HashMap<>(); - if (action == Action.UPDATE) { - List existingDataMaskTypes = this.getServiceDef(serviceDefId).getDataMaskDef().getMaskTypes(); - for (RangerDataMaskTypeDef existingDataMaskType : existingDataMaskTypes) { - existingDataMaskTypeIDNameMap.put(existingDataMaskType.getItemId(), existingDataMaskType.getName()); - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("data mask type names from db = " + existingDataMaskTypeIDNameMap.values()); - } - - Set ids = new HashSet(); - Set names = new HashSet(); - for (RangerDataMaskTypeDef dataMaskType : dataMaskTypes) { - String name = dataMaskType.getName(); - Long itemId = dataMaskType.getItemId(); - valid = isUnique(itemId, ids, "data mask type def itemId", "data mask type defs", failures) && valid; - valid = isUnique(name, names, "data mask type def name", "data mask type defs", failures) && valid; - if (action == Action.UPDATE) { - if (existingDataMaskTypeIDNameMap.get(itemId) != null && !existingDataMaskTypeIDNameMap.get(itemId).equals(name)) { - ValidationErrorCode error; - error = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_SERVICE_DEF_NAME_CONFICT; - failures.add((new ValidationFailureDetailsBuilder()).field("data mask type def name").isSemanticallyIncorrect().errorCode(error.getErrorCode()).becauseOf(String.format("changing %s[%s] in %s is not supported", "data mask type def name", name, "data mask type defs")).build()); - valid = false; - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceDefValidator.isValidDataMaskTypes(%s, %s): %s", dataMaskTypes, failures, valid)); - } - return valid; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceValidator.java deleted file mode 100644 index 5a1e88d54a..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerServiceValidator.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.model.validation; - -import com.google.common.collect.Sets; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; -import org.apache.atlas.plugin.store.ServiceStore; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.regex.Pattern; - -public class RangerServiceValidator extends RangerValidator { - private static final Log LOG = LogFactory.getLog(RangerServiceValidator.class); - - private static final Pattern SERVICE_NAME_VALIDATION_REGEX = Pattern.compile("^[a-zA-Z0-9_-][a-zA-Z0-9_-]{0,254}", Pattern.CASE_INSENSITIVE); - private static final Pattern LEGACY_SERVICE_NAME_VALIDATION_REGEX = Pattern.compile("^[a-zA-Z0-9_-][a-zA-Z0-9\\s_-]{0,254}", Pattern.CASE_INSENSITIVE); - private static final Pattern SERVICE_DISPLAY_NAME_VALIDATION_REGEX = Pattern.compile("^[a-zA-Z0-9_-][a-zA-Z0-9\\s_-]{0,254}", Pattern.CASE_INSENSITIVE); - - public RangerServiceValidator(ServiceStore store) { - super(store); - } - - public void validate(RangerService service, Action action) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceValidator.validate(%s, %s)", service, action)); - } - List failures = new ArrayList<>(); - boolean valid = isValid(service, action, failures); - String message = ""; - try { - if (!valid) { - message = serializeFailures(failures); - throw new Exception(message); - } - } finally { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("<== RangerServiceValidator.validate(%s, %s): %s, reason[%s]", service, action, valid, message)); - } - } - } - - boolean isValid(Long id, Action action, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceValidator.isValid(" + id + ")"); - } - - boolean valid = true; - if (action != Action.DELETE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_UNSUPPORTED_ACTION; - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(action)) - .build()); - valid = false; - } else if (id == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_MISSING_FIELD; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(id)) - .build()); - valid = false; - } else if (getService(id) == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("No service found for id[" + id + "]! ok!"); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceValidator.isValid(" + id + "): " + valid); - } - return valid; - } - - boolean isValid(RangerService service, Action action, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceValidator.isValid(" + service + ")"); - } - if (!(action == Action.CREATE || action == Action.UPDATE)) { - throw new IllegalArgumentException("isValid(RangerService, ...) 
is only supported for CREATE/UPDATE"); - } - boolean valid = true; - if (service == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_NULL_SERVICE_OBJECT; - failures.add(new ValidationFailureDetailsBuilder() - .field("service") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - } else { - Long id = service.getId(); - if (action == Action.UPDATE) { // id is ignored for CREATE - if (id == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_EMPTY_SERVICE_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage()) - .build()); - valid = false; - } else if (getService(id) == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_INVALID_SERVICE_ID; - failures.add(new ValidationFailureDetailsBuilder() - .field("id") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(id)) - .build()); - valid = false; - } - } - String name = service.getName(); - boolean nameSpecified = StringUtils.isNotBlank(name); - RangerServiceDef serviceDef = null; - if (!nameSpecified) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_INVALID_SERVICE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } else { - Pattern serviceNameRegex = SERVICE_NAME_VALIDATION_REGEX; - if (action == Action.UPDATE) { - RangerService rangerService = getService(service.getId()); - if (rangerService != null && StringUtils.isNotBlank(rangerService.getName()) && rangerService.getName().contains(" ")) { - //RANGER-2808 Support for space in services created with space in earlier version - serviceNameRegex = LEGACY_SERVICE_NAME_VALIDATION_REGEX; - } - } - - if(!isValidString(serviceNameRegex, name)){ - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_SPECIAL_CHARACTERS_SERVICE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - }else{ - RangerService otherService = getService(name); - if (otherService != null && action == Action.CREATE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_SERVICE_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("name") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name)) - .build()); - valid = false; - } else if (otherService != null && otherService.getId() !=null && !otherService.getId().equals(id)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_ID_NAME_CONFLICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("id/name") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(name, otherService.getId())) - .build()); - valid = false; - } - } - } - // Display name - String displayName = service.getDisplayName(); - if(!isValidString(SERVICE_DISPLAY_NAME_VALIDATION_REGEX, displayName)){ - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_SPECIAL_CHARACTERS_SERVICE_DISPLAY_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .field("displayName") - .isSemanticallyIncorrect() - 
.errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName)) - .build()); - valid = false; - }else{ - RangerService otherService = getServiceByDisplayName(displayName); - if (otherService != null && action == Action.CREATE) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_SERVICE_DISPLAY_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("displayName") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName, otherService.getName())) - .build()); - valid = false; - } else if (otherService != null && otherService.getId() !=null && !otherService.getId().equals(id)) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_SERVICE_DISPLAY_NAME_CONFICT; - failures.add(new ValidationFailureDetailsBuilder() - .field("id/displayName") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(displayName, otherService.getName())) - .build()); - valid = false; - } - } - String type = service.getType(); - boolean typeSpecified = StringUtils.isNotBlank(type); - if (!typeSpecified) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_MISSING_SERVICE_DEF; - failures.add(new ValidationFailureDetailsBuilder() - .field("type") - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(type)) - .build()); - valid = false; - } else { - serviceDef = getServiceDef(type); - if (serviceDef == null) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_INVALID_SERVICE_DEF; - failures.add(new ValidationFailureDetailsBuilder() - .field("type") - .isSemanticallyIncorrect() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(type)) - .build()); - valid = false; - } - } - if (nameSpecified && serviceDef != null) { - // check if required parameters were specified - Set reqiredParameters = getRequiredParameters(serviceDef); - Set inputParameters = getServiceConfigParameters(service); - Set missingParameters = Sets.difference(reqiredParameters, inputParameters); - if (!missingParameters.isEmpty()) { - ValidationErrorCode error = ValidationErrorCode.SERVICE_VALIDATION_ERR_REQUIRED_PARM_MISSING; - failures.add(new ValidationFailureDetailsBuilder() - .field("configuration") - .subField(missingParameters.iterator().next()) // we return any one parameter! - .isMissing() - .errorCode(error.getErrorCode()) - .becauseOf(error.getMessage(missingParameters)) - .build()); - valid = false; - } - } - String tagServiceName = service.getTagService(); - - if (StringUtils.isNotBlank(tagServiceName) && StringUtils.equals(type, EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME)) { - failures.add(new ValidationFailureDetailsBuilder() - .field("tag_service") - .isSemanticallyIncorrect() - .becauseOf("tag service cannot be part of any other service") - .build()); - valid = false; - } - - boolean needToEnsureServiceType = false; - if (action == Action.UPDATE) { - RangerService otherService = getService(name); - String otherTagServiceName = otherService == null ? 
null : otherService.getTagService(); - - if (StringUtils.isNotBlank(tagServiceName)) { - if (!StringUtils.equals(tagServiceName, otherTagServiceName)) { - needToEnsureServiceType = true; - } - } - } else { // action == Action.CREATE - if (StringUtils.isNotBlank(tagServiceName)) { - needToEnsureServiceType = true; - } - } - if (needToEnsureServiceType) { - RangerService maybeTagService = getService(tagServiceName); - if (maybeTagService == null || !StringUtils.equals(maybeTagService.getType(), EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME)) { - failures.add(new ValidationFailureDetailsBuilder() - .field("tag_service") - .isSemanticallyIncorrect() - .becauseOf("tag service name does not refer to existing tag service:" + tagServiceName) - .build()); - valid = false; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceValidator.isValid(" + service + "): " + valid); - } - return valid; - } - - public boolean isValidString(final Pattern pattern, final String name) { - return pattern != null && StringUtils.isNotBlank(name) && pattern.matcher(name).matches(); - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerValidator.java deleted file mode 100644 index b19af0cddd..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/model/validation/RangerValidator.java +++ /dev/null @@ -1,866 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.model.validation; - - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.errors.ValidationErrorCode; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerAccessTypeDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerEnumDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerEnumElementDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerServiceConfigDef; -import org.apache.atlas.plugin.store.RoleStore; -import org.apache.atlas.plugin.store.ServiceStore; -import org.apache.atlas.plugin.util.RangerObjectFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public abstract class RangerValidator { - - private static final Log LOG = LogFactory.getLog(RangerValidator.class); - - RoleStore _roleStore; - ServiceStore _store; - RangerObjectFactory _factory = new RangerObjectFactory(); - - public enum Action { - CREATE, UPDATE, DELETE; - }; - - protected RangerValidator(ServiceStore store) { - if (store == null) { - throw new IllegalArgumentException("ServiceValidator(): store is null!"); - } - _store = store; - } - - protected RangerValidator(RoleStore roleStore) { - if (roleStore == null) { - throw new IllegalArgumentException("ServiceValidator(): store is null!"); - } - _roleStore = roleStore; - } - - public void validate(Long id, Action action) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.validate(" + id + ")"); - } - - List failures = new ArrayList<>(); - if (isValid(id, action, failures)) { - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.validate(" + id + "): valid"); - } - } else { - String message = serializeFailures(failures); - LOG.debug("<== RangerValidator.validate(" + id + "): invalid, reason[" + message + "]"); - throw new Exception(message); - } - } - public void validate(String name, Action action) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.validate(" + name + ")"); - } - - List failures = new ArrayList<>(); - if (isValid(name, action, failures)) { - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.validate(" + name + "): valid"); - } - } else { - String message = serializeFailures(failures); - LOG.debug("<== RangerValidator.validate(" + name + "): invalid, reason[" + message + "]"); - throw new Exception(message); - } - } - - /** - * This method is expected to be overridden by sub-classes. Default implementation provided to not burden implementers from having to implement methods that they know would never be called. 
- * @param id - * @param action - * @param failures - * @return - */ - boolean isValid(Long id, Action action, List failures) { - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .becauseOf("unimplemented method called") - .build()); - return false; - } - - boolean isValid(String name, Action action, List failures) { - failures.add(new ValidationFailureDetailsBuilder() - .isAnInternalError() - .becauseOf("unimplemented method called") - .build()); - return false; - } - - public static String serializeFailures(List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getFailureMessage()"); - } - - String message = null; - if (CollectionUtils.isEmpty(failures)) { - LOG.warn("serializeFailures: called while list of failures is null/empty!"); - } else { - StringBuilder builder = new StringBuilder(); - for (int i = 0; i < failures.size(); i++) { - builder.append(String.format("(%d)", i)); - builder.append(failures.get(i)); - builder.append(" "); - } - message = builder.toString(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.serializeFailures(): " + message); - } - return message; - } - - Set getServiceConfigParameters(RangerService service) { - if (service == null || service.getConfigs() == null) { - return new HashSet<>(); - } else { - return service.getConfigs().keySet(); - } - } - - Set getRequiredParameters(RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getRequiredParameters(" + serviceDef + ")"); - } - - Set result; - if (serviceDef == null) { - result = Collections.emptySet(); - } else { - List configs = serviceDef.getConfigs(); - if (CollectionUtils.isEmpty(configs)) { - result = Collections.emptySet(); - } else { - result = new HashSet(configs.size()); // at worse all of the config items are required! - for (RangerServiceConfigDef configDef : configs) { - if (configDef.getMandatory()) { - result.add(configDef.getName()); - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getRequiredParameters(" + serviceDef + "): " + result); - } - return result; - } - - RangerServiceDef getServiceDef(Long id) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getServiceDef(" + id + ")"); - } - RangerServiceDef result = null; - try { - result = _store.getServiceDef(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving service def from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getServiceDef(" + id + "): " + result); - } - return result; - } - - RangerServiceDef getServiceDef(String type) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getServiceDef(" + type + ")"); - } - RangerServiceDef result = null; - try { - result = _store.getServiceDefByName(type); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving service definition from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getServiceDef(" + type + "): " + result); - } - return result; - } - - /** - * @param displayName - * @return {@link RangerServiceDef} - service using display name if present, null otherwise. 
- */ - RangerServiceDef getServiceDefByDisplayName(final String displayName) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getServiceDefByDisplayName(" + displayName + ")"); - } - RangerServiceDef result = null; - try { - result = _store.getServiceDefByDisplayName(displayName); - } catch (Exception e) { - LOG.debug("Encountered exception while retrieving service definition from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getServiceDefByDisplayName(" + displayName + "): " + result); - } - return result; - } - - RangerService getService(Long id) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getService(" + id + ")"); - } - RangerService result = null; - try { - result = _store.getService(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving service from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getService(" + id + "): " + result); - } - return result; - } - - RangerService getService(String name) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getService(" + name + ")"); - } - RangerService result = null; - try { - result = _store.getServiceByName(name); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving service from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getService(" + name + "): " + result); - } - return result; - } - - RangerService getServiceByDisplayName(final String displayName) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getService(" + displayName + ")"); - } - RangerService result = null; - try { - result = _store.getServiceByDisplayName(displayName); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving service from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getService(" + displayName + "): " + result); - } - return result; - } - - boolean policyExists(Long id) { - try { - return _store.policyExists(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving policy from service store!", e); - return false; - } - } - - RangerPolicy getPolicy(Long id) { - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getPolicy(" + id + ")"); - } - RangerPolicy result = null; - try { - result = _store.getPolicy(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving policy from service store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getPolicy(" + id + "): " + result); - } - return result; - } - - Long getPolicyId(final Long serviceId, final String policyName, final Long zoneId) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getPolicyId(" + serviceId + ", " + policyName + ", " + zoneId + ")"); - } - - Long policyId = _store.getPolicyId(serviceId, policyName, zoneId); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getPolicyId(" + serviceId + ", " + policyName + ", " + zoneId + "): policy-id[" + policyId + "]"); - } - return policyId; - } - - List getPoliciesForResourceSignature(String serviceName, String policySignature) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerValidator.getPoliciesForResourceSignature(%s, %s)", serviceName, policySignature)); - } - - List policies = null; - try { - policies = _store.getPoliciesByResourceSignature(serviceName, policySignature, true); // only look for enabled policies - } catch 
(Exception e) { - LOG.debug("Encountred exception while retrieving policies from service store!", e); - } - - if(LOG.isDebugEnabled()) { - int count = policies == null ? 0 : policies.size(); - LOG.debug(String.format("<== RangerValidator.getPoliciesForResourceSignature(%s, %s): count[%d], %s", serviceName, policySignature, count, policies)); - } - return policies; - } - - RangerSecurityZone getSecurityZone(Long id) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getSecurityZone(" + id + ")"); - } - RangerSecurityZone result = null; - - if (id != null) { - try { - result = _store.getSecurityZone(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving security zone from service store!", e); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getSecurityZone(" + id + "): " + result); - } - return result; - } - - RangerSecurityZone getSecurityZone(String name) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getSecurityZone(" + name + ")"); - } - RangerSecurityZone result = null; - - if (StringUtils.isNotEmpty(name)) { - try { - result = _store.getSecurityZone(name); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving security zone from service store!", e); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getSecurityZone(" + name + "): " + result); - } - return result; - } - Set getAccessTypes(RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getAccessTypes(" + serviceDef + ")"); - } - - Set accessTypes = new HashSet<>(); - if (serviceDef == null) { - LOG.warn("serviceDef passed in was null!"); - } else if (CollectionUtils.isEmpty(serviceDef.getAccessTypes())) { - LOG.warn("AccessTypeDef collection on serviceDef was null!"); - } else { - for (RangerAccessTypeDef accessTypeDef : serviceDef.getAccessTypes()) { - if (accessTypeDef == null) { - LOG.warn("Access type def was null!"); - } else { - String accessType = accessTypeDef.getName(); - if (StringUtils.isBlank(accessType)) { - LOG.warn("Access type def name was null/empty/blank!"); - } else { - accessTypes.add(accessType); - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getAccessTypes(" + serviceDef + "): " + accessTypes); - } - return accessTypes; - } - - /** - * This function exists to encapsulates the current behavior of code which treats and unspecified audit preference to mean audit is enabled. - * @param policy - * @return - */ - boolean getIsAuditEnabled(RangerPolicy policy) { - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getIsAuditEnabled(" + policy + ")"); - } - - boolean isEnabled = false; - if (policy == null) { - LOG.warn("policy was null!"); - } else if (policy.getIsAuditEnabled() == null) { - isEnabled = true; - } else { - isEnabled = policy.getIsAuditEnabled(); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getIsAuditEnabled(" + policy + "): " + isEnabled); - } - return isEnabled; - } - - /** - * Returns names of resource types set to lower-case to allow for case-insensitive comparison. 
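As an illustrative aside (not part of the deleted file or of this patch): the comment above describes how the removed RangerValidator helpers normalize resource-type names to lower case so that later comparisons are case-insensitive. The following minimal, self-contained Java sketch shows that pattern with hypothetical stand-in types (a plain Map of resource-name to mandatory flag instead of RangerServiceDef), not the Ranger API.

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Illustrative sketch only. The real helper walks RangerServiceDef.getResources();
// here a Map of resource-name -> mandatory flag stands in for the service definition.
public class LowerCaseResourceNames {
    static Set<String> mandatoryNamesLowerCased(Map<String, Boolean> resources) {
        Set<String> names = new HashSet<>();
        if (resources == null) {
            return names; // tolerate a missing definition, as the validator helpers do
        }
        for (Map.Entry<String, Boolean> entry : resources.entrySet()) {
            String name = entry.getKey();
            if (name != null && !name.trim().isEmpty() && Boolean.TRUE.equals(entry.getValue())) {
                names.add(name.toLowerCase()); // lower-case once, so later lookups are case-insensitive
            }
        }
        return names;
    }

    public static void main(String[] args) {
        System.out.println(mandatoryNamesLowerCased(Map.of("Database", true, "Table", false))); // [database]
    }
}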
- * @param serviceDef - * @return - */ - Set getMandatoryResourceNames(RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getMandatoryResourceNames(" + serviceDef + ")"); - } - - Set resourceNames = new HashSet<>(); - if (serviceDef == null) { - LOG.warn("serviceDef passed in was null!"); - } else if (CollectionUtils.isEmpty(serviceDef.getResources())) { - LOG.warn("ResourceDef collection on serviceDef was null!"); - } else { - for (RangerResourceDef resourceTypeDef : serviceDef.getResources()) { - if (resourceTypeDef == null) { - LOG.warn("resource type def was null!"); - } else { - Boolean mandatory = resourceTypeDef.getMandatory(); - if (mandatory != null && mandatory == true) { - String resourceName = resourceTypeDef.getName(); - if (StringUtils.isBlank(resourceName)) { - LOG.warn("Resource def name was null/empty/blank!"); - } else { - resourceNames.add(resourceName.toLowerCase()); - } - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getMandatoryResourceNames(" + serviceDef + "): " + resourceNames); - } - return resourceNames; - } - - Set getAllResourceNames(RangerServiceDef serviceDef) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getAllResourceNames(" + serviceDef + ")"); - } - - Set resourceNames = new HashSet<>(); - if (serviceDef == null) { - LOG.warn("serviceDef passed in was null!"); - } else if (CollectionUtils.isEmpty(serviceDef.getResources())) { - LOG.warn("ResourceDef collection on serviceDef was null!"); - } else { - for (RangerResourceDef resourceTypeDef : serviceDef.getResources()) { - if (resourceTypeDef == null) { - LOG.warn("resource type def was null!"); - } else { - String resourceName = resourceTypeDef.getName(); - if (StringUtils.isBlank(resourceName)) { - LOG.warn("Resource def name was null/empty/blank!"); - } else { - resourceNames.add(resourceName.toLowerCase()); - } - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getAllResourceNames(" + serviceDef + "): " + resourceNames); - } - return resourceNames; - } - - /** - * Converts, in place, the resources defined in the policy to have lower-case resource-def-names - * @param policy - * @return - */ - - void convertPolicyResourceNamesToLower(RangerPolicy policy) { - Map lowerCasePolicyResources = new HashMap<>(); - if (policy.getResources() != null) { - for (Map.Entry entry : policy.getResources().entrySet()) { - String lowerCasekey = entry.getKey().toLowerCase(); - lowerCasePolicyResources.put(lowerCasekey, entry.getValue()); - } - } - policy.setResources(lowerCasePolicyResources); - } - - Map getValidationRegExes(RangerServiceDef serviceDef) { - if (serviceDef == null || CollectionUtils.isEmpty(serviceDef.getResources())) { - return new HashMap<>(); - } else { - Map result = new HashMap<>(); - for (RangerResourceDef resourceDef : serviceDef.getResources()) { - if (resourceDef == null) { - LOG.warn("A resource def in resource def collection is null"); - } else { - String name = resourceDef.getName(); - String regEx = resourceDef.getValidationRegEx(); - if (StringUtils.isBlank(name)) { - LOG.warn("resource name is null/empty/blank"); - } else if (StringUtils.isBlank(regEx)) { - LOG.debug("validation regex is null/empty/blank"); - } else { - result.put(name, regEx); - } - } - } - return result; - } - } - - int getEnumDefaultIndex(RangerEnumDef enumDef) { - int index; - if (enumDef == null) { - index = -1; - } else if (enumDef.getDefaultIndex() == null) { - index = 0; - } else { - index = 
enumDef.getDefaultIndex(); - } - return index; - } - - Collection getImpliedGrants(RangerAccessTypeDef def) { - if (def == null) { - return null; - } else if (CollectionUtils.isEmpty(def.getImpliedGrants())) { - return new ArrayList<>(); - } else { - List result = new ArrayList(def.getImpliedGrants().size()); - for (String name : def.getImpliedGrants()) { - if (StringUtils.isBlank(name)) { - result.add(name); // could be null! - } else { - result.add(name.toLowerCase()); - } - } - return result; - } - } - - /** - * Returns a copy of the policy resource map where all keys (resource-names) are lowercase - * @param input - * @return - */ - Map getPolicyResourceWithLowerCaseKeys(Map input) { - if (input == null) { - return null; - } - Map output = new HashMap(input.size()); - for (Map.Entry entry : input.entrySet()) { - output.put(entry.getKey().toLowerCase(), entry.getValue()); - } - return output; - } - - boolean isUnique(Long value, Set alreadySeen, String valueName, String collectionName, List failures) { - return isUnique(value, null, alreadySeen, valueName, collectionName, failures); - } - - /** - * NOTE: alreadySeen collection passed in gets updated. - * @param value - * @param alreadySeen - * @param valueName - use-friendly name of the value that would be used when generating failure message - * @param collectionName - use-friendly name of the value collection that would be used when generating failure message - * @param failures - * @return - */ - boolean isUnique(Long value, String valueContext, Set alreadySeen, String valueName, String collectionName, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s)", value, alreadySeen, valueName, collectionName, failures)); - } - boolean valid = true; - - if (value == null) { // null/empty/blank value is an error - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .subField(valueContext) - .isMissing() - .becauseOf(String.format("%s[%s] is null/empty", valueName, value)) - .build()); - valid = false; - } else if (alreadySeen.contains(value)) { // it shouldn't have been seen already - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .subField(value.toString()) - .isSemanticallyIncorrect() - .becauseOf(String.format("duplicate %s[%s] in %s", valueName, value, collectionName)) - .build()); - valid = false; - } else { - alreadySeen.add(value); // we have a new unique access type - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s): %s", value, alreadySeen, valueName, collectionName, failures, valid)); - } - return valid; - } - - /** - * NOTE: alreadySeen collection passed in gets updated. 
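As an illustrative aside (not part of the deleted file or of this patch): the NOTE above documents the contract of the removed isUnique() overloads, namely that the already-seen set is mutated as values are checked, and a null or duplicate value records a validation failure. A simplified, generic sketch of that pattern follows; it uses plain strings for failures and hypothetical names instead of ValidationFailureDetailsBuilder.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative sketch only: a generic version of the "already seen" uniqueness check
// used by the deleted validators. Failure reporting is reduced to plain strings.
public class UniquenessCheck {
    static <T> boolean isUnique(T value, Set<T> alreadySeen, String valueName, List<String> failures) {
        if (value == null) {
            failures.add(valueName + " is null/empty");
            return false;
        }
        if (!alreadySeen.add(value)) {          // Set.add() returns false if the value was already present
            failures.add("duplicate " + valueName + "[" + value + "]");
            return false;
        }
        return true;                            // note: alreadySeen was updated as a side effect
    }

    public static void main(String[] args) {
        Set<Long> seenIds = new HashSet<>();
        List<String> failures = new ArrayList<>();
        boolean valid = true;
        for (long itemId : new long[] {1L, 2L, 2L}) {
            valid = isUnique(itemId, seenIds, "itemId", failures) && valid;
        }
        System.out.println(valid + " " + failures); // false [duplicate itemId[2]]
    }
}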
- * @param value - * @param alreadySeen - * @param valueName - use-friendly name of the value that would be used when generating failure message - * @param collectionName - use-friendly name of the value collection that would be used when generating failure message - * @param failures - * @return - */ - boolean isUnique(Integer value, Set alreadySeen, String valueName, String collectionName, List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s)", value, alreadySeen, valueName, collectionName, failures)); - } - boolean valid = true; - - if (value == null) { // null/empty/blank value is an error - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .isMissing() - .becauseOf(String.format("%s[%s] is null/empty", valueName, value)) - .build()); - valid = false; - } else if (alreadySeen.contains(value)) { // it shouldn't have been seen already - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .subField(value.toString()) - .isSemanticallyIncorrect() - .becauseOf(String.format("duplicate %s[%s] in %s", valueName, value, collectionName)) - .build()); - valid = false; - } else { - alreadySeen.add(value); // we have a new unique access type - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s): %s", value, alreadySeen, valueName, collectionName, failures, valid)); - } - return valid; - } - - /* - * Important: Resource-names are required to be lowercase. This is used in validating policy create/update operations. - * Ref: RANGER-2272 - */ - boolean isValidResourceName(final String value, final String valueContext, final List failures) { - boolean ret = true; - - if (value != null && !StringUtils.isEmpty(value)) { - int sz = value.length(); - - for(int i = 0; i < sz; ++i) { - char c = value.charAt(i); - if (!(Character.isLowerCase(c) || c == '-' || c == '_')) { // Allow only lowercase, hyphen or underscore characters - ret = false; - break; - } - } - } else { - ret = false; - } - if (!ret) { - ValidationErrorCode errorCode = ValidationErrorCode.SERVICE_DEF_VALIDATION_ERR_NOT_LOWERCASE_NAME; - failures.add(new ValidationFailureDetailsBuilder() - .errorCode(errorCode.getErrorCode()) - .field(value) - .becauseOf(errorCode.getMessage(valueContext, value)) - .build()); - } - return ret; - } - - boolean isUnique(final String value, final Set alreadySeen, final String valueName, final String collectionName, final List failures) { - return isUnique(value, null, alreadySeen, valueName, collectionName, failures); - } - /** - * NOTE: alreadySeen collection passed in gets updated. 
- * @param value - * @param alreadySeen - * @param valueName - use-friendly name of the value that would be used when generating failure message - * @param collectionName - use-friendly name of the value collection that would be used when generating failure message - * @param failures - * @return - */ - boolean isUnique(final String value, final String valueContext, final Set alreadySeen, final String valueName, final String collectionName, final List failures) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s)", value, alreadySeen, valueName, collectionName, failures)); - } - boolean valid = true; - - if (StringUtils.isBlank(value)) { // null/empty/blank value is an error - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .subField(valueContext) - .isMissing() - .becauseOf(String.format("%s[%s] is null/empty", valueName, value)) - .build()); - valid = false; - } else if (alreadySeen.contains(value.toLowerCase())) { // it shouldn't have been seen already - failures.add(new ValidationFailureDetailsBuilder() - .field(valueName) - .subField(value) - .isSemanticallyIncorrect() - .becauseOf(String.format("duplicate %s[%s] in %s", valueName, value, collectionName)) - .build()); - valid = false; - } else { - alreadySeen.add(value.toLowerCase()); // we have a new unique access type - } - - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("==> RangerServiceDefValidator.isValueUnique(%s, %s, %s, %s, %s): %s", value, alreadySeen, valueName, collectionName, failures, valid)); - } - return valid; - } - - Map getEnumDefMap(List enumDefs) { - Map result = new HashMap<>(); - if (enumDefs != null) { - for (RangerEnumDef enumDef : enumDefs) { - result.put(enumDef.getName(), enumDef); - } - } - return result; - } - - Set getEnumValues(RangerEnumDef enumDef) { - Set result = new HashSet<>(); - if (enumDef != null) { - for (RangerEnumElementDef element : enumDef.getElements()) { - result.add(element.getName()); - } - } - return result; - } - - static Map createMap(int[][] data) { - Map result = new HashMap<>(); - if (data != null) { - for (int[] row : data) { - Integer key = row[0]; - Integer value = row[1]; - if (result.containsKey(key)) { - LOG.warn("createMap: Internal error: duplicate key: multiple rows found for [" + key + "]. Skipped"); - } else { - result.put(key, value); - } - } - } - return result; - } - - static Map createMap(Object[][] data) { - Map result = new HashMap<>(); - if (data != null) { - for (Object[] row : data) { - Integer key = (Integer)row[0]; - String value = (String)row[1]; - if (key == null) { - LOG.warn("createMap: error converting key[" + row[0] + "] to Integer! Sipped!"); - } else if (StringUtils.isEmpty(value)) { - LOG.warn("createMap: empty/null value. Skipped!"); - } else if (result.containsKey(key)) { - LOG.warn("createMap: Internal error: duplicate key. Multiple rows found for [" + key + "]. 
Skipped"); - } else { - result.put(key, value); - } - } - } - return result; - } - - RangerRole getRangerRole(Long id) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerValidator.getRangerRole(" + id + ")"); - } - RangerRole result = null; - try { - result = _roleStore.getRole(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving RangerRole from RoleStore store!", e); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerValidator.getRangerRole(" + id + "): " + result); - } - return result; - } - - boolean roleExists(Long id) { - try { - return _roleStore.roleExists(id); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving RangerRole from role store!", e); - return false; - } - } - - boolean roleExists(String name) { - try { - return _roleStore.roleExists(name); - } catch (Exception e) { - LOG.debug("Encountred exception while retrieving RangerRole from role store!", e); - return false; - } - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/PolicyEngine.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/PolicyEngine.java index 6cd52d2692..e35bac61e4 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/PolicyEngine.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/PolicyEngine.java @@ -35,7 +35,7 @@ import org.apache.atlas.plugin.policyresourcematcher.RangerPolicyResourceMatcher; import org.apache.atlas.plugin.resourcematcher.RangerAbstractResourceMatcher; import org.apache.atlas.plugin.service.RangerAuthContext; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; +import org.apache.atlas.plugin.store.ServiceDefsUtil; import org.apache.atlas.plugin.util.RangerPerfTracer; import org.apache.atlas.plugin.util.RangerPolicyDeltaUtil; import org.apache.atlas.plugin.util.RangerReadWriteLock; @@ -704,7 +704,7 @@ private void buildZoneTrie(ServicePolicies servicePolicies) { List resourceValues = entry.getValue(); RangerPolicy.RangerPolicyResource policyResource = new RangerPolicy.RangerPolicyResource(); policyResource.setIsExcludes(false); - policyResource.setIsRecursive(EmbeddedServiceDefsUtil.isRecursiveEnabled(serviceDef, resourceDefName)); + policyResource.setIsRecursive(ServiceDefsUtil.isRecursiveEnabled(serviceDef, resourceDefName)); policyResource.setValues(resourceValues); policyResources.put(resourceDefName, policyResource); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPluginContext.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPluginContext.java index 9abcb8bd95..aed2c76875 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPluginContext.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPluginContext.java @@ -24,8 +24,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.admin.client.RangerAdminRESTClient; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.service.RangerAuthContext; import org.apache.atlas.plugin.service.RangerAuthContextListener; @@ -85,7 +84,7 @@ private AtlasAuthAdminClient initAtlasAuthAdminClient() { if(StringUtils.isEmpty(policySourceImpl)) { if (LOG.isDebugEnabled()) { - 
LOG.debug(String.format("Value for property[%s] was null or empty. Unexpected! Will use policy source of type[%s]", propertyName, RangerAdminRESTClient.class.getName())); + LOG.error(String.format("Value for property[%s] was null or empty", propertyName)); } } else { if (LOG.isDebugEnabled()) { @@ -98,7 +97,7 @@ private AtlasAuthAdminClient initAtlasAuthAdminClient() { ret = adminClass.newInstance(); } catch (Exception excp) { - LOG.error("failed to instantiate policy source of type '" + policySourceImpl + "'. Will use policy source of type '" + RangerAdminRESTClient.class.getName() + "'", excp); + LOG.error("failed to instantiate policy source of type '" + policySourceImpl + "'", excp); } } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java index 90d18bf752..02e845a6fb 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyEngineImpl.java @@ -26,8 +26,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.authorization.utils.StringUtil; +import org.apache.atlas.authorization.config.RangerPluginConfig; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.atlas.plugin.contextenricher.RangerTagForEval; import org.apache.atlas.plugin.model.RangerPolicy; import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItemDataMaskInfo; @@ -1299,12 +1299,12 @@ private static class ServiceConfig { public ServiceConfig(Map svcConfig) { if (svcConfig != null) { - auditExcludedUsers = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_USERS)); - auditExcludedGroups = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_GROUPS)); - auditExcludedRoles = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_ROLES)); - superUsers = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SUPER_USERS)); - superGroups = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SUPER_GROUPS)); - serviceAdmins = StringUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SERVICE_ADMINS)); + auditExcludedUsers = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_USERS)); + auditExcludedGroups = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_GROUPS)); + auditExcludedRoles = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_AUDIT_EXCLUDE_ROLES)); + superUsers = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SUPER_USERS)); + superGroups = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SUPER_GROUPS)); + serviceAdmins = RangerUtil.toSet(svcConfig.get(RangerPolicyEngine.PLUGIN_SERVICE_ADMINS)); } else { auditExcludedUsers = Collections.emptySet(); auditExcludedGroups = Collections.emptySet(); diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyRepository.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyRepository.java index 5b3b469fb0..f8e7ed0f6f 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyRepository.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/policyengine/RangerPolicyRepository.java @@
-19,12 +19,13 @@ package org.apache.atlas.plugin.policyengine; +import org.apache.atlas.authorization.utils.RangerAtlasConstants; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.utils.JsonUtils; import org.apache.atlas.plugin.contextenricher.RangerAbstractContextEnricher; import org.apache.atlas.plugin.contextenricher.RangerContextEnricher; import org.apache.atlas.plugin.contextenricher.RangerTagEnricher; @@ -40,7 +41,6 @@ import org.apache.atlas.plugin.policyevaluator.RangerCachedPolicyEvaluator; import org.apache.atlas.plugin.policyevaluator.RangerOptimizedPolicyEvaluator; import org.apache.atlas.plugin.policyevaluator.RangerPolicyEvaluator; -import org.apache.atlas.plugin.store.AbstractServiceStore; import org.apache.atlas.plugin.util.RangerPerfTracer; import org.apache.atlas.plugin.util.ServiceDefUtil; import org.apache.atlas.plugin.util.ServicePolicies; @@ -308,7 +308,7 @@ private List buildAuditPolicyEvaluators(Map auditFilters = JsonUtils.jsonToAuditFilterList(jsonStr); + List auditFilters = RangerUtil.jsonToAuditFilterList(jsonStr); int filterCount = auditFilters != null ? auditFilters.size() : 0; if (filterCount > 0) { @@ -822,7 +822,7 @@ private boolean isPolicyNeedsPruning(RangerPolicy policy, final String component private List normalizeAndPrunePolicyItems(List policyItems, final String componentType) { if(CollectionUtils.isNotEmpty(policyItems)) { - final String prefix = componentType + AbstractServiceStore.COMPONENT_ACCESSTYPE_SEPARATOR; + final String prefix = componentType + RangerAtlasConstants.COMPONENT_ACCESSTYPE_SEPARATOR; List itemsToPrune = null; for (RangerPolicy.RangerPolicyItem policyItem : policyItems) { @@ -838,7 +838,7 @@ private List normalizeAndPrunePolicyIte String newAccessType = StringUtils.removeStart(accessType, prefix); access.setType(newAccessType); - } else if (accessType.contains(AbstractServiceStore.COMPONENT_ACCESSTYPE_SEPARATOR)) { + } else if (accessType.contains(RangerAtlasConstants.COMPONENT_ACCESSTYPE_SEPARATOR)) { if(accessesToPrune == null) { accessesToPrune = new ArrayList<>(); } @@ -868,7 +868,7 @@ private List normalizeAndPrunePolicyIte if (StringUtils.startsWith(maskType, prefix)) { dataMaskInfo.setDataMaskType(StringUtils.removeStart(maskType, prefix)); - } else if (maskType.contains(AbstractServiceStore.COMPONENT_ACCESSTYPE_SEPARATOR)) { + } else if (maskType.contains(RangerAtlasConstants.COMPONENT_ACCESSTYPE_SEPARATOR)) { if (itemsToPrune == null) { itemsToPrune = new ArrayList<>(); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java index b224cccc7e..3a3a252cca 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBasePlugin.java @@ -25,39 +25,30 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.admin.client.RangerAdminClient; -import org.apache.atlas.admin.client.RangerAdminRESTClient; import org.apache.atlas.audit.provider.AuditHandler; import org.apache.atlas.audit.provider.AuditProviderFactory; 
-import org.apache.atlas.audit.provider.StandAloneAuditProviderFactory; -import org.apache.atlas.authorization.hadoop.config.RangerAuditConfig; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; -import org.apache.atlas.authorization.utils.StringUtil; +import org.apache.atlas.authorization.config.RangerPluginConfig; +import org.apache.atlas.authorization.utils.RangerUtil; import org.apache.atlas.plugin.conditionevaluator.RangerScriptExecutionContext; import org.apache.atlas.plugin.contextenricher.RangerContextEnricher; import org.apache.atlas.plugin.contextenricher.RangerTagEnricher; import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerRole; import org.apache.atlas.plugin.model.RangerServiceDef; import org.apache.atlas.plugin.policyengine.RangerAccessRequest; -import org.apache.atlas.plugin.policyengine.RangerAccessRequestImpl; -import org.apache.atlas.plugin.policyengine.RangerAccessResourceImpl; import org.apache.atlas.plugin.policyengine.RangerAccessResult; import org.apache.atlas.plugin.policyengine.RangerAccessResultProcessor; import org.apache.atlas.plugin.policyengine.RangerPluginContext; import org.apache.atlas.plugin.policyengine.RangerPolicyEngine; import org.apache.atlas.plugin.policyengine.RangerPolicyEngineImpl; import org.apache.atlas.plugin.policyengine.RangerResourceACLs; -import org.apache.atlas.plugin.policyengine.RangerResourceAccessInfo; import org.apache.atlas.plugin.policyevaluator.RangerPolicyEvaluator; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; +import org.apache.atlas.plugin.store.ServiceDefsUtil; import org.apache.atlas.plugin.util.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; @@ -72,7 +63,6 @@ public class RangerBasePlugin { private final RangerPluginContext pluginContext; private final Map logHistoryList = new Hashtable<>(); private final int logInterval = 30000; // 30 seconds - private final DownloadTrigger accessTrigger = new DownloadTrigger(); private PolicyRefresher refresher; private RangerPolicyEngine policyEngine; private RangerAuthContext currentAuthContext; @@ -117,25 +107,6 @@ public RangerBasePlugin(RangerPluginConfig pluginConfig) { this.chainedPlugins = initChainedPlugins(); } - public RangerBasePlugin(RangerPluginConfig pluginConfig, ServicePolicies policies, ServiceTags tags, RangerRoles roles) { - this(pluginConfig); - - init(); - - setPolicies(policies); - setRoles(roles); - - if (tags != null) { - RangerTagEnricher tagEnricher = getTagEnricher(); - - if (tagEnricher != null) { - tagEnricher.setServiceTags(tags); - } else { - LOG.warn("RangerBasePlugin(tagsVersion=" + tags.getTagVersion() + "): no tag enricher found. 
Plugin will not enforce tag-based policies"); - } - } - } - public static AuditHandler getAuditProvider(String serviceName) { AuditProviderFactory providerFactory = RangerBasePlugin.getAuditProviderFactory(serviceName); AuditHandler ret = providerFactory.getAuditProvider(); @@ -234,8 +205,6 @@ public String getServiceName() { return pluginConfig.getServiceName(); } - public AuditProviderFactory getAuditProviderFactory() { return RangerBasePlugin.getAuditProviderFactory(getServiceName()); } - public AtlasTypeRegistry getTypeRegistry() { return typeRegistry; } @@ -456,10 +425,6 @@ public void setResultProcessor(RangerAccessResultProcessor resultProcessor) { this.resultProcessor = resultProcessor; } - public RangerAccessResultProcessor getResultProcessor() { - return this.resultProcessor; - } - public RangerAccessResult isAccessAllowed(RangerAccessRequest request) { return isAccessAllowed(request, resultProcessor); } @@ -576,64 +541,6 @@ public RangerAccessResult evalRowFilterPolicies(RangerAccessRequest request, Ran return ret; } - public void evalAuditPolicies(RangerAccessResult result) { - RangerPolicyEngine policyEngine = this.policyEngine; - - if (policyEngine != null) { - policyEngine.evaluateAuditPolicies(result); - } - } - - public RangerResourceAccessInfo getResourceAccessInfo(RangerAccessRequest request) { - RangerPolicyEngine policyEngine = this.policyEngine; - - if(policyEngine != null) { - return policyEngine.getResourceAccessInfo(request); - } - - return null; - } - - public RangerResourceACLs getResourceACLs(RangerAccessRequest request) { - return getResourceACLs(request, null); - } - - public RangerResourceACLs getResourceACLs(RangerAccessRequest request, String policyType) { - RangerResourceACLs ret = null; - RangerPolicyEngine policyEngine = this.policyEngine; - - if(policyEngine != null) { - ret = policyEngine.getResourceACLs(request, policyType); - } - - for (RangerChainedPlugin chainedPlugin : chainedPlugins) { - RangerResourceACLs chainedResourceACLs = chainedPlugin.getResourceACLs(request, policyType); - - if (chainedResourceACLs != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("Chained-plugin returned non-null ACLs!!"); - } - if (chainedPlugin.isAuthorizeOnlyWithChainedPlugin()) { - if (LOG.isDebugEnabled()) { - LOG.debug("Chained-plugin is configured to ignore Base-plugin's ACLs"); - } - ret = chainedResourceACLs; - break; - } else { - if (ret != null) { - ret = getMergedResourceACLs(ret, chainedResourceACLs); - } - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Chained-plugin returned null ACLs!!"); - } - } - } - - return ret; - } - public Set getRolesFromUserAndGroups(String user, Set groups) { RangerPolicyEngine policyEngine = this.policyEngine; @@ -644,331 +551,12 @@ public Set getRolesFromUserAndGroups(String user, Set groups) { return null; } - public RangerRoles getRangerRoles() { - RangerPolicyEngine policyEngine = this.policyEngine; - - if(policyEngine != null) { - return policyEngine.getRangerRoles(); - } - - return null; - } - - public Set getRangerRoleForPrincipal(String principal, String type) { - Set ret = new HashSet<>(); - Set rangerRoles = null; - Map> roleMapping = null; - RangerRoles roles = getRangerRoles(); - if (roles != null) { - rangerRoles = roles.getRangerRoles(); - } - - if (rangerRoles != null) { - RangerPluginContext rangerPluginContext = policyEngine.getPluginContext(); - if (rangerPluginContext != null) { - RangerAuthContext rangerAuthContext = rangerPluginContext.getAuthContext(); - if (rangerAuthContext != null) { - 
RangerRolesUtil rangerRolesUtil = rangerAuthContext.getRangerRolesUtil(); - if (rangerRolesUtil != null) { - switch (type) { - case "USER": - roleMapping = rangerRolesUtil.getUserRoleMapping(); - break; - case "GROUP": - roleMapping = rangerRolesUtil.getGroupRoleMapping(); - break; - case "ROLE": - roleMapping = rangerRolesUtil.getRoleRoleMapping(); - break; - } - } - } - } - if (roleMapping != null) { - Set principalRoles = roleMapping.get(principal); - if (CollectionUtils.isNotEmpty(principalRoles)) { - for (String role : principalRoles) { - for (RangerRole rangerRole : rangerRoles) { - if (rangerRole.getName().equals(role)) { - ret.add(rangerRole); - } - } - } - } - } - } - return ret; - } - - public boolean isServiceAdmin(String userName) { - boolean ret = false; - - RangerPolicyEngine policyEngine = this.policyEngine; - - if(policyEngine != null) { - RangerPolicyEngineImpl rangerPolicyEngine = (RangerPolicyEngineImpl) policyEngine; - ret = rangerPolicyEngine.isServiceAdmin(userName); - } - - return ret; - } - - public RangerRole createRole(RangerRole request, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.createRole(" + request + ")"); - } - - RangerRole ret = getAdminClient().createRole(request); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.createRole(" + request + ")"); - } - return ret; - } - - public void dropRole(String execUser, String roleName, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.dropRole(" + roleName + ")"); - } - - getAdminClient().dropRole(execUser, roleName); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.dropRole(" + roleName + ")"); - } - } - - public List getUserRoles(String execUser, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.getUserRoleNames(" + execUser + ")"); - } - - final List ret = getAdminClient().getUserRoles(execUser); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.getUserRoleNames(" + execUser + ")"); - } - return ret; - } - - public List getAllRoles(String execUser, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.getAllRoles()"); - } - - final List ret = getAdminClient().getAllRoles(execUser); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.getAllRoles()"); - } - return ret; - } - - public RangerRole getRole(String execUser, String roleName, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.getPrincipalsForRole(" + roleName + ")"); - } - - final RangerRole ret = getAdminClient().getRole(execUser, roleName); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.getPrincipalsForRole(" + roleName + ")"); - } - return ret; - } - - public void grantRole(GrantRevokeRoleRequest request, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.grantRole(" + request + ")"); - } - - getAdminClient().grantRole(request); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.grantRole(" + request + ")"); - } - } - - public void revokeRole(GrantRevokeRoleRequest request, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.revokeRole(" 
+ request + ")"); - } - - getAdminClient().revokeRole(request); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.revokeRole(" + request + ")"); - } - } - - public void grantAccess(GrantRevokeRequest request, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.grantAccess(" + request + ")"); - } - - boolean isSuccess = false; - - try { - RangerPolicyEngine policyEngine = this.policyEngine; - - if (policyEngine != null) { - request.setZoneName(policyEngine.getUniquelyMatchedZoneName(request)); - } - - getAdminClient().grantAccess(request); - - isSuccess = true; - } finally { - auditGrantRevoke(request, "grant", isSuccess, resultProcessor); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.grantAccess(" + request + ")"); - } - } - - public void revokeAccess(GrantRevokeRequest request, RangerAccessResultProcessor resultProcessor) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.revokeAccess(" + request + ")"); - } - - boolean isSuccess = false; - - try { - RangerPolicyEngine policyEngine = this.policyEngine; - - if (policyEngine != null) { - request.setZoneName(policyEngine.getUniquelyMatchedZoneName(request)); - } - - getAdminClient().revokeAccess(request); - - isSuccess = true; - } finally { - auditGrantRevoke(request, "revoke", isSuccess, resultProcessor); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.revokeAccess(" + request + ")"); - } - } - - public void registerAuthContextEventListener(RangerAuthContextListener authContextListener) { - this.pluginContext.setAuthContextListener(authContextListener); - } - - public static RangerAdminClient createAdminClient(RangerPluginConfig pluginConfig) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.createAdminClient(" + pluginConfig.getServiceName() + ", " + pluginConfig.getAppId() + ", " + pluginConfig.getPropertyPrefix() + ")"); - } - - RangerAdminClient ret = null; - String propertyName = pluginConfig.getPropertyPrefix() + ".policy.source.impl"; - String policySourceImpl = pluginConfig.get(propertyName); - - if(StringUtils.isEmpty(policySourceImpl)) { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Value for property[%s] was null or empty. Unexpected! Will use policy source of type[%s]", propertyName, RangerAdminRESTClient.class.getName())); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Value for property[%s] was [%s].", propertyName, policySourceImpl)); - } - - try { - @SuppressWarnings("unchecked") - Class adminClass = (Class)Class.forName(policySourceImpl); - - ret = adminClass.newInstance(); - } catch (Exception excp) { - LOG.error("failed to instantiate policy source of type '" + policySourceImpl + "'. 
Will use policy source of type '" + RangerAdminRESTClient.class.getName() + "'", excp); - } - } - - if(ret == null) { - ret = new RangerAdminRESTClient(); - } - - ret.init(pluginConfig.getServiceName(), pluginConfig.getAppId(), pluginConfig.getPropertyPrefix(), pluginConfig); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.createAdminClient(" + pluginConfig.getServiceName() + ", " + pluginConfig.getAppId() + ", " + pluginConfig.getPropertyPrefix() + "): policySourceImpl=" + policySourceImpl + ", client=" + ret); - } - return ret; - } - - public void refreshPoliciesAndTags() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> refreshPoliciesAndTags()"); - } - - try { - RangerPolicyEngine policyEngine = this.policyEngine; - - // Synch-up policies - long oldPolicyVersion = policyEngine.getPolicyVersion(); - - if (refresher != null) { - refresher.syncPoliciesWithAdmin(accessTrigger); - } - - policyEngine = this.policyEngine; // might be updated in syncPoliciesWithAdmin() - - long newPolicyVersion = policyEngine.getPolicyVersion(); - - if (oldPolicyVersion == newPolicyVersion) { - // Synch-up tags - RangerTagEnricher tagEnricher = getTagEnricher(); - - if (tagEnricher != null) { - tagEnricher.syncTagsWithAdmin(accessTrigger); - } - } - } catch (InterruptedException exception) { - LOG.error("Failed to update policy-engine, continuing to use old policy-engine and/or tags", exception); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== refreshPoliciesAndTags()"); - } - } - - - private void auditGrantRevoke(GrantRevokeRequest request, String action, boolean isSuccess, RangerAccessResultProcessor resultProcessor) { - if(request != null && resultProcessor != null) { - RangerAccessRequestImpl accessRequest = new RangerAccessRequestImpl(); - - accessRequest.setResource(new RangerAccessResourceImpl(StringUtil.toStringObjectMap(request.getResource()))); - accessRequest.setUser(request.getGrantor()); - accessRequest.setAccessType(RangerPolicyEngine.ANY_ACCESS); - accessRequest.setAction(action); - accessRequest.setClientIPAddress(request.getClientIPAddress()); - accessRequest.setClientType(request.getClientType()); - accessRequest.setRequestData(request.getRequestData()); - accessRequest.setSessionId(request.getSessionId()); - - // call isAccessAllowed() to determine if audit is enabled or not - RangerAccessResult accessResult = isAccessAllowed(accessRequest, null); - - if(accessResult != null && accessResult.getIsAudited()) { - accessRequest.setAccessType(action); - accessResult.setIsAllowed(isSuccess); - - if(! isSuccess) { - accessResult.setPolicyId("-1"); - } - - resultProcessor.processResult(accessResult); - } - } - } - private RangerServiceDef getDefaultServiceDef() { RangerServiceDef ret = null; if (StringUtils.isNotBlank(getServiceType())) { try { - ret = EmbeddedServiceDefsUtil.instance().getEmbeddedServiceDef(getServiceType()); + ret = ServiceDefsUtil.instance().getEmbeddedServiceDef(getServiceType()); } catch (Exception exp) { LOG.error("Could not get embedded service-def for " + getServiceType()); } @@ -995,29 +583,8 @@ private ServicePolicies getDefaultSvcPolicies() { return ret; } - public boolean logErrorMessage(String message) { - LogHistory log = logHistoryList.get(message); - if (log == null) { - log = new LogHistory(); - logHistoryList.put(message, log); - } - if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) { - log.lastLogTime = System.currentTimeMillis(); - int counter = log.counter; - log.counter = 0; - if( counter > 0) { - message += ". 
Messages suppressed before: " + counter; - } - LOG.error(message); - return true; - } else { - log.counter++; - } - return false; - } - private Set toSet(String value) { - return StringUtils.isNotBlank(value) ? StringUtil.toSet(value) : Collections.emptySet(); + return StringUtils.isNotBlank(value) ? RangerUtil.toSet(value) : Collections.emptySet(); } static private final class LogHistory { @@ -1047,38 +614,11 @@ public RangerTagEnricher getTagEnricher() { return ret; } - public static RangerResourceACLs getMergedResourceACLs(RangerResourceACLs baseACLs, RangerResourceACLs chainedACLs) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerBasePlugin.getMergedResourceACLs()"); - LOG.debug("baseACLs:[" + baseACLs + "]"); - LOG.debug("chainedACLS:[" + chainedACLs + "]"); - } - - overrideACLs(chainedACLs, baseACLs, RangerRolesUtil.ROLES_FOR.USER); - overrideACLs(chainedACLs, baseACLs, RangerRolesUtil.ROLES_FOR.GROUP); - overrideACLs(chainedACLs, baseACLs, RangerRolesUtil.ROLES_FOR.ROLE); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerBasePlugin.getMergedResourceACLs() : ret:[" + baseACLs + "]"); - } - return baseACLs; - } - - private RangerAdminClient getAdminClient() throws Exception { - PolicyRefresher refresher = this.refresher; - RangerAdminClient admin = refresher == null ? null : refresher.getRangerAdminClient(); - - if(admin == null) { - throw new Exception("ranger-admin client is null"); - } - return admin; - } - private List initChainedPlugins() { List ret = new ArrayList<>(); String chainedServicePropPrefix = pluginConfig.getPropertyPrefix() + ".chained.services"; - for (String chainedService : StringUtil.toList(pluginConfig.get(chainedServicePropPrefix))) { + for (String chainedService : RangerUtil.toList(pluginConfig.get(chainedServicePropPrefix))) { if (StringUtils.isBlank(chainedService)) { continue; } @@ -1216,18 +756,8 @@ private static void overrideACLs(final RangerResourceACLs chainedResourceACLs, R private static AuditProviderFactory getAuditProviderFactory(String serviceName) { AuditProviderFactory ret = AuditProviderFactory.getInstance(); - if (!ret.isInitDone()) { - LOG.warn("RangerBasePlugin.getAuditProviderFactory(serviceName=" + serviceName + "): audit not initialized yet. Will use stand-alone audit factory"); - - ret = StandAloneAuditProviderFactory.getInstance(); - - if (!ret.isInitDone()) { - RangerAuditConfig conf = new RangerAuditConfig(); - - if (conf.isInitSuccess()) { - ret.init(conf.getProperties(), "StandAlone"); - } - } + if (ret == null || !ret.isInitDone()) { + LOG.error("AuditProviderFactory not configured properly, will not log authz events"); } return ret; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBaseService.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBaseService.java deleted file mode 100644 index 35107e3dad..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerBaseService.java +++ /dev/null @@ -1,466 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.service; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.SecureClientLogin; -import org.apache.hadoop.security.authentication.util.KerberosName; -import org.apache.atlas.authorization.hadoop.config.RangerAdminConfig; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItemAccess; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.model.validation.RangerServiceDefHelper; -import org.apache.atlas.plugin.resourcematcher.RangerAbstractResourceMatcher; -import org.apache.atlas.plugin.util.ServiceDefUtil; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; - - -public abstract class RangerBaseService { - private static final Log LOG = LogFactory.getLog(RangerBaseService.class); - - protected static final String ADMIN_USER_PRINCIPAL = "ranger.admin.kerberos.principal"; - protected static final String ADMIN_USER_KEYTAB = "ranger.admin.kerberos.keytab"; - protected static final String LOOKUP_PRINCIPAL = "ranger.lookup.kerberos.principal"; - protected static final String LOOKUP_KEYTAB = "ranger.lookup.kerberos.keytab"; - protected static final String RANGER_AUTH_TYPE = "hadoop.security.authentication"; - - protected static final String KERBEROS_TYPE = "kerberos"; - - private static final String PROP_DEFAULT_POLICY_PREFIX = "default-policy."; - private static final String PROP_DEFAULT_POLICY_NAME_SUFFIX = "name"; - - - protected RangerServiceDef serviceDef; - protected RangerService service; - - protected Map configs; - protected String serviceName; - protected String serviceType; - protected String lookUpUser; - - protected final RangerAdminConfig config; - - public RangerBaseService() { - this.config = RangerAdminConfig.getInstance(); - String authType = config.get(RANGER_AUTH_TYPE,"simple"); - String lookupPrincipal = config.get(LOOKUP_PRINCIPAL); - String lookupKeytab = config.get(LOOKUP_KEYTAB); - lookUpUser = getLookupUser(authType, lookupPrincipal, lookupKeytab); - } - - public void init(RangerServiceDef serviceDef, RangerService service) { - this.serviceDef = serviceDef; - this.service = service; - this.configs = service.getConfigs(); - this.serviceName = service.getName(); - this.serviceType = service.getType(); - } - - /** - * @return the serviceDef - */ - public RangerServiceDef getServiceDef() { - return serviceDef; - } - - /** - * @return the service - */ - public RangerService getService() { - return service; - } - - public Map 
getConfigs() { - return configs; - } - - public void setConfigs(Map configs) { - this.configs = configs; - } - - public String getServiceName() { - return serviceName; - } - - public void setServiceName(String serviceName) { - this.serviceName = serviceName; - } - - public String getServiceType() { - return serviceType; - } - - public void setServiceType(String serviceType) { - this.serviceType = serviceType; - } - - public RangerAdminConfig getConfig() { return config; } - - public abstract Map validateConfig() throws Exception; - - public abstract List lookupResource(ResourceLookupContext context) throws Exception; - - public List getDefaultRangerPolicies() throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerBaseService.getDefaultRangerPolicies() "); - } - - List ret = new ArrayList(); - - try { - // we need to create one policy for each resource hierarchy - RangerServiceDefHelper serviceDefHelper = new RangerServiceDefHelper(serviceDef); - for (List aHierarchy : serviceDefHelper.filterHierarchies_containsOnlyMandatoryResources(RangerPolicy.POLICY_TYPE_ACCESS)) { - RangerPolicy policy = getDefaultPolicy(aHierarchy); - if (policy != null) { - ret.add(policy); - } - } - } catch (Exception e) { - LOG.error("Error getting default polcies for Service: " + service.getName(), e); - } - - final Boolean additionalDefaultPolicySetup = Boolean.valueOf(configs.get("setup.additional.default.policies")); - - if (additionalDefaultPolicySetup) { - LOG.info(getServiceName() + ": looking for additional default policies in service-config"); - - Set policyIndexes = new TreeSet<>(); - - for (String configName : configs.keySet()) { - if (configName.startsWith(PROP_DEFAULT_POLICY_PREFIX) && configName.endsWith(PROP_DEFAULT_POLICY_NAME_SUFFIX)) { - policyIndexes.add(configName.substring(PROP_DEFAULT_POLICY_PREFIX.length(), configName.length() - PROP_DEFAULT_POLICY_NAME_SUFFIX.length() - 1)); - } - } - - LOG.info(getServiceName() + ": found " + policyIndexes.size() + " additional default policies in service-config"); - - for (String policyIndex : policyIndexes) { - String policyPropertyPrefix = PROP_DEFAULT_POLICY_PREFIX + policyIndex + "."; - String resourcePropertyPrefix = policyPropertyPrefix + "resource."; - Map policyResources = getResourcesForPrefix(resourcePropertyPrefix); - - if (MapUtils.isNotEmpty(policyResources)) { - addCustomRangerDefaultPolicies(ret, policyResources, policyPropertyPrefix); - } else { - LOG.warn(getServiceName() + ": no resources specified for default policy with prefix '" + policyPropertyPrefix + "'. 
Ignored"); - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerBaseService.getDefaultRangerPolicies(): " + ret); - } - - return ret; - } - - private Map getResourcesForPrefix(String resourcePropertyPrefix) { - Map policyResourceMap = new HashMap(); - - if (configs != null) { - for (Map.Entry entry : configs.entrySet()) { - String configName = entry.getKey(); - String configValue = entry.getValue(); - - if(configName.startsWith(resourcePropertyPrefix) && StringUtils.isNotBlank(configValue)){ - RangerPolicyResource rPolRes = new RangerPolicyResource(); - String resourceKey = configName.substring(resourcePropertyPrefix.length()); - List resourceList = new ArrayList(Arrays.asList(configValue.split(","))); - - rPolRes.setIsExcludes(false); - rPolRes.setIsRecursive(false); - rPolRes.setValues(resourceList); - policyResourceMap.put(resourceKey, rPolRes); - } - } - } - - return policyResourceMap; - } - - private void addCustomRangerDefaultPolicies(List ret, Map policyResourceMap, String policyPropertyPrefix) throws Exception { - String policyName = configs.get(policyPropertyPrefix + PROP_DEFAULT_POLICY_NAME_SUFFIX); - String description = configs.get(policyPropertyPrefix + "description"); - - if (StringUtils.isEmpty(description)) { - description = "Policy for " + policyName; - } - - RangerPolicy policy = new RangerPolicy(); - - policy.setName(policyName); - policy.setIsEnabled(true); - policy.setVersion(1L); - policy.setIsAuditEnabled(true); - policy.setService(serviceName); - policy.setDescription(description); - policy.setName(policyName); - policy.setResources(policyResourceMap); - - for (int i = 1; ; i++) { - String policyItemPropertyPrefix = policyPropertyPrefix + "policyItem." + i + "."; - String policyItemUsers = configs.get(policyItemPropertyPrefix + "users"); - String policyItemGroups = configs.get(policyItemPropertyPrefix + "groups"); - String policyItemRoles = configs.get(policyItemPropertyPrefix + "roles"); - String policyItemAccessTypes = configs.get(policyItemPropertyPrefix + "accessTypes"); - String isDelegateAdmin = configs.get(policyItemPropertyPrefix + "isDelegateAdmin"); - - if (StringUtils.isEmpty(policyItemAccessTypes) || - (StringUtils.isEmpty(policyItemUsers) && StringUtils.isEmpty(policyItemGroups) && StringUtils.isEmpty(policyItemRoles))) { - - break; - } - - RangerPolicyItem policyItem = new RangerPolicyItem(); - - policyItem.setDelegateAdmin(Boolean.parseBoolean(isDelegateAdmin)); - - if (StringUtils.isNotBlank(policyItemUsers)) { - policyItem.setUsers(Arrays.asList(policyItemUsers.split(","))); - } - - if (StringUtils.isNotBlank(policyItemGroups)) { - policyItem.setGroups(Arrays.asList(policyItemGroups.split(","))); - } - - if (StringUtils.isNotBlank(policyItemRoles)) { - policyItem.setRoles(Arrays.asList(policyItemRoles.split(","))); - } - - if (StringUtils.isNotBlank(policyItemAccessTypes)) { - for (String accessType : Arrays.asList(policyItemAccessTypes.split(","))) { - RangerPolicyItemAccess polAccess = new RangerPolicyItemAccess(accessType, true); - - policyItem.getAccesses().add(polAccess); - } - } - - policy.getPolicyItems().add(policyItem); - } - - LOG.info(getServiceName() + ": adding default policy: name=" + policy.getName()); - - ret.add(policy); - } - - private RangerPolicy getDefaultPolicy(List resourceHierarchy) throws Exception { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerBaseService.getDefaultPolicy()"); - } - - RangerPolicy policy = new RangerPolicy(); - - String policyName=buildPolicyName(resourceHierarchy); - - 
policy.setIsEnabled(true); - policy.setVersion(1L); - policy.setName(policyName); - policy.setService(service.getName()); - policy.setDescription("Policy for " + policyName); - policy.setIsAuditEnabled(true); - policy.setResources(createDefaultPolicyResource(resourceHierarchy)); - - List policyItems = new ArrayList(); - //Create Default policy item for the service user - RangerPolicy.RangerPolicyItem policyItem = createDefaultPolicyItem(policy.getResources()); - policyItems.add(policyItem); - policy.setPolicyItems(policyItems); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerBaseService.getDefaultPolicy()" + policy); - } - - return policy; - } - - private RangerPolicy.RangerPolicyItem createDefaultPolicyItem(Map policyResources) throws Exception { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerBaseService.createDefaultPolicyItem()"); - } - - RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem(); - - policyItem.setUsers(getUserList()); - policyItem.setGroups(getGroupList()); - List accesses = getAllowedAccesses(policyResources); - policyItem.setAccesses(accesses); - - policyItem.setDelegateAdmin(true); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerBaseService.createDefaultPolicyItem(): " + policyItem ); - } - return policyItem; - } - - protected List getAllowedAccesses(Map policyResources) { - List ret = new ArrayList(); - - RangerServiceDef.RangerResourceDef leafResourceDef = ServiceDefUtil.getLeafResourceDef(serviceDef, policyResources); - - if (leafResourceDef != null) { - Set accessTypeRestrictions = leafResourceDef.getAccessTypeRestrictions(); - - for (RangerServiceDef.RangerAccessTypeDef accessTypeDef : serviceDef.getAccessTypes()) { - boolean isAccessTypeAllowed = CollectionUtils.isEmpty(accessTypeRestrictions) || accessTypeRestrictions.contains(accessTypeDef.getName()); - - if (isAccessTypeAllowed) { - RangerPolicy.RangerPolicyItemAccess access = new RangerPolicy.RangerPolicyItemAccess(); - access.setType(accessTypeDef.getName()); - access.setIsAllowed(true); - ret.add(access); - } - } - } - return ret; - } - - protected Map createDefaultPolicyResource(List resourceHierarchy) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerBaseService.createDefaultPolicyResource()"); - } - Map resourceMap = new HashMap<>(); - - for (RangerServiceDef.RangerResourceDef resourceDef : resourceHierarchy) { - RangerPolicy.RangerPolicyResource polRes = new RangerPolicy.RangerPolicyResource(); - - polRes.setIsExcludes(false); - polRes.setIsRecursive(resourceDef.getRecursiveSupported()); - polRes.setValue(RangerAbstractResourceMatcher.WILDCARD_ASTERISK); - - resourceMap.put(resourceDef.getName(), polRes); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerBaseService.createDefaultPolicyResource():" + resourceMap); - } - return resourceMap; - } - - private String buildPolicyName(List resourceHierarchy) { - StringBuilder sb = new StringBuilder("all"); - if (CollectionUtils.isNotEmpty(resourceHierarchy)) { - int resourceDefCount = 0; - for (RangerServiceDef.RangerResourceDef resourceDef : resourceHierarchy) { - if (resourceDefCount > 0) { - sb.append(", "); - } else { - sb.append(" - "); - } - sb.append(resourceDef.getName()); - resourceDefCount++; - } - } - return sb.toString().trim(); - } - - private List getUserList() { - List ret = new ArrayList<>(); - - HashSet uniqueUsers = new HashSet(); - String[] users = config.getStrings("ranger.default.policy.users"); - - if (users != null) { - for (String user : users) { - 
uniqueUsers.add(user); - } - } - - Map serviceConfig = service.getConfigs(); - if (serviceConfig != null ) { - String serviceConfigUser = serviceConfig.get("username"); - if (StringUtils.isNotBlank(serviceConfigUser)){ - uniqueUsers.add(serviceConfig.get("username")); - } - String defaultUsers = serviceConfig.get("default.policy.users"); - if (!StringUtils.isEmpty(defaultUsers)) { - List defaultUserList = new ArrayList<>(Arrays.asList(StringUtils.split(defaultUsers,","))); - if (!defaultUserList.isEmpty()) { - uniqueUsers.addAll(defaultUserList); - } - } - } - - ret.addAll(uniqueUsers); - return ret; - } - private List getGroupList() { - List ret = new ArrayList<>(); - - HashSet uniqueGroups = new HashSet(); - String[] groups = config.getStrings("ranger.default.policy.groups"); - - if (groups != null) { - for (String group : groups) { - uniqueGroups.add(group); - } - } - - Map serviceConfig = service.getConfigs(); - if (serviceConfig != null) { - String defaultGroups = serviceConfig.get("default.policy.groups"); - if (!StringUtils.isEmpty(defaultGroups)) { - List defaultGroupList = new ArrayList<>(Arrays.asList(StringUtils.split(defaultGroups, ","))); - if (!defaultGroupList.isEmpty()) { - uniqueGroups.addAll(defaultGroupList); - } - } - } - ret.addAll(uniqueGroups); - - return ret; - } - - protected String getLookupUser(String authType, String lookupPrincipal, String lookupKeytab) { - String lookupUser = null; - if(!StringUtils.isEmpty(authType) && authType.equalsIgnoreCase(KERBEROS_TYPE)){ - if(SecureClientLogin.isKerberosCredentialExists(lookupPrincipal, lookupKeytab)){ - KerberosName krbName = new KerberosName(lookupPrincipal); - try { - lookupUser = krbName.getShortName(); - } catch (IOException e) { - LOG.error("Unknown lookup user", e); - } - } - } - return lookupUser; - } - - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerChainedPlugin.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerChainedPlugin.java index 58daf8efd1..7ea35eedf3 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerChainedPlugin.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerChainedPlugin.java @@ -19,11 +19,11 @@ package org.apache.atlas.plugin.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.atlas.plugin.policyengine.RangerAccessRequest; import org.apache.atlas.plugin.policyengine.RangerAccessResult; import org.apache.atlas.plugin.policyengine.RangerResourceACLs; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import java.util.Collection; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerDefaultService.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerDefaultService.java deleted file mode 100644 index d2541c4666..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/RangerDefaultService.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.service; - -import org.apache.commons.collections.ListUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.util.List; -import java.util.Map; - -public class RangerDefaultService extends RangerBaseService { - private static final Log LOG = LogFactory.getLog(RangerDefaultService.class); - - @Override - public Map validateConfig() throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("RangerDefaultService.validateConfig Service: (" + serviceName + " ), returning empty map"); - } - return MapUtils.EMPTY_MAP; - } - - @Override - public List lookupResource(ResourceLookupContext context) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("RangerDefaultService.lookupResource Context: (" + context + "), returning empty list"); - } - return ListUtils.EMPTY_LIST; - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/ResourceLookupContext.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/ResourceLookupContext.java deleted file mode 100644 index 444488d1c9..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/service/ResourceLookupContext.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.service; - -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.util.List; -import java.util.Map; - -@JsonAutoDetect(getterVisibility=Visibility.NONE, setterVisibility=Visibility.NONE, fieldVisibility=Visibility.ANY) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL ) -@JsonIgnoreProperties(ignoreUnknown=true) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class ResourceLookupContext { - private String userInput; - private String resourceName; - private Map> resources; - - - public ResourceLookupContext() { - - } - - /** - * @return the userInput - */ - public String getUserInput() { - return userInput; - } - /** - * @param userInput the userInput to set - */ - public void setUserInput(String userInput) { - this.userInput = userInput; - } - /** - * @return the resourceName - */ - public String getResourceName() { - return resourceName; - } - /** - * @param resourceName the resourceName to set - */ - public void setResourceName(String resourceName) { - this.resourceName = resourceName; - } - /** - * @return the resources - */ - public Map> getResources() { - return resources; - } - /** - * @param resources the resources to set - */ - public void setResources(Map> resources) { - this.resources = resources; - } - - @Override - public String toString() { - return String.format("ResourceLookupContext={resourceName=%s,userInput=%s,resources=%s}", resourceName, userInput, resources); - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractPredicateUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractPredicateUtil.java deleted file mode 100644 index 1ddc65b616..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractPredicateUtil.java +++ /dev/null @@ -1,1053 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.collections.Predicate; -import org.apache.commons.collections.PredicateUtils; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang.ObjectUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.RangerBaseModelObject; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class AbstractPredicateUtil { - private static Map> sorterMap = new HashMap<>(); - - public void applyFilter(List objList, SearchFilter filter) { - if(CollectionUtils.isEmpty(objList)) { - return; - } - - Predicate pred = getPredicate(filter); - - if(pred != null) { - CollectionUtils.filter(objList, pred); - } - - Comparator sorter = getSorter(filter); - - if(sorter != null) { - Collections.sort(objList, sorter); - } - } - - public Predicate getPredicate(SearchFilter filter) { - if(filter == null || filter.isEmpty()) { - return null; - } - - List predicates = new ArrayList<>(); - - addPredicates(filter, predicates); - - Predicate ret = CollectionUtils.isEmpty(predicates) ? 
null : PredicateUtils.allPredicate(predicates); - - return ret; - } - - public void addPredicates(SearchFilter filter, List predicates) { - addPredicateForServiceType(filter.getParam(SearchFilter.SERVICE_TYPE), predicates); - addPredicateForServiceTypeId(filter.getParam(SearchFilter.SERVICE_TYPE_ID), predicates); - addPredicateForServiceName(filter.getParam(SearchFilter.SERVICE_NAME), predicates); - // addPredicateForServiceId(filter.getParam(SearchFilter.SERVICE_ID), predicates); // not supported - addPredicateForPolicyName(filter.getParam(SearchFilter.POLICY_NAME), predicates); - addPredicateForPolicyId(filter.getParam(SearchFilter.POLICY_ID), predicates); - addPredicateForIsEnabled(filter.getParam(SearchFilter.IS_ENABLED), predicates); - addPredicateForIsRecursive(filter.getParam(SearchFilter.IS_RECURSIVE), predicates); - addPredicateForTagServiceName(filter.getParam(SearchFilter.TAG_SERVICE_NAME), predicates); - // addPredicateForTagServiceId(filter.getParam(SearchFilter.TAG_SERVICE_ID), predicates); // not supported - addPredicateForUserName(filter.getParam(SearchFilter.USER), predicates); - addPredicateForGroupName(filter.getParam(SearchFilter.GROUP), predicates); - addPredicateForRoleName(filter.getParam(SearchFilter.ROLE), predicates); - addPredicateForResources(filter.getParamsWithPrefix(SearchFilter.RESOURCE_PREFIX, true), predicates); - addPredicateForPolicyResource(filter.getParam(SearchFilter.POL_RESOURCE), predicates); - addPredicateForPartialPolicyName(filter.getParam(SearchFilter.POLICY_NAME_PARTIAL), predicates); - addPredicateForResourceSignature(filter.getParam(SearchFilter.RESOURCE_SIGNATURE), predicates); - addPredicateForPolicyType(filter.getParam(SearchFilter.POLICY_TYPE), predicates); - addPredicateForPolicyPriority(filter.getParam(SearchFilter.POLICY_PRIORITY), predicates); - addPredicateForPartialPolicyLabels(filter.getParam(SearchFilter.POLICY_LABELS_PARTIAL), predicates); - addPredicateForZoneName(filter.getParam(SearchFilter.ZONE_NAME), predicates); - // addPredicateForZoneId(filter.getParam(SearchFilter.ZONE_ID), predicates); // not supported - } - - public Comparator getSorter(SearchFilter filter) { - String sortBy = filter == null ? null : filter.getSortBy(); - - if(StringUtils.isEmpty(sortBy)) { - return null; - } - - Comparator ret = sorterMap.get(sortBy); - - return ret; - } - - public final static Comparator idComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - Long val1 = (o1 != null) ? o1.getId() : null; - Long val2 = (o2 != null) ? o2.getId() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator createTimeComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - Date val1 = (o1 != null) ? o1.getCreateTime() : null; - Date val2 = (o2 != null) ? o2.getCreateTime() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator updateTimeComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - Date val1 = (o1 != null) ? o1.getUpdateTime() : null; - Date val2 = (o2 != null) ? 
o2.getUpdateTime() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator serviceDefNameComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - String val1 = null; - String val2 = null; - - if(o1 != null) { - if(o1 instanceof RangerServiceDef) { - val1 = ((RangerServiceDef)o1).getName(); - } else if(o1 instanceof RangerService) { - val1 = ((RangerService)o1).getType(); - } - } - - if(o2 != null) { - if(o2 instanceof RangerServiceDef) { - val2 = ((RangerServiceDef)o2).getName(); - } else if(o2 instanceof RangerService) { - val2 = ((RangerService)o2).getType(); - } - } - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator serviceNameComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - String val1 = null; - String val2 = null; - - if(o1 != null) { - if(o1 instanceof RangerPolicy) { - val1 = ((RangerPolicy)o1).getService(); - } else if(o1 instanceof RangerService) { - val1 = ((RangerService)o1).getType(); - } - } - - if(o2 != null) { - if(o2 instanceof RangerPolicy) { - val2 = ((RangerPolicy)o2).getService(); - } else if(o2 instanceof RangerService) { - val2 = ((RangerService)o2).getType(); - } - } - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator policyNameComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - String val1 = (o1 instanceof RangerPolicy) ? ((RangerPolicy)o1).getName() : null; - String val2 = (o2 instanceof RangerPolicy) ? ((RangerPolicy)o2).getName() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - public final static Comparator resourceLevelComparator = new Comparator() { - @Override - public int compare(RangerResourceDef o1, RangerResourceDef o2) { - Integer val1 = (o1 != null) ? o1.getLevel() : null; - Integer val2 = (o2 != null) ? o2.getLevel() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - protected final static Comparator zoneNameComparator = new Comparator() { - @Override - public int compare(RangerBaseModelObject o1, RangerBaseModelObject o2) { - String val1 = (o1 instanceof RangerSecurityZone) ? ((RangerSecurityZone)o1).getName() : null; - String val2 = (o2 instanceof RangerSecurityZone) ? 
((RangerSecurityZone)o2).getName() : null; - - return ObjectUtils.compare(val1, val2); - } - }; - - static { - sorterMap.put(SearchFilter.SERVICE_TYPE, serviceDefNameComparator); - sorterMap.put(SearchFilter.SERVICE_TYPE_ID, idComparator); - sorterMap.put(SearchFilter.SERVICE_NAME, serviceNameComparator); - sorterMap.put(SearchFilter.SERVICE_TYPE_ID, idComparator); - sorterMap.put(SearchFilter.POLICY_NAME, policyNameComparator); - sorterMap.put(SearchFilter.POLICY_ID, idComparator); - sorterMap.put(SearchFilter.CREATE_TIME, createTimeComparator); - sorterMap.put(SearchFilter.UPDATE_TIME, updateTimeComparator); - sorterMap.put(SearchFilter.ZONE_ID, idComparator); - sorterMap.put(SearchFilter.ZONE_NAME, zoneNameComparator); - } - - private Predicate addPredicateForServiceType(final String serviceType, List predicates) { - if(StringUtils.isEmpty(serviceType)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerServiceDef) { - RangerServiceDef serviceDef = (RangerServiceDef)object; - String svcType = serviceDef.getName(); - - ret = StringUtils.equals(svcType, serviceType); - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForServiceTypeId(final String serviceTypeId, List predicates) { - if(StringUtils.isEmpty(serviceTypeId)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerServiceDef) { - RangerServiceDef serviceDef = (RangerServiceDef)object; - Long svcDefId = serviceDef.getId(); - - if(svcDefId != null) { - ret = StringUtils.equals(serviceTypeId, svcDefId.toString()); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForServiceName(final String serviceName, List predicates) { - if(StringUtils.isEmpty(serviceName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - ret = StringUtils.equals(serviceName, policy.getService()); - } else if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - ret = StringUtils.equals(serviceName, service.getName()); - } else { - ret = true; - } - - return ret; - } - }; - - if(ret != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPolicyName(final String policyName, List predicates) { - if(StringUtils.isEmpty(policyName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - ret = StringUtils.equals(policyName, policy.getName()); - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPartialPolicyName(final String policyName, List predicates) { - if(StringUtils.isEmpty(policyName)) { - return null; 
- } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - ret = StringUtils.containsIgnoreCase(policy.getName(), policyName); - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPolicyId(final String policyId, List predicates) { - if(StringUtils.isEmpty(policyId)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - if(policy.getId() != null) { - ret = StringUtils.equals(policyId, policy.getId().toString()); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForUserName(final String userName, List predicates) { - if(StringUtils.isEmpty(userName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - List[] policyItemsList = new List[] { policy.getPolicyItems(), - policy.getDenyPolicyItems(), - policy.getAllowExceptions(), - policy.getDenyExceptions(), - policy.getDataMaskPolicyItems(), - policy.getRowFilterPolicyItems() - }; - - for(List policyItemsObj : policyItemsList) { - @SuppressWarnings("unchecked") - List policyItems = (List)policyItemsObj; - - for(RangerPolicyItem policyItem : policyItems) { - if(! policyItem.getUsers().isEmpty()) { - for(String user : policyItem.getUsers()) { - if(StringUtils.containsIgnoreCase(user, userName)) { - ret = true; - break; - } - } - } - } - if (ret) { - break; - } - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForGroupName(final String groupName, List predicates) { - if(StringUtils.isEmpty(groupName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - List[] policyItemsList = new List[] { policy.getPolicyItems(), - policy.getDenyPolicyItems(), - policy.getAllowExceptions(), - policy.getDenyExceptions(), - policy.getDataMaskPolicyItems(), - policy.getRowFilterPolicyItems() - }; - - for(List policyItemsObj : policyItemsList) { - @SuppressWarnings("unchecked") - List policyItems = (List)policyItemsObj; - - for(RangerPolicyItem policyItem : policyItems) { - if(! 
policyItem.getGroups().isEmpty()) { - for(String group : policyItem.getGroups()) { - if(StringUtils.containsIgnoreCase(group, groupName)) { - ret = true; - break; - } - } - } - } - if (ret) { - break; - } - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForRoleName(final String roleName, List predicates) { - if(StringUtils.isEmpty(roleName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - List[] policyItemsList = new List[] { policy.getPolicyItems(), - policy.getDenyPolicyItems(), - policy.getAllowExceptions(), - policy.getDenyExceptions(), - policy.getDataMaskPolicyItems(), - policy.getRowFilterPolicyItems() - }; - for(List policyItemsObj : policyItemsList) { - @SuppressWarnings("unchecked") - List policyItems = (List)policyItemsObj; - - for(RangerPolicyItem policyItem : policyItems) { - if(! policyItem.getRoles().isEmpty()) { - for(String role : policyItem.getRoles()) { - if(StringUtils.containsIgnoreCase(role, roleName)) { - ret = true; - break; - } - } - } - } - if (ret) { - break; - } - } - }else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - - } - - private Predicate addPredicateForIsEnabled(final String status, List predicates) { - if(StringUtils.isEmpty(status)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerBaseModelObject) { - RangerBaseModelObject obj = (RangerBaseModelObject)object; - - if(Boolean.parseBoolean(status)) { - ret = obj.getIsEnabled(); - } else { - ret = !obj.getIsEnabled(); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForResources(final Map resources, List predicates) { - if(MapUtils.isEmpty(resources)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - if(! 
MapUtils.isEmpty(policy.getResources())) { - int numFound = 0; - for(String name : resources.keySet()) { - boolean isMatch = false; - - RangerPolicyResource policyResource = policy.getResources().get(name); - - if(policyResource != null && !CollectionUtils.isEmpty(policyResource.getValues())) { - String val = resources.get(name); - - if(policyResource.getValues().contains(val)) { - isMatch = true; - } else { - for(String policyResourceValue : policyResource.getValues()) { - if(FilenameUtils.wildcardMatch(val, policyResourceValue)) { - isMatch = true; - break; - } - } - } - } - - if(isMatch) { - numFound++; - } else { - break; - } - } - - ret = numFound == resources.size(); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPolicyResource(final String resourceValue, List predicates) { - if(StringUtils.isEmpty(resourceValue)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - Map policyResources = policy.getResources(); - - if(MapUtils.isNotEmpty(policyResources)) { - - for (Map.Entry entry : policyResources.entrySet()) { - - RangerPolicyResource policyResource = entry.getValue(); - - if (policyResource != null && CollectionUtils.isNotEmpty(policyResource.getValues())) { - - for (String policyResoureValue : policyResource.getValues()) { - if (StringUtils.containsIgnoreCase(policyResoureValue, resourceValue)) { - ret = true; - - break; - } - } - } - - } - - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForIsRecursive(final String isRecursiveStr, List predicates) { - if(StringUtils.isEmpty(isRecursiveStr)) { - return null; - } - - final boolean isRecursive = Boolean.parseBoolean(isRecursiveStr); - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = true; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - if(! 
MapUtils.isEmpty(policy.getResources())) { - for(Map.Entry e : policy.getResources().entrySet()) { - RangerPolicyResource resValue = e.getValue(); - - if(resValue.getIsRecursive() == null) { - ret = !isRecursive; - } else { - ret = resValue.getIsRecursive().booleanValue() == isRecursive; - } - - if(ret) { - break; - } - } - } - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagServiceName(final String tagServiceName, List predicates) { - if(StringUtils.isEmpty(tagServiceName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - ret = StringUtils.equals(tagServiceName, service.getTagService()); - } else { - ret = true; - } - - return ret; - } - }; - - if(ret != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForResourceSignature(String signature, List predicates) { - - Predicate ret = createPredicateForResourceSignature(signature); - - if(predicates != null && ret != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPolicyType(final String policyType, List predicates) { - if(StringUtils.isEmpty(policyType)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = true; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - if(policy.getPolicyType() != null) { - ret = StringUtils.equalsIgnoreCase(policyType, policy.getPolicyType().toString()); - } - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPartialPolicyLabels(final String policyLabels, List predicates) { - if (StringUtils.isEmpty(policyLabels)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if (object == null) { - return false; - } - boolean ret = false; - - if (object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy) object; - // exact match - /*if (policy.getPolicyLabels().contains(policyLabels)) { - ret = true; - }*/ - /*partial match*/ - for (String label :policy.getPolicyLabels()){ - ret = StringUtils.containsIgnoreCase(label, policyLabels); - if(ret){ - return ret; - } - } - - } else { - ret = true; - } - return ret; - } - }; - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPolicyPriority(final String policyPriority, List predicates) { - if(StringUtils.isEmpty(policyPriority)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if (object == null) { - return false; - } - - boolean ret = true; - - if (object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy) object; - - Integer priority = policy.getPolicyPriority() != null ? 
policy.getPolicyPriority() : RangerPolicy.POLICY_PRIORITY_NORMAL; - - if (priority == RangerPolicy.POLICY_PRIORITY_NORMAL) { - ret = StringUtils.equalsIgnoreCase(policyPriority, policy.POLICY_PRIORITY_NAME_NORMAL) - || StringUtils.equalsIgnoreCase(policyPriority, priority.toString()); - } else if (priority == RangerPolicy.POLICY_PRIORITY_OVERRIDE) { - ret = StringUtils.equalsIgnoreCase(policyPriority, policy.POLICY_PRIORITY_NAME_OVERRIDE) - || StringUtils.equalsIgnoreCase(policyPriority, priority.toString()); - } else { - ret = false; - } - } - return ret; - } - - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - - public Predicate createPredicateForResourceSignature(final String policySignature) { - - if (StringUtils.isEmpty(policySignature)) { - return null; - } - - return new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if (object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - ret = StringUtils.equals(policy.getResourceSignature(), policySignature); - } else { - ret = true; - } - - return ret; - } - }; - } - private Predicate addPredicateForZoneName(final String zoneName, List predicates) { - - if(StringUtils.isEmpty(zoneName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - final boolean ret; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - if (policy.getZoneName() != null) { - ret = StringUtils.equals(zoneName, policy.getZoneName()); - } else { - ret = StringUtils.isEmpty(zoneName); - } - } else if (object instanceof RangerSecurityZone) { - RangerSecurityZone securityZone = (RangerSecurityZone)object; - - return StringUtils.equals(securityZone.getName(), zoneName); - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractServiceStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractServiceStore.java deleted file mode 100644 index a60df515de..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractServiceStore.java +++ /dev/null @@ -1,644 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
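The predicate-util methods removed above all follow one shape: each addPredicateFor...() builds an org.apache.commons.collections Predicate, appends it to the caller-supplied list, and the caller later applies the combined predicates to filter search results. A minimal, standalone sketch of that pattern, using plain strings in place of the Ranger model objects (class names and the sample data here are illustrative only, not part of the deleted code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.PredicateUtils;
import org.apache.commons.lang.StringUtils;

public class PredicateFilterSketch {
    public static void main(String[] args) {
        // one predicate per populated search-filter parameter, as in addPredicates()
        List<Predicate> predicates = new ArrayList<>();

        final String namePart = "tag";
        predicates.add(new Predicate() {
            @Override
            public boolean evaluate(Object object) {
                // null objects never match, mirroring the null-guard in the deleted predicates
                return object != null && StringUtils.containsIgnoreCase(object.toString(), namePart);
            }
        });

        List<String> serviceNames = new ArrayList<>(Arrays.asList("tag_service", "hive_service", "atlas_tag"));

        // CollectionUtils.filter mutates the list in place, keeping only matching elements
        CollectionUtils.filter(serviceNames, PredicateUtils.allPredicate(predicates));

        System.out.println(serviceNames); // [tag_service, atlas_tag]
    }
}

The same null-guard plus containsIgnoreCase matching appears in the deleted addPredicateForPolicyResource() and addPredicateForPartialPolicyLabels() implementations.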
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerAdminConfig; -import org.apache.atlas.plugin.model.RangerBaseModelObject; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.util.SearchFilter; -import org.apache.atlas.services.tag.RangerServiceTag; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; - -public abstract class AbstractServiceStore implements ServiceStore { - private static final Log LOG = LogFactory.getLog(AbstractServiceStore.class); - - public static final String COMPONENT_ACCESSTYPE_SEPARATOR = ":"; - - public static final String AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP = "ranger.servicedef.autopropagate.rowfilterdef.to.tag"; - - public static final boolean AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP_DEFAULT = false; - - private static final int MAX_ACCESS_TYPES_IN_SERVICE_DEF = 1000; - - private final RangerAdminConfig config; - - // when a service-def is updated, the updated service-def should be made available to plugins - // this is achieved by incrementing policyVersion of all its services - protected abstract void updateServicesForServiceDefUpdate(RangerServiceDef serviceDef) throws Exception; - - protected AbstractServiceStore() { - this.config = RangerAdminConfig.getInstance(); - } - - @Override - public void updateTagServiceDefForAccessTypes() throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> ServiceDefDBStore.updateTagServiceDefForAccessTypes()"); - } - List allServiceDefs = getServiceDefs(new SearchFilter()); - for (RangerServiceDef serviceDef : allServiceDefs) { - updateTagServiceDefForUpdatingAccessTypes(serviceDef); - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== ServiceDefDBStore.updateTagServiceDefForAccessTypes()"); - } - } - - @Override - public PList getPaginatedServiceDefs(SearchFilter filter) throws Exception { - List resultList = getServiceDefs(filter); - - return CollectionUtils.isEmpty(resultList) ? new PList() : new PList(resultList, 0, resultList.size(), - (long) resultList.size(), resultList.size(), filter.getSortType(), filter.getSortBy()); - } - - @Override - public PList getPaginatedServices(SearchFilter filter) throws Exception { - List resultList = getServices(filter); - - return CollectionUtils.isEmpty(resultList) ? new PList() : new PList(resultList, 0, resultList.size(), (long) resultList.size(), - resultList.size(), filter.getSortType(), filter.getSortBy()); - } - - @Override - public PList getPaginatedPolicies(SearchFilter filter) throws Exception { - List resultList = getPolicies(filter); - - return CollectionUtils.isEmpty(resultList) ? new PList() : new PList(resultList, 0, resultList.size(), (long) resultList.size(), - resultList.size(), filter.getSortType(), filter.getSortBy()); - } - - @Override - public PList getPaginatedServicePolicies(Long serviceId, SearchFilter filter) throws Exception { - List resultList = getServicePolicies(serviceId, filter); - - return CollectionUtils.isEmpty(resultList) ? 
new PList() : new PList(resultList, 0, resultList.size(), (long) resultList.size(), - resultList.size(), filter.getSortType(), filter.getSortBy()); - } - - @Override - public PList getPaginatedServicePolicies(String serviceName, SearchFilter filter) throws Exception { - List resultList = getServicePolicies(serviceName, filter); - - return CollectionUtils.isEmpty(resultList) ? new PList() : new PList(resultList, 0, resultList.size(), (long) resultList.size(), - resultList.size(), filter.getSortType(), filter.getSortBy()); - } - - @Override - public Long getServicePolicyVersion(String serviceName) { - RangerService service = null; - try { - service = getServiceByName(serviceName); - } catch (Exception exception) { - LOG.error("Failed to get service object for service:" + serviceName); - } - return service != null ? service.getPolicyVersion() : null; - } - - protected void postCreate(RangerBaseModelObject obj) throws Exception { - if (obj instanceof RangerServiceDef) { - updateTagServiceDefForUpdatingAccessTypes((RangerServiceDef) obj); - } - } - - protected void postUpdate(RangerBaseModelObject obj) throws Exception { - if (obj instanceof RangerServiceDef) { - RangerServiceDef serviceDef = (RangerServiceDef) obj; - - updateTagServiceDefForUpdatingAccessTypes(serviceDef); - updateServicesForServiceDefUpdate(serviceDef); - } - } - - protected void postDelete(RangerBaseModelObject obj) throws Exception { - if (obj instanceof RangerServiceDef) { - updateTagServiceDefForDeletingAccessTypes(((RangerServiceDef) obj).getName()); - } - } - - public static long getNextVersion(Long currentVersion) { - return currentVersion == null ? 1L : currentVersion + 1; - } - - private RangerServiceDef.RangerAccessTypeDef findAccessTypeDef(long itemId, List accessTypeDefs) { - RangerServiceDef.RangerAccessTypeDef ret = null; - - for (RangerServiceDef.RangerAccessTypeDef accessTypeDef : accessTypeDefs) { - if (itemId == accessTypeDef.getItemId()) { - ret = accessTypeDef; - break; - } - } - return ret; - } - - private boolean updateTagAccessTypeDef(RangerServiceDef.RangerAccessTypeDef tagAccessType, RangerServiceDef.RangerAccessTypeDef svcAccessType, String prefix) { - - boolean isUpdated = false; - - if (!Objects.equals(tagAccessType.getName().substring(prefix.length()), svcAccessType.getName())) { - isUpdated = true; - } else if (!Objects.equals(tagAccessType.getLabel(), svcAccessType.getLabel())) { - isUpdated = true; - } else if (!Objects.equals(tagAccessType.getRbKeyLabel(), svcAccessType.getRbKeyLabel())) { - isUpdated = true; - } else { - Collection tagImpliedGrants = tagAccessType.getImpliedGrants(); - Collection svcImpliedGrants = svcAccessType.getImpliedGrants(); - - int tagImpliedGrantsLen = tagImpliedGrants == null ? 0 : tagImpliedGrants.size(); - int svcImpliedGrantsLen = svcImpliedGrants == null ? 
0 : svcImpliedGrants.size(); - - if (tagImpliedGrantsLen != svcImpliedGrantsLen) { - isUpdated = true; - } else if (tagImpliedGrantsLen > 0) { - for (String svcImpliedGrant : svcImpliedGrants) { - if (!tagImpliedGrants.contains(prefix + svcImpliedGrant)) { - isUpdated = true; - break; - } - } - } - } - - if (isUpdated) { - tagAccessType.setName(prefix + svcAccessType.getName()); - tagAccessType.setLabel(svcAccessType.getLabel()); - tagAccessType.setRbKeyLabel(svcAccessType.getRbKeyLabel()); - - tagAccessType.setImpliedGrants(new HashSet()); - if (CollectionUtils.isNotEmpty(svcAccessType.getImpliedGrants())) { - for (String svcImpliedGrant : svcAccessType.getImpliedGrants()) { - tagAccessType.getImpliedGrants().add(prefix + svcImpliedGrant); - } - } - } - return isUpdated; - } - - private boolean updateTagAccessTypeDefs(List svcDefAccessTypes, List tagDefAccessTypes, - long itemIdOffset, String prefix) { - - List toAdd = new ArrayList<>(); - List toUpdate = new ArrayList<>(); - List toDelete = new ArrayList<>(); - - for (RangerServiceDef.RangerAccessTypeDef svcAccessType : svcDefAccessTypes) { - long tagAccessTypeItemId = svcAccessType.getItemId() + itemIdOffset; - - RangerServiceDef.RangerAccessTypeDef tagAccessType = findAccessTypeDef(tagAccessTypeItemId, tagDefAccessTypes); - - if (tagAccessType == null) { - tagAccessType = new RangerServiceDef.RangerAccessTypeDef(); - - tagAccessType.setItemId(tagAccessTypeItemId); - tagAccessType.setName(prefix + svcAccessType.getName()); - tagAccessType.setLabel(svcAccessType.getLabel()); - tagAccessType.setRbKeyLabel(svcAccessType.getRbKeyLabel()); - - tagAccessType.setImpliedGrants(new HashSet()); - if (CollectionUtils.isNotEmpty(svcAccessType.getImpliedGrants())) { - for (String svcImpliedGrant : svcAccessType.getImpliedGrants()) { - tagAccessType.getImpliedGrants().add(prefix + svcImpliedGrant); - } - } - - toAdd.add(tagAccessType); - } - } - - - for (RangerServiceDef.RangerAccessTypeDef tagAccessType : tagDefAccessTypes) { - if (tagAccessType.getName().startsWith(prefix)) { - long svcAccessTypeItemId = tagAccessType.getItemId() - itemIdOffset; - - RangerServiceDef.RangerAccessTypeDef svcAccessType = findAccessTypeDef(svcAccessTypeItemId, svcDefAccessTypes); - - if (svcAccessType == null) { // accessType has been deleted in service - toDelete.add(tagAccessType); - } else if (updateTagAccessTypeDef(tagAccessType, svcAccessType, prefix)) { - toUpdate.add(tagAccessType); - } - } - } - - boolean updateNeeded = false; - - if (CollectionUtils.isNotEmpty(toAdd) || CollectionUtils.isNotEmpty(toUpdate) || CollectionUtils.isNotEmpty(toDelete)) { - if (LOG.isDebugEnabled()) { - for (RangerServiceDef.RangerAccessTypeDef accessTypeDef : toDelete) { - LOG.debug("accessTypeDef-to-delete:[" + accessTypeDef + "]"); - } - - for (RangerServiceDef.RangerAccessTypeDef accessTypeDef : toUpdate) { - LOG.debug("accessTypeDef-to-update:[" + accessTypeDef + "]"); - } - for (RangerServiceDef.RangerAccessTypeDef accessTypeDef : toAdd) { - LOG.debug("accessTypeDef-to-add:[" + accessTypeDef + "]"); - } - } - - tagDefAccessTypes.addAll(toAdd); - tagDefAccessTypes.removeAll(toDelete); - - updateNeeded = true; - } - return updateNeeded; - } - - private void updateTagServiceDefForUpdatingAccessTypes(RangerServiceDef serviceDef) throws Exception { - if (StringUtils.equals(serviceDef.getName(), EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME)) { - return; - } - - if (EmbeddedServiceDefsUtil.instance().getTagServiceDefId() == -1) { - 
LOG.info("AbstractServiceStore.updateTagServiceDefForUpdatingAccessTypes(" + serviceDef.getName() + "): tag service-def does not exist"); - } - - RangerServiceDef tagServiceDef; - try { - tagServiceDef = this.getServiceDef(EmbeddedServiceDefsUtil.instance().getTagServiceDefId()); - } catch (Exception e) { - LOG.error("AbstractServiceStore.updateTagServiceDefForUpdatingAccessTypes" + serviceDef.getName() + "): could not find TAG ServiceDef.. ", e); - throw e; - } - - if (tagServiceDef == null) { - LOG.error("AbstractServiceStore.updateTagServiceDefForUpdatingAccessTypes(" + serviceDef.getName() + "): could not find TAG ServiceDef.. "); - - return; - } - - String serviceDefName = serviceDef.getName(); - String prefix = serviceDefName + COMPONENT_ACCESSTYPE_SEPARATOR; - - List svcDefAccessTypes = serviceDef.getAccessTypes(); - List tagDefAccessTypes = tagServiceDef.getAccessTypes(); - - long itemIdOffset = serviceDef.getId() * (MAX_ACCESS_TYPES_IN_SERVICE_DEF + 1); - - boolean updateNeeded = updateTagAccessTypeDefs(svcDefAccessTypes, tagDefAccessTypes, itemIdOffset, prefix); - - if (updateTagServiceDefForUpdatingDataMaskDef(tagServiceDef, serviceDef, itemIdOffset, prefix)) { - updateNeeded = true; - } - - if (updateTagServiceDefForUpdatingRowFilterDef(tagServiceDef, serviceDef, itemIdOffset, prefix)) { - updateNeeded = true; - } - - boolean resourceUpdated = updateResourceInTagServiceDef(tagServiceDef); - - updateNeeded = updateNeeded || resourceUpdated; - - if (updateNeeded) { - try { - updateServiceDef(tagServiceDef); - LOG.info("AbstractServiceStore.updateTagServiceDefForUpdatingAccessTypes -- updated TAG service def with " + serviceDefName + " access types"); - } catch (Exception e) { - LOG.error("AbstractServiceStore.updateTagServiceDefForUpdatingAccessTypes -- Failed to update TAG ServiceDef.. ", e); - throw e; - } - } - } - - private void updateTagServiceDefForDeletingAccessTypes(String serviceDefName) throws Exception { - if (EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME.equals(serviceDefName)) { - return; - } - - RangerServiceDef tagServiceDef; - try { - tagServiceDef = this.getServiceDef(EmbeddedServiceDefsUtil.instance().getTagServiceDefId()); - } catch (Exception e) { - LOG.error("AbstractServiceStore.updateTagServiceDefForDeletingAccessTypes(" + serviceDefName + "): could not find TAG ServiceDef.. ", e); - throw e; - } - - if (tagServiceDef == null) { - LOG.error("AbstractServiceStore.updateTagServiceDefForDeletingAccessTypes(" + serviceDefName + "): could not find TAG ServiceDef.. "); - - return; - } - - List accessTypes = new ArrayList<>(); - - for (RangerServiceDef.RangerAccessTypeDef accessType : tagServiceDef.getAccessTypes()) { - if (accessType.getName().startsWith(serviceDefName + COMPONENT_ACCESSTYPE_SEPARATOR)) { - accessTypes.add(accessType); - } - } - - tagServiceDef.getAccessTypes().removeAll(accessTypes); - - updateTagServiceDefForDeletingDataMaskDef(tagServiceDef, serviceDefName); - - updateTagServiceDefForDeletingRowFilterDef(tagServiceDef, serviceDefName); - - updateResourceInTagServiceDef(tagServiceDef); - - try { - updateServiceDef(tagServiceDef); - LOG.info("AbstractServiceStore.updateTagServiceDefForDeletingAccessTypes -- updated TAG service def with " + serviceDefName + " access types"); - } catch (Exception e) { - LOG.error("AbstractServiceStore.updateTagServiceDefForDeletingAccessTypes -- Failed to update TAG ServiceDef.. 
", e); - throw e; - } - } - - private boolean updateTagServiceDefForUpdatingDataMaskDef(RangerServiceDef tagServiceDef, RangerServiceDef serviceDef, long itemIdOffset, String prefix) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AbstractServiceStore.updateTagServiceDefForUpdatingDataMaskDef(" + serviceDef.getName() + ")"); - } - boolean ret = false; - - RangerServiceDef.RangerDataMaskDef svcDataMaskDef = serviceDef.getDataMaskDef(); - RangerServiceDef.RangerDataMaskDef tagDataMaskDef = tagServiceDef.getDataMaskDef(); - - List svcDefMaskTypes = svcDataMaskDef.getMaskTypes(); - List tagDefMaskTypes = tagDataMaskDef.getMaskTypes(); - - List svcDefAccessTypes = svcDataMaskDef.getAccessTypes(); - List tagDefAccessTypes = tagDataMaskDef.getAccessTypes(); - - List maskTypesToAdd = new ArrayList<>(); - List maskTypesToUpdate = new ArrayList<>(); - List maskTypesToDelete = new ArrayList<>(); - - for (RangerServiceDef.RangerDataMaskTypeDef svcMaskType : svcDefMaskTypes) { - long tagMaskTypeItemId = itemIdOffset + svcMaskType.getItemId(); - RangerServiceDef.RangerDataMaskTypeDef foundTagMaskType = null; - for (RangerServiceDef.RangerDataMaskTypeDef tagMaskType : tagDefMaskTypes) { - if (tagMaskType.getItemId().equals(tagMaskTypeItemId)) { - foundTagMaskType = tagMaskType; - break; - } - } - if (foundTagMaskType == null) { - RangerServiceDef.RangerDataMaskTypeDef tagMaskType = new RangerServiceDef.RangerDataMaskTypeDef(svcMaskType); - tagMaskType.setName(prefix + svcMaskType.getName()); - tagMaskType.setItemId(itemIdOffset + svcMaskType.getItemId()); - tagMaskType.setLabel(svcMaskType.getLabel()); - tagMaskType.setRbKeyLabel(svcMaskType.getRbKeyLabel()); - maskTypesToAdd.add(tagMaskType); - } - } - - for (RangerServiceDef.RangerDataMaskTypeDef tagMaskType : tagDefMaskTypes) { - if (StringUtils.startsWith(tagMaskType.getName(), prefix)) { - - RangerServiceDef.RangerDataMaskTypeDef foundSvcMaskType = null; - for (RangerServiceDef.RangerDataMaskTypeDef svcMaskType : svcDefMaskTypes) { - long tagMaskTypeItemId = itemIdOffset + svcMaskType.getItemId(); - if (tagMaskType.getItemId().equals(tagMaskTypeItemId)) { - foundSvcMaskType = svcMaskType; - break; - } - } - if (foundSvcMaskType == null) { - maskTypesToDelete.add(tagMaskType); - continue; - } - - RangerServiceDef.RangerDataMaskTypeDef checkTagMaskType = new RangerServiceDef.RangerDataMaskTypeDef(foundSvcMaskType); - - checkTagMaskType.setName(prefix + foundSvcMaskType.getName()); - checkTagMaskType.setItemId(itemIdOffset + foundSvcMaskType.getItemId()); - - if (!checkTagMaskType.equals(tagMaskType)) { - tagMaskType.setLabel(checkTagMaskType.getLabel()); - tagMaskType.setDescription(checkTagMaskType.getDescription()); - tagMaskType.setTransformer(checkTagMaskType.getTransformer()); - tagMaskType.setDataMaskOptions(checkTagMaskType.getDataMaskOptions()); - tagMaskType.setRbKeyLabel(checkTagMaskType.getRbKeyLabel()); - tagMaskType.setRbKeyDescription(checkTagMaskType.getRbKeyDescription()); - maskTypesToUpdate.add(tagMaskType); - } - } - } - - if (CollectionUtils.isNotEmpty(maskTypesToAdd) || CollectionUtils.isNotEmpty(maskTypesToUpdate) || CollectionUtils.isNotEmpty(maskTypesToDelete)) { - ret = true; - - if (LOG.isDebugEnabled()) { - for (RangerServiceDef.RangerDataMaskTypeDef maskTypeDef : maskTypesToDelete) { - LOG.debug("maskTypeDef-to-delete:[" + maskTypeDef + "]"); - } - - for (RangerServiceDef.RangerDataMaskTypeDef maskTypeDef : maskTypesToUpdate) { - LOG.debug("maskTypeDef-to-update:[" + maskTypeDef + "]"); - } - - for 
(RangerServiceDef.RangerDataMaskTypeDef maskTypeDef : maskTypesToAdd) { - LOG.debug("maskTypeDef-to-add:[" + maskTypeDef + "]"); - } - } - - tagDefMaskTypes.removeAll(maskTypesToDelete); - tagDefMaskTypes.addAll(maskTypesToAdd); - - tagDataMaskDef.setMaskTypes(tagDefMaskTypes); - } - - boolean tagMaskDefAccessTypesUpdated = updateTagAccessTypeDefs(svcDefAccessTypes, tagDefAccessTypes, itemIdOffset, prefix); - - if (tagMaskDefAccessTypesUpdated) { - tagDataMaskDef.setAccessTypes(tagDefAccessTypes); - ret = true; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AbstractServiceStore.updateTagServiceDefForUpdatingDataMaskDef(" + serviceDef.getName() + ") : " + ret); - } - - return ret; - } - - private void updateTagServiceDefForDeletingDataMaskDef(RangerServiceDef tagServiceDef, String serviceDefName) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AbstractServiceStore.updateTagServiceDefForDeletingDataMaskDef(" + serviceDefName + ")"); - } - RangerServiceDef.RangerDataMaskDef tagDataMaskDef = tagServiceDef.getDataMaskDef(); - - if (tagDataMaskDef == null) { - return; - } - - String prefix = serviceDefName + COMPONENT_ACCESSTYPE_SEPARATOR; - - List accessTypes = new ArrayList<>(); - - for (RangerServiceDef.RangerAccessTypeDef accessType : tagDataMaskDef.getAccessTypes()) { - if (accessType.getName().startsWith(prefix)) { - accessTypes.add(accessType); - } - } - List maskTypes = new ArrayList<>(); - for (RangerServiceDef.RangerDataMaskTypeDef maskType : tagDataMaskDef.getMaskTypes()) { - if (maskType.getName().startsWith(prefix)) { - maskTypes.add(maskType); - } - } - tagDataMaskDef.getAccessTypes().removeAll(accessTypes); - tagDataMaskDef.getMaskTypes().removeAll(maskTypes); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AbstractServiceStore.updateTagServiceDefForDeletingDataMaskDef(" + serviceDefName + ")"); - } - } - - private boolean updateTagServiceDefForUpdatingRowFilterDef(RangerServiceDef tagServiceDef, RangerServiceDef serviceDef, long itemIdOffset, String prefix) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AbstractServiceStore.updateTagServiceDefForUpdatingRowFilterDef(" + serviceDef.getName() + ")"); - } - boolean ret = false; - - boolean autopropagateRowfilterdefToTag = config.getBoolean(AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP, AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP_DEFAULT); - - if (autopropagateRowfilterdefToTag) { - RangerServiceDef.RangerRowFilterDef svcRowFilterDef = serviceDef.getRowFilterDef(); - RangerServiceDef.RangerRowFilterDef tagRowFilterDef = tagServiceDef.getRowFilterDef(); - - List svcDefAccessTypes = svcRowFilterDef.getAccessTypes(); - List tagDefAccessTypes = tagRowFilterDef.getAccessTypes(); - - boolean tagRowFilterAccessTypesUpdated = updateTagAccessTypeDefs(svcDefAccessTypes, tagDefAccessTypes, itemIdOffset, prefix); - - if (tagRowFilterAccessTypesUpdated) { - tagRowFilterDef.setAccessTypes(tagDefAccessTypes); - ret = true; - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== AbstractServiceStore.updateTagServiceDefForUpdatingRowFilterDef(" + serviceDef.getName() + ") : " + ret); - } - - return ret; - } - - private void updateTagServiceDefForDeletingRowFilterDef(RangerServiceDef tagServiceDef, String serviceDefName) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AbstractServiceStore.updateTagServiceDefForDeletingRowFilterDef(" + serviceDefName + ")"); - } - RangerServiceDef.RangerRowFilterDef tagRowFilterDef = tagServiceDef.getRowFilterDef(); - - if (tagRowFilterDef == null) { - return; - } - - String prefix = serviceDefName + 
COMPONENT_ACCESSTYPE_SEPARATOR; - - List accessTypes = new ArrayList<>(); - - for (RangerServiceDef.RangerAccessTypeDef accessType : tagRowFilterDef.getAccessTypes()) { - if (accessType.getName().startsWith(prefix)) { - accessTypes.add(accessType); - } - } - - tagRowFilterDef.getAccessTypes().removeAll(accessTypes); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AbstractServiceStore.updateTagServiceDefForDeletingRowFilterDef(" + serviceDefName + ")"); - } - } - - private boolean updateResourceInTagServiceDef(RangerServiceDef tagServiceDef) throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AbstractServiceStore.updateResourceInTagServiceDef(" + tagServiceDef + ")"); - } - boolean ret = false; - - final RangerServiceDef.RangerResourceDef accessPolicyTagResource = getResourceDefForTagResource(tagServiceDef.getResources()); - - final List resources = new ArrayList<>(); - - if (accessPolicyTagResource == null) { - LOG.warn("Resource with name :[" + RangerServiceTag.TAG_RESOURCE_NAME + "] not found in tag-service-definition!!"); - } else { - resources.add(accessPolicyTagResource); - } - - RangerServiceDef.RangerDataMaskDef dataMaskDef = tagServiceDef.getDataMaskDef(); - - if (dataMaskDef != null) { - if (CollectionUtils.isNotEmpty(dataMaskDef.getAccessTypes())) { - if (CollectionUtils.isEmpty(dataMaskDef.getResources())) { - dataMaskDef.setResources(resources); - ret = true; - } - } else { - if (CollectionUtils.isNotEmpty(dataMaskDef.getResources())) { - dataMaskDef.setResources(null); - ret = true; - } - } - } - - RangerServiceDef.RangerRowFilterDef rowFilterDef = tagServiceDef.getRowFilterDef(); - - if (rowFilterDef != null) { - boolean autopropagateRowfilterdefToTag = config.getBoolean(AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP, AUTOPROPAGATE_ROWFILTERDEF_TO_TAG_PROP_DEFAULT); - if (autopropagateRowfilterdefToTag) { - if (CollectionUtils.isNotEmpty(rowFilterDef.getAccessTypes())) { - if (CollectionUtils.isEmpty(rowFilterDef.getResources())) { - rowFilterDef.setResources(resources); - ret = true; - } - } else { - if (CollectionUtils.isNotEmpty(rowFilterDef.getResources())) { - rowFilterDef.setResources(null); - ret = true; - } - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AbstractServiceStore.updateResourceInTagServiceDef(" + tagServiceDef + ") : " + ret); - } - return ret; - } - - private RangerServiceDef.RangerResourceDef getResourceDefForTagResource(List resourceDefs) { - RangerServiceDef.RangerResourceDef ret = null; - - if (CollectionUtils.isNotEmpty(resourceDefs)) { - for (RangerServiceDef.RangerResourceDef resourceDef : resourceDefs) { - if (resourceDef.getName().equals(RangerServiceTag.TAG_RESOURCE_NAME)) { - ret = resourceDef; - break; - } - } - } - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractTagStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractTagStore.java deleted file mode 100644 index e0f30e743d..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/AbstractTagStore.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
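The deleted AbstractServiceStore keeps the tag service-def in sync with every component service-def by copying each component access type into the tag def under a "<serviceDefName>:" prefix and an item-id offset of serviceDefId * (MAX_ACCESS_TYPES_IN_SERVICE_DEF + 1). A small sketch of that mapping, with a hypothetical service-def id and access type (the numbers are made up for illustration):

public class TagAccessTypeMappingSketch {
    // mirrors the constants in the deleted AbstractServiceStore
    private static final int MAX_ACCESS_TYPES_IN_SERVICE_DEF = 1000;
    private static final String COMPONENT_ACCESSTYPE_SEPARATOR = ":";

    public static void main(String[] args) {
        long serviceDefId = 3L;          // hypothetical id of a component service-def
        long svcAccessTypeItemId = 2L;   // hypothetical itemId of one of its access types
        String serviceDefName = "hive";
        String svcAccessTypeName = "update";

        long itemIdOffset = serviceDefId * (MAX_ACCESS_TYPES_IN_SERVICE_DEF + 1);

        // the corresponding access type as it would appear in the tag service-def
        long tagItemId = svcAccessTypeItemId + itemIdOffset;                                   // 3005
        String tagName = serviceDefName + COMPONENT_ACCESSTYPE_SEPARATOR + svcAccessTypeName;  // "hive:update"

        System.out.println(tagItemId + " -> " + tagName);
    }
}

Because the offset is derived from the owning service-def id, updateTagAccessTypeDefs() can later map a tag access type back to its source (itemId - offset) to detect deletions and renames.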
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -public abstract class AbstractTagStore implements TagStore { - - protected ServiceStore svcStore; - - @Override - public void init() throws Exception { - // Empty - } - - @Override - final public void setServiceStore(ServiceStore svcStore) { - this.svcStore = svcStore; - } - - @Override - final public ServiceStore getServiceStore() { - return svcStore; - } - -} - - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/EmbeddedServiceDefsUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/EmbeddedServiceDefsUtil.java deleted file mode 100755 index 270141f22f..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/EmbeddedServiceDefsUtil.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.authorization.hadoop.config.RangerAdminConfig; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.util.ServiceDefUtil; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/* - * This utility class deals with service-defs embedded in ranger-plugins-common - * library (hdfs/hbase/hive/knox/storm/..). If any of these service-defs - * don't exist in the given service store, they will be created in the store - * using the embedded definitions. - * - * init() method should be called from ServiceStore implementations to - * initialize embedded service-defs. 
- */ -public class EmbeddedServiceDefsUtil { - private static final Log LOG = LogFactory.getLog(EmbeddedServiceDefsUtil.class); - - - // following servicedef list should be reviewed/updated whenever a new embedded service-def is added - public static final String DEFAULT_BOOTSTRAP_SERVICEDEF_LIST = "tag,hdfs,hbase,hive,kms,knox,storm,yarn,kafka,solr,atlas,nifi,nifi-registry,sqoop,kylin,elasticsearch,presto,ozone,kudu,schema-registry"; - private static final String PROPERTY_SUPPORTED_SERVICE_DEFS = "ranger.supportedcomponents"; - private Set supportedServiceDefs; - public static final String EMBEDDED_SERVICEDEF_TAG_NAME = "tag"; - public static final String EMBEDDED_SERVICEDEF_HDFS_NAME = "hdfs"; - public static final String EMBEDDED_SERVICEDEF_HBASE_NAME = "hbase"; - public static final String EMBEDDED_SERVICEDEF_HIVE_NAME = "hive"; - public static final String EMBEDDED_SERVICEDEF_KMS_NAME = "kms"; - public static final String EMBEDDED_SERVICEDEF_KNOX_NAME = "knox"; - public static final String EMBEDDED_SERVICEDEF_STORM_NAME = "storm"; - public static final String EMBEDDED_SERVICEDEF_YARN_NAME = "yarn"; - public static final String EMBEDDED_SERVICEDEF_KAFKA_NAME = "kafka"; - public static final String EMBEDDED_SERVICEDEF_SOLR_NAME = "solr"; - public static final String EMBEDDED_SERVICEDEF_SCHEMA_REGISTRY_NAME = "schema-registry"; - public static final String EMBEDDED_SERVICEDEF_NIFI_NAME = "nifi"; - public static final String EMBEDDED_SERVICEDEF_NIFI_REGISTRY_NAME = "nifi-registry"; - public static final String EMBEDDED_SERVICEDEF_ATLAS_NAME = "atlas"; - public static final String EMBEDDED_SERVICEDEF_WASB_NAME = "wasb"; - public static final String EMBEDDED_SERVICEDEF_SQOOP_NAME = "sqoop"; - public static final String EMBEDDED_SERVICEDEF_KYLIN_NAME = "kylin"; - public static final String EMBEDDED_SERVICEDEF_ABFS_NAME = "abfs"; - public static final String EMBEDDED_SERVICEDEF_ELASTICSEARCH_NAME = "elasticsearch"; - public static final String EMBEDDED_SERVICEDEF_PRESTO_NAME = "presto"; - public static final String EMBEDDED_SERVICEDEF_OZONE_NAME = "ozone"; - public static final String EMBEDDED_SERVICEDEF_KUDU_NAME = "kudu"; - - public static final String PROPERTY_CREATE_EMBEDDED_SERVICE_DEFS = "ranger.service.store.create.embedded.service-defs"; - - public static final String HDFS_IMPL_CLASS_NAME = "org.apache.atlas.services.hdfs.RangerServiceHdfs"; - public static final String HBASE_IMPL_CLASS_NAME = "org.apache.atlas.services.hbase.RangerServiceHBase"; - public static final String HIVE_IMPL_CLASS_NAME = "org.apache.atlas.services.hive.RangerServiceHive"; - public static final String KMS_IMPL_CLASS_NAME = "org.apache.atlas.services.kms.RangerServiceKMS"; - public static final String KNOX_IMPL_CLASS_NAME = "org.apache.atlas.services.knox.RangerServiceKnox"; - public static final String STORM_IMPL_CLASS_NAME = "org.apache.atlas.services.storm.RangerServiceStorm"; - public static final String YARN_IMPL_CLASS_NAME = "org.apache.atlas.services.yarn.RangerServiceYarn"; - public static final String KAFKA_IMPL_CLASS_NAME = "org.apache.atlas.services.kafka.RangerServiceKafka"; - public static final String SOLR_IMPL_CLASS_NAME = "org.apache.atlas.services.solr.RangerServiceSolr"; - public static final String SCHEMA_REGISTRY_IMPL_CLASS_NAME = "org.apache.atlas.services.schemaregistry.RangerServiceSchemaRegistry"; - public static final String NIFI_IMPL_CLASS_NAME = "org.apache.atlas.services.nifi.RangerServiceNiFi"; - public static final String ATLAS_IMPL_CLASS_NAME = 
"org.apache.atlas.services.atlas.RangerServiceAtlas"; - public static final String PRESTO_IMPL_CLASS_NAME = "org.apache.atlas.services.presto.RangerServicePresto"; - public static final String OZONE_IMPL_CLASS_NAME = "org.apache.atlas.services.ozone.RangerServiceOzone"; - public static final String KUDU_IMPL_CLASS_NAME = "org.apache.atlas.services.kudu.RangerServiceKudu"; - - private static EmbeddedServiceDefsUtil instance = new EmbeddedServiceDefsUtil(); - - private boolean createEmbeddedServiceDefs = true; - private RangerServiceDef hdfsServiceDef; - private RangerServiceDef hBaseServiceDef; - private RangerServiceDef hiveServiceDef; - private RangerServiceDef kmsServiceDef; - private RangerServiceDef knoxServiceDef; - private RangerServiceDef stormServiceDef; - private RangerServiceDef yarnServiceDef; - private RangerServiceDef kafkaServiceDef; - private RangerServiceDef solrServiceDef; - private RangerServiceDef schemaRegistryServiceDef; - private RangerServiceDef nifiServiceDef; - private RangerServiceDef nifiRegistryServiceDef; - private RangerServiceDef atlasServiceDef; - private RangerServiceDef wasbServiceDef; - private RangerServiceDef sqoopServiceDef; - private RangerServiceDef kylinServiceDef; - private RangerServiceDef abfsServiceDef; - private RangerServiceDef elasticsearchServiceDef; - private RangerServiceDef prestoServiceDef; - private RangerServiceDef ozoneServiceDef; - private RangerServiceDef kuduServiceDef; - - private RangerServiceDef tagServiceDef; - - private final Gson gsonBuilder; - private final RangerAdminConfig config; - - /** Private constructor to restrict instantiation of this singleton utility class. */ - private EmbeddedServiceDefsUtil() { - gsonBuilder = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create(); - config = RangerAdminConfig.getInstance(); - } - - public static EmbeddedServiceDefsUtil instance() { - return instance; - } - - public void init(ServiceStore store) { - LOG.info("==> EmbeddedServiceDefsUtil.init()"); - - try { - createEmbeddedServiceDefs = config.getBoolean(PROPERTY_CREATE_EMBEDDED_SERVICE_DEFS, true); - - supportedServiceDefs =getSupportedServiceDef(); - /* - * Maintaining the following service-def create-order is critical for the - * the legacy service-defs (HDFS/HBase/Hive/Knox/Storm) to be assigned IDs - * that were used in earlier version (0.4) */ - hdfsServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_HDFS_NAME); - hBaseServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_HBASE_NAME); - hiveServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_HIVE_NAME); - kmsServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_KMS_NAME); - knoxServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_KNOX_NAME); - stormServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_STORM_NAME); - yarnServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_YARN_NAME); - kafkaServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_KAFKA_NAME); - solrServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_SOLR_NAME); - schemaRegistryServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_SCHEMA_REGISTRY_NAME); - nifiServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_NIFI_NAME); - nifiRegistryServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_NIFI_REGISTRY_NAME); - atlasServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_ATLAS_NAME); - - tagServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_TAG_NAME); - 
wasbServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_WASB_NAME); - sqoopServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_SQOOP_NAME); - kylinServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_KYLIN_NAME); - abfsServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_ABFS_NAME); - elasticsearchServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_ELASTICSEARCH_NAME); - prestoServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_PRESTO_NAME); - ozoneServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_OZONE_NAME); - kuduServiceDef = getOrCreateServiceDef(store, EMBEDDED_SERVICEDEF_KUDU_NAME); - - // Ensure that tag service def is updated with access types of all service defs - store.updateTagServiceDefForAccessTypes(); - } catch(Throwable excp) { - LOG.fatal("EmbeddedServiceDefsUtil.init(): failed", excp); - } - - LOG.info("<== EmbeddedServiceDefsUtil.init()"); - } - - public long getHdfsServiceDefId() { - return getId(hdfsServiceDef); - } - - public long getHBaseServiceDefId() { - return getId(hBaseServiceDef); - } - - public long getHiveServiceDefId() { - return getId(hiveServiceDef); - } - - public long getKmsServiceDefId() { - return getId(kmsServiceDef); - } - - public long getKnoxServiceDefId() { - return getId(knoxServiceDef); - } - - public long getStormServiceDefId() { - return getId(stormServiceDef); - } - - public long getYarnServiceDefId() { - return getId(yarnServiceDef); - } - - public long getKafkaServiceDefId() { - return getId(kafkaServiceDef); - } - - public long getSolrServiceDefId() { - return getId(solrServiceDef); - } - - public long getSchemaRegistryServiceDefId() { - return getId(schemaRegistryServiceDef); - } - - public long getNiFiServiceDefId() { - return getId(nifiServiceDef); - } - - public long getNiFiRegistryServiceDefId() { - return getId(nifiRegistryServiceDef); - } - - public long getAtlasServiceDefId() { - return getId(atlasServiceDef); - } - - public long getSqoopServiceDefId() { - return getId(sqoopServiceDef); - } - - public long getKylinServiceDefId() { - return getId(kylinServiceDef); - } - - public long getElasticsearchServiceDefId() { - return getId(elasticsearchServiceDef); - } - public long getTagServiceDefId() { return getId(tagServiceDef); } - - public long getWasbServiceDefId() { return getId(wasbServiceDef); } - - public long getAbfsServiceDefId() { return getId(abfsServiceDef); } - - public long getPrestoServiceDefId() { return getId(prestoServiceDef); } - - public long getOzoneServiceDefId() { return getId(ozoneServiceDef); } - - public long getKuduServiceDefId() { return getId(kuduServiceDef); } - - public RangerServiceDef getEmbeddedServiceDef(String defType) throws Exception { - RangerServiceDef serviceDef=null; - if(StringUtils.isNotEmpty(defType)){ - serviceDef=loadEmbeddedServiceDef(defType); - } - return serviceDef; - } - - public static boolean isRecursiveEnabled(final RangerServiceDef rangerServiceDef, final String resourceDefName) { - boolean ret = false; - List resourceDefs = rangerServiceDef.getResources(); - for(RangerServiceDef.RangerResourceDef resourceDef:resourceDefs) { - if (resourceDefName.equals(resourceDef.getName())) { - ret = resourceDef.getRecursiveSupported(); - break; - } - } - return ret; - } - - private long getId(RangerServiceDef serviceDef) { - return serviceDef == null || serviceDef.getId() == null ? 
-1 : serviceDef.getId().longValue(); - } - - private RangerServiceDef getOrCreateServiceDef(ServiceStore store, String serviceDefName) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> EmbeddedServiceDefsUtil.getOrCreateServiceDef(" + serviceDefName + ")"); - } - - RangerServiceDef ret = null; - boolean createServiceDef = (CollectionUtils.isEmpty(supportedServiceDefs) || supportedServiceDefs.contains(serviceDefName)); - try { - ret = store.getServiceDefByName(serviceDefName); - if(ret == null && createEmbeddedServiceDefs && createServiceDef) { - ret = ServiceDefUtil.normalize(loadEmbeddedServiceDef(serviceDefName)); - - LOG.info("creating embedded service-def " + serviceDefName); - if (ret.getId() != null) { - store.setPopulateExistingBaseFields(true); - try { - ret = store.createServiceDef(ret); - } finally { - store.setPopulateExistingBaseFields(false); - } - } else { - ret = store.createServiceDef(ret); - } - LOG.info("created embedded service-def " + serviceDefName); - } - } catch(Exception excp) { - LOG.fatal("EmbeddedServiceDefsUtil.getOrCreateServiceDef(): failed to load/create serviceType " + serviceDefName, excp); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== EmbeddedServiceDefsUtil.getOrCreateServiceDef(" + serviceDefName + "): " + ret); - } - - return ret; - } - - private RangerServiceDef loadEmbeddedServiceDef(String serviceType) throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> EmbeddedServiceDefsUtil.loadEmbeddedServiceDef(" + serviceType + ")"); - } - - RangerServiceDef ret = null; - - String resource = "/service-defs/ranger-servicedef-" + serviceType + ".json"; - - InputStream inStream = getClass().getResourceAsStream(resource); - - InputStreamReader reader = new InputStreamReader(inStream); - - ret = gsonBuilder.fromJson(reader, RangerServiceDef.class); - - //Set DEFAULT displayName if missing - if (ret != null && StringUtils.isBlank(ret.getDisplayName())) { - ret.setDisplayName(ret.getName()); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("==> EmbeddedServiceDefsUtil.loadEmbeddedServiceDef(" + serviceType + ")"); - } - - return ret; - } - - private Set getSupportedServiceDef(){ - Set supportedServiceDef =new HashSet<>(); - try{ - String ranger_supportedcomponents = config.get(PROPERTY_SUPPORTED_SERVICE_DEFS, DEFAULT_BOOTSTRAP_SERVICEDEF_LIST); - if(StringUtils.isBlank(ranger_supportedcomponents) || "all".equalsIgnoreCase(ranger_supportedcomponents)){ - ranger_supportedcomponents=DEFAULT_BOOTSTRAP_SERVICEDEF_LIST; - } - String[] supportedComponents=ranger_supportedcomponents.split(","); - if(supportedComponents!=null && supportedComponents.length>0){ - for(String element:supportedComponents){ - if(!StringUtils.isBlank(element)){ - element=element.toLowerCase(); - supportedServiceDef.add(element); - } - } - } - }catch(Exception ex){ - LOG.error("EmbeddedServiceDefsUtil.getSupportedServiceDef(): failed", ex); - } - return supportedServiceDef; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/GeolocationStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/GeolocationStore.java deleted file mode 100644 index c995ba668e..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/GeolocationStore.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
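The deleted EmbeddedServiceDefsUtil resolves each bootstrap service-def from a classpath resource named /service-defs/ranger-servicedef-<type>.json and deserializes it with Gson. A hedged sketch of that lookup; ServiceDef here is only a stand-in for RangerServiceDef with the two fields the sketch prints, not the real model class:

import java.io.InputStream;
import java.io.InputStreamReader;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class EmbeddedServiceDefLoadSketch {
    // stand-in for RangerServiceDef, kept minimal so the sketch is self-contained
    static class ServiceDef {
        String name;
        String displayName;
    }

    public static void main(String[] args) throws Exception {
        String serviceType = "tag"; // any entry from the bootstrap service-def list
        String resource = "/service-defs/ranger-servicedef-" + serviceType + ".json";

        Gson gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create();

        try (InputStream in = EmbeddedServiceDefLoadSketch.class.getResourceAsStream(resource)) {
            if (in == null) {
                System.out.println("no embedded service-def on the classpath for " + serviceType);
                return;
            }
            ServiceDef def = gson.fromJson(new InputStreamReader(in), ServiceDef.class);
            // loadEmbeddedServiceDef() falls back to the name when displayName is blank
            System.out.println(def.name + " / " + def.displayName);
        }
    }
}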
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.geo.RangerGeolocationData; -import org.apache.atlas.plugin.geo.RangerGeolocationDatabase; - -import java.util.Map; - -public interface GeolocationStore { - void init(Map context); - - RangerGeolocationData getGeoLocation(String ipAddress); - RangerGeolocationDatabase getGeoDatabase(); -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/PList.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/PList.java deleted file mode 100644 index e00e8627a4..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/PList.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import java.util.List; - -public class PList implements java.io.Serializable { - - private static final long serialVersionUID = 1L; - - /** - * Start index for the result - */ - protected int startIndex; - /** - * Page size used for the result - */ - protected int pageSize; - /** - * Total records in the database for the given search conditions - */ - protected long totalCount; - /** - * Number of rows returned for the search condition - */ - protected int resultSize; - /** - * Sort type. Either desc or asc - */ - protected String sortType; - /** - * Comma seperated list of the fields for sorting - */ - protected String sortBy; - - protected long queryTimeMS = System.currentTimeMillis(); - - protected List list; - /** - * Default constructor. This will set all the attributes to default value. - */ - public PList() { - startIndex = 0; - pageSize = 0; - totalCount = 0; - resultSize = 0; - sortType = null; - sortBy = null; - } - - public PList(List list, int startIndex, int pageSize, long totalCount, int resultSize, String sortType, String sortBy) { - this.list = list; - this.startIndex = startIndex; - this.pageSize = pageSize; - this.totalCount = totalCount; - this.resultSize = resultSize; - this.sortType = sortType; - this.sortBy = sortBy; - - } - - public int getListSize() { - return list == null ? 
0 : list.size(); - } - - public void setList(List list) {this.list = list;} - - public List getList() { - return list; - } - - /** - * This method sets the value to the member attribute startIndex. You - * cannot set null to the attribute. - * - * @param startIndex - * Value to set member attribute startIndex - */ - public void setStartIndex(int startIndex) { - this.startIndex = startIndex; - } - public int getStartIndex() { return startIndex; } - - - /** - * This method sets the value to the member attribute pageSize. You - * cannot set null to the attribute. - * - * @param pageSize - * Value to set member attribute pageSize - */ - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - public int getPageSize() { return pageSize; } - - - /** - * This method sets the value to the member attribute totalCount. You - * cannot set null to the attribute. - * - * @param totalCount - * Value to set member attribute totalCount - */ - public void setTotalCount(long totalCount) { - this.totalCount = totalCount; - } - public long getTotalCount() { return totalCount; } - - - - /** - * This method sets the value to the member attribute resultSize. You - * cannot set null to the attribute. - * - * @param resultSize - * Value to set member attribute resultSize - */ - public void setResultSize(int resultSize) { - this.resultSize = resultSize; - } - - /** - * Returns the value for the member attribute resultSize - * - * @return int - value of member attribute resultSize. - */ - public int getResultSize() { - return getListSize(); - } - - /** - * This method sets the value to the member attribute sortType. You - * cannot set null to the attribute. - * - * @param sortType - * Value to set member attribute sortType - */ - public void setSortType(String sortType) { - this.sortType = sortType; - } - public String getSortType() { return sortType; } - - - - /** - * This method sets the value to the member attribute sortBy. You - * cannot set null to the attribute. - * - * @param sortBy - * Value to set member attribute sortBy - */ - public void setSortBy(String sortBy) { - this.sortBy = sortBy; - } - public String getSortBy() { return sortBy; } - - - - - - - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "PList [startIndex=" + startIndex + ", pageSize=" - + pageSize + ", totalCount=" + totalCount - + ", resultSize=" + resultSize + ", sortType=" - + sortType + ", sortBy=" + sortBy + ", queryTimeMS=" - + queryTimeMS + "]"; - } -} \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RangerServiceResourceSignature.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RangerServiceResourceSignature.java deleted file mode 100644 index 1a399abbc6..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RangerServiceResourceSignature.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
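The paginated getters in the removed store classes wrap an unpaged result list into a single PList page whose startIndex is 0 and whose pageSize, totalCount and resultSize all equal the list size. A self-contained sketch of that wrapping, with Page standing in for the deleted PList so the snippet compiles on its own:

import java.util.Arrays;
import java.util.List;

public class PListWrapSketch {
    // minimal stand-in for the deleted PList, keeping only the fields used below
    static class Page<T> {
        final List<T> list;
        final int startIndex, pageSize, resultSize;
        final long totalCount;

        Page(List<T> list, int startIndex, int pageSize, long totalCount, int resultSize) {
            this.list = list;
            this.startIndex = startIndex;
            this.pageSize = pageSize;
            this.totalCount = totalCount;
            this.resultSize = resultSize;
        }
    }

    // mirrors getPaginatedServices(): the whole result set becomes a single "page"
    static <T> Page<T> wrap(List<T> resultList) {
        return new Page<>(resultList, 0, resultList.size(), resultList.size(), resultList.size());
    }

    public static void main(String[] args) {
        Page<String> page = wrap(Arrays.asList("svc-1", "svc-2", "svc-3"));
        System.out.println(page.totalCount + " records, pageSize=" + page.pageSize);
    }
}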
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.atlas.authorization.hadoop.config.RangerAdminConfig; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerServiceResource; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -public class RangerServiceResourceSignature { - private final String _string; - private final String _hash; - - public RangerServiceResourceSignature(RangerServiceResource serviceResource) { - _string = ServiceResourceSerializer.toString(serviceResource); - if (RangerAdminConfig.getInstance().isFipsEnabled()) { - _hash = DigestUtils.sha512Hex(_string); - } else { - _hash = DigestUtils.sha256Hex(_string); - } - } - - String asString() { - return _string; - } - - public String getSignature() { - return _hash; - } - - static class ServiceResourceSerializer { - - static final int _SignatureVersion = 1; - - static public String toString(final RangerServiceResource serviceResource) { - // invalid/empty serviceResource gets a deterministic signature as if it had an - // empty resource string - Map resource = serviceResource.getResourceElements(); - Map resources = new TreeMap<>(); - for (Map.Entry entry : resource.entrySet()) { - String resourceName = entry.getKey(); - ResourceSerializer resourceView = new ResourceSerializer(entry.getValue()); - resources.put(resourceName, resourceView); - } - String resourcesAsString = resources.toString(); - return String.format("{version=%d,resource=%s}", _SignatureVersion, resourcesAsString); - } - - static class ResourceSerializer { - final RangerPolicy.RangerPolicyResource _policyResource; - - ResourceSerializer(RangerPolicy.RangerPolicyResource policyResource) { - _policyResource = policyResource; - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("{"); - if (_policyResource != null) { - builder.append("values="); - if (_policyResource.getValues() != null) { - List values = new ArrayList<>(_policyResource.getValues()); - Collections.sort(values); - builder.append(values); - } - - builder.append(",excludes="); - if (_policyResource.getIsExcludes() == null) { // null is same as false - builder.append(Boolean.FALSE); - } else { - builder.append(_policyResource.getIsExcludes()); - } - - builder.append(",recursive="); - if (_policyResource.getIsRecursive() == null) { // null is the same as false - builder.append(Boolean.FALSE); - } else { - builder.append(_policyResource.getIsRecursive()); - } - } - builder.append("}"); - return builder.toString(); - } - } - } -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RolePredicateUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RolePredicateUtil.java deleted file mode 100644 index 0f8b6ffe90..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RolePredicateUtil.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Licensed to the Apache Software 
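The removed RangerServiceResourceSignature derives a policy-resource signature by serializing the resources into a canonical string of the form {version=1,resource=...} (values sorted, excludes/recursive defaulting to false) and hashing it with SHA-256, or SHA-512 when FIPS mode is enabled. A sketch of that derivation using only the JDK and commons-codec; the "path" resource and its values are sample data, not taken from the deleted code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.commons.codec.digest.DigestUtils;

public class ResourceSignatureSketch {
    public static void main(String[] args) {
        // resource name -> values, e.g. the "path" resource of an HDFS-style policy
        Map<String, List<String>> resources = new TreeMap<>();
        resources.put("path", Arrays.asList("/data/finance", "/data/audit"));

        // mirror ResourceSerializer: sorted values, excludes/recursive rendered as false when unset
        Map<String, String> views = new TreeMap<>();
        for (Map.Entry<String, List<String>> e : resources.entrySet()) {
            List<String> values = new ArrayList<>(e.getValue());
            Collections.sort(values);
            views.put(e.getKey(), "{values=" + values + ",excludes=false,recursive=false}");
        }

        String signatureString = String.format("{version=%d,resource=%s}", 1, views.toString());

        // non-FIPS deployments hash with SHA-256; FIPS-enabled ones use sha512Hex instead
        System.out.println(signatureString);
        System.out.println(DigestUtils.sha256Hex(signatureString));
    }
}

Using a TreeMap and sorted value lists keeps the string deterministic, which is what lets the hash serve as a stable duplicate-resource signature.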
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.Predicate; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; - -public class RolePredicateUtil extends AbstractPredicateUtil { - - public RolePredicateUtil() { - super(); - } - - @Override - public void addPredicates(SearchFilter filter, List predicates) { - addPredicateForRoleName(filter.getParam(SearchFilter.ROLE_NAME), predicates); - addPredicateForRoleId(filter.getParam(SearchFilter.ROLE_ID), predicates); - addPredicateForGroupName(filter.getParam(SearchFilter.GROUP_NAME), predicates); - addPredicateForUserName(filter.getParam(SearchFilter.USER_NAME), predicates); - - addPredicateForPartialRoleName(filter.getParam(SearchFilter.ROLE_NAME_PARTIAL), predicates); - addPredicateForPartialGroupName(filter.getParam(SearchFilter.GROUP_NAME_PARTIAL), predicates); - addPredicateForPartialUserName(filter.getParam(SearchFilter.USER_NAME_PARTIAL), predicates); - } - - private Predicate addPredicateForRoleName(final String roleName, List predicates) { - if(StringUtils.isEmpty(roleName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - ret = StringUtils.equals(role.getName(), roleName); - - if (!ret) { - List roles = role.getRoles(); - - for (RangerRole.RoleMember member : roles) { - ret = StringUtils.equals(role.getName(), roleName); - - if (ret) { - break; - } - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPartialRoleName(final String roleNamePartial, List predicates) { - if(StringUtils.isEmpty(roleNamePartial)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - ret = StringUtils.containsIgnoreCase(role.getName(), roleNamePartial); - - if (!ret) { - List roles = role.getRoles(); - - for (RangerRole.RoleMember member : roles) { - ret = StringUtils.containsIgnoreCase(role.getName(), roleNamePartial); - - if (ret) { - break; - } - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForRoleId(final String roleId, List predicates) { - if(StringUtils.isEmpty(roleId)) { - return null; - } - - Predicate ret = new Predicate() { - 
@Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - ret = StringUtils.equals(roleId, role.getId().toString()); - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForGroupName(final String groupName, List predicates) { - if(StringUtils.isEmpty(groupName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - List groups = role.getGroups(); - - for (RangerRole.RoleMember member : groups) { - ret = StringUtils.equals(member.getName(), groupName); - - if (ret) { - break; - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPartialGroupName(final String groupNamePartial, List predicates) { - if(StringUtils.isEmpty(groupNamePartial)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - List groups = role.getGroups(); - - for (RangerRole.RoleMember member : groups) { - ret = StringUtils.containsIgnoreCase(member.getName(), groupNamePartial); - - if (ret) { - break; - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForUserName(final String userName, List predicates) { - if(StringUtils.isEmpty(userName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - List users = role.getUsers(); - - for (RangerRole.RoleMember member : users) { - ret = StringUtils.equals(member.getName(), userName); - - if (ret) { - break; - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForPartialUserName(final String userNamePartial, List predicates) { - if(StringUtils.isEmpty(userNamePartial)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerRole) { - RangerRole role = (RangerRole) object; - - List users = role.getUsers(); - - for (RangerRole.RoleMember member : users) { - ret = StringUtils.containsIgnoreCase(member.getName(), userNamePartial); - - if (ret) { - break; - } - } - } - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RoleStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RoleStore.java deleted file mode 100644 index c38c512c52..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/RoleStore.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
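[Editor's note: a hedged sketch of the pattern the deleted *PredicateUtil classes implement: each addPredicateFor*() contributes one commons-collections Predicate per SearchFilter parameter, and the combined predicates filter an in-memory list of model objects. The roles list below is a stand-in for whatever the store returns; only APIs visible in this patch or standard commons-collections 3 calls are used.]

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.PredicateUtils;

import org.apache.atlas.plugin.model.RangerRole;
import org.apache.atlas.plugin.store.RolePredicateUtil;
import org.apache.atlas.plugin.util.SearchFilter;

public class RoleFilterExample {
    public static void main(String[] args) {
        SearchFilter filter = new SearchFilter();
        filter.setParam(SearchFilter.ROLE_NAME, "data-stewards"); // illustrative role name

        List<Predicate> predicates = new ArrayList<>();
        new RolePredicateUtil().addPredicates(filter, predicates);

        List<RangerRole> roles = new ArrayList<>(); // would come from the RoleStore in practice

        // Filters the list in place, keeping only roles that satisfy every predicate.
        CollectionUtils.filter(roles, PredicateUtils.allPredicate(predicates));
    }
}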
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.model.RangerRole; -import org.apache.atlas.plugin.util.RangerRoles; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; - -public interface RoleStore { - - void init() throws Exception; - - RangerRole createRole(RangerRole role, Boolean createNonExistUserGroup) throws Exception; - - RangerRole updateRole(RangerRole role, Boolean createNonExistUserGroup) throws Exception; - - void deleteRole(String roleName) throws Exception; - - void deleteRole(Long roleId) throws Exception; - - RangerRole getRole(Long id) throws Exception; - - RangerRole getRole(String name) throws Exception; - - List getRoles(SearchFilter filter) throws Exception; - - List getRoleNames(SearchFilter filter) throws Exception; - - RangerRoles getRoles(String serviceName, Long lastKnownRoleVersion) throws Exception; - - Long getRoleVersion(String serviceName); - - boolean roleExists(Long id) throws Exception; - - boolean roleExists(String name) throws Exception; -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZonePredicateUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZonePredicateUtil.java deleted file mode 100644 index bd2aa023fb..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZonePredicateUtil.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.Predicate; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; - -public class SecurityZonePredicateUtil extends AbstractPredicateUtil { - - public SecurityZonePredicateUtil() { - super(); - } - - @Override - public void addPredicates(SearchFilter filter, List predicates) { - //super.addPredicates(filter, predicates); - - addPredicateForServiceName(filter.getParam(SearchFilter.SERVICE_NAME), predicates); - addPredicateForMatchingZoneId(filter.getParam(SearchFilter.ZONE_ID), predicates); - addPredicateForNonMatchingZoneName(filter.getParam(SearchFilter.ZONE_NAME), predicates); - } - - private Predicate addPredicateForServiceName(final String serviceName, List predicates) { - if(StringUtils.isEmpty(serviceName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerSecurityZone) { - RangerSecurityZone securityZone = (RangerSecurityZone) object; - - ret = securityZone.getServices().get(serviceName) != null; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForMatchingZoneId(final String zoneId, List predicates) { - if (StringUtils.isEmpty(zoneId)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerSecurityZone) { - RangerSecurityZone securityZone = (RangerSecurityZone) object; - - if (StringUtils.equals(zoneId, securityZone.getId().toString())) { - ret = true; - } - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForNonMatchingZoneName(final String zoneName, List predicates) { - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerSecurityZone) { - RangerSecurityZone securityZone = (RangerSecurityZone) object; - - if (StringUtils.isEmpty(zoneName) || !StringUtils.equals(zoneName, securityZone.getName())) { - ret = true; - } - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZoneStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZoneStore.java deleted file mode 100644 index d23036be48..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/SecurityZoneStore.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; -import java.util.Map; - -public interface SecurityZoneStore { - - void init() throws Exception; - - RangerSecurityZone createSecurityZone(RangerSecurityZone securityZone) throws Exception; - - RangerSecurityZone updateSecurityZoneById(RangerSecurityZone securityZone) throws Exception; - - void deleteSecurityZoneByName(String zoneName) throws Exception; - - void deleteSecurityZoneById(Long zoneId) throws Exception; - - RangerSecurityZone getSecurityZone(Long id) throws Exception; - - RangerSecurityZone getSecurityZoneByName(String name) throws Exception; - - List getSecurityZones(SearchFilter filter) throws Exception; - - Map getSecurityZonesForService(String serviceName); - -} - diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceDefsUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceDefsUtil.java new file mode 100644 index 0000000000..6071a77b10 --- /dev/null +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceDefsUtil.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.atlas.plugin.store; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.atlas.plugin.model.RangerServiceDef; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.List; + +public class ServiceDefsUtil { + private static final Log LOG = LogFactory.getLog(ServiceDefsUtil.class); + + public static final String EMBEDDED_SERVICEDEF_TAG_NAME = "tag"; + + private static ServiceDefsUtil instance = new ServiceDefsUtil(); + + private final Gson gsonBuilder; + + /** Private constructor to restrict instantiation of this singleton utility class. 
*/ + private ServiceDefsUtil() { + gsonBuilder = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create(); + } + + public static ServiceDefsUtil instance() { + return instance; + } + + public RangerServiceDef getEmbeddedServiceDef(String defType) throws Exception { + RangerServiceDef serviceDef=null; + if(StringUtils.isNotEmpty(defType)){ + serviceDef=loadEmbeddedServiceDef(defType); + } + return serviceDef; + } + + public static boolean isRecursiveEnabled(final RangerServiceDef rangerServiceDef, final String resourceDefName) { + boolean ret = false; + List resourceDefs = rangerServiceDef.getResources(); + for(RangerServiceDef.RangerResourceDef resourceDef:resourceDefs) { + if (resourceDefName.equals(resourceDef.getName())) { + ret = resourceDef.getRecursiveSupported(); + break; + } + } + return ret; + } + + private RangerServiceDef loadEmbeddedServiceDef(String serviceType) throws Exception { + if(LOG.isDebugEnabled()) { + LOG.debug("==> EmbeddedServiceDefsUtil.loadEmbeddedServiceDef(" + serviceType + ")"); + } + + RangerServiceDef ret = null; + + String resource = "/service-defs/ranger-servicedef-" + serviceType + ".json"; + + try (InputStream inStream = getClass().getResourceAsStream(resource)) { + + try (InputStreamReader reader = new InputStreamReader(inStream)) { + ret = gsonBuilder.fromJson(reader, RangerServiceDef.class); + + //Set DEFAULT displayName if missing + if (ret != null && StringUtils.isBlank(ret.getDisplayName())) { + ret.setDisplayName(ret.getName()); + } + } + } + + if(LOG.isDebugEnabled()) { + LOG.debug("==> EmbeddedServiceDefsUtil.loadEmbeddedServiceDef(" + serviceType + ")"); + } + + return ret; + } +} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServicePredicateUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServicePredicateUtil.java deleted file mode 100644 index 642ca2a7e7..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServicePredicateUtil.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
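[Editor's note: a minimal sketch of how the newly added ServiceDefsUtil is expected to be called. It loads the embedded service definition JSON (/service-defs/ranger-servicedef-<type>.json) from the classpath; the "tag" resource name passed to isRecursiveEnabled() is illustrative.]

import org.apache.atlas.plugin.model.RangerServiceDef;
import org.apache.atlas.plugin.store.ServiceDefsUtil;

public class ServiceDefsUtilExample {
    public static void main(String[] args) throws Exception {
        // Loads /service-defs/ranger-servicedef-tag.json from the classpath.
        RangerServiceDef tagDef =
                ServiceDefsUtil.instance().getEmbeddedServiceDef(ServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME);

        if (tagDef != null) {
            // Check whether a particular resource definition supports recursive matching.
            boolean recursive = ServiceDefsUtil.isRecursiveEnabled(tagDef, "tag");
            System.out.println(tagDef.getName() + " recursive(tag)=" + recursive);
        }
    }
}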
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.Predicate; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; - -public class ServicePredicateUtil extends AbstractPredicateUtil { - private ServiceStore serviceStore; - - public ServicePredicateUtil(ServiceStore serviceStore) { - super(); - this.serviceStore = serviceStore; - } - - @Override - public void addPredicates(SearchFilter filter, List predicates) { - super.addPredicates(filter, predicates); - - addPredicateForServiceType(filter.getParam(SearchFilter.SERVICE_TYPE), predicates); - addPredicateForServiceId(filter.getParam(SearchFilter.SERVICE_ID), predicates); - addPredicateForTagSeviceName(filter.getParam(SearchFilter.TAG_SERVICE_NAME), predicates); - addPredicateForTagSeviceId(filter.getParam(SearchFilter.TAG_SERVICE_ID), predicates); - } - - private String getServiceType(String serviceName) { - RangerService service = null; - - try { - if (serviceStore != null) { - service = serviceStore.getServiceByName(serviceName); - } - } catch(Exception excp) { - // ignore - } - - return service != null ? service.getType() : null; - } - - private Long getServiceId(String serviceName) { - RangerService service = null; - - try { - if (serviceStore != null) { - service = serviceStore.getServiceByName(serviceName); - } - } catch(Exception excp) { - // ignore - } - - return service != null ? service.getId() : null; - } - - - private Predicate addPredicateForServiceType(final String serviceType, List predicates) { - if(StringUtils.isEmpty(serviceType)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - - ret = StringUtils.equals(serviceType, getServiceType(policy.getService())); - } else if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - ret = StringUtils.equals(serviceType, service.getType()); - } else if(object instanceof RangerServiceDef) { - RangerServiceDef serviceDef = (RangerServiceDef)object; - - ret = StringUtils.equals(serviceType, serviceDef.getName()); - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForServiceId(final String serviceId, List predicates) { - if(StringUtils.isEmpty(serviceId)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerPolicy) { - RangerPolicy policy = (RangerPolicy)object; - Long svcId = getServiceId(policy.getService()); - - if(svcId != null) { - ret = StringUtils.equals(serviceId, svcId.toString()); - } - } else if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - if(service.getId() != null) { - ret = StringUtils.equals(serviceId, service.getId().toString()); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagSeviceName(final String tagServiceName, List predicates) { - 
if(StringUtils.isEmpty(tagServiceName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - ret = StringUtils.equals(tagServiceName, service.getTagService()); - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagSeviceId(final String tagServiceId, List predicates) { - if(StringUtils.isEmpty(tagServiceId)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - if(object == null) { - return false; - } - - boolean ret = false; - - if(object instanceof RangerService) { - RangerService service = (RangerService)object; - - if(! StringUtils.isEmpty(service.getTagService())) { - RangerService tagService = null; - - try { - tagService = serviceStore.getServiceByName(service.getTagService()); - } catch(Exception excp) { - } - - ret = tagService != null && tagService.getId() != null && StringUtils.equals(tagServiceId, tagService.getId().toString()); - } - } else { - ret = true; - } - - return ret; - } - }; - - if(predicates != null) { - predicates.add(ret); - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceStore.java deleted file mode 100644 index 91e31f74d2..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/ServiceStore.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerSecurityZone; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.util.SearchFilter; -import org.apache.atlas.plugin.util.ServicePolicies; - -import java.util.List; -import java.util.Map; - -public interface ServiceStore { - - String OPTION_FORCE_RENAME = "forceRename"; - - void init() throws Exception; - - RangerServiceDef createServiceDef(RangerServiceDef serviceDef) throws Exception; - - RangerServiceDef updateServiceDef(RangerServiceDef serviceDef) throws Exception; - - void deleteServiceDef(Long id, Boolean forceDelete) throws Exception; - - void updateTagServiceDefForAccessTypes() throws Exception; - - RangerServiceDef getServiceDef(Long id) throws Exception; - - RangerServiceDef getServiceDefByName(String name) throws Exception; - - RangerServiceDef getServiceDefByDisplayName(String name) throws Exception; - - List getServiceDefs(SearchFilter filter) throws Exception; - - PList getPaginatedServiceDefs(SearchFilter filter) throws Exception; - - RangerService createService(RangerService service) throws Exception; - - RangerService updateService(RangerService service, Map options) throws Exception; - - void deleteService(Long id) throws Exception; - - RangerService getService(Long id) throws Exception; - - RangerService getServiceByName(String name) throws Exception; - - RangerService getServiceByDisplayName(String displayName) throws Exception; - - List getServices(SearchFilter filter) throws Exception; - - PList getPaginatedServices(SearchFilter filter) throws Exception; - - RangerPolicy createPolicy(RangerPolicy policy) throws Exception; - - RangerPolicy updatePolicy(RangerPolicy policy) throws Exception; - - void deletePolicy(RangerPolicy policy, RangerService service) throws Exception; - - void deletePolicy(RangerPolicy policy) throws Exception; - - boolean policyExists(Long id) throws Exception; - - RangerPolicy getPolicy(Long id) throws Exception; - - List getPolicies(SearchFilter filter) throws Exception; - - Long getPolicyId(final Long serviceId, final String policyName, final Long zoneId); - - PList getPaginatedPolicies(SearchFilter filter) throws Exception; - - List getPoliciesByResourceSignature(String serviceName, String policySignature, Boolean isPolicyEnabled) throws Exception; - - List getServicePolicies(Long serviceId, SearchFilter filter) throws Exception; - - PList getPaginatedServicePolicies(Long serviceId, SearchFilter filter) throws Exception; - - List getServicePolicies(String serviceName, SearchFilter filter) throws Exception; - - PList getPaginatedServicePolicies(String serviceName, SearchFilter filter) throws Exception; - - ServicePolicies getServicePoliciesIfUpdated(String serviceName, Long lastKnownVersion, boolean needsBackwardCompatibility) throws Exception; - - Long getServicePolicyVersion(String serviceName); - - ServicePolicies getServicePolicyDeltasOrPolicies(String serviceName, Long lastKnownVersion) throws Exception; - - ServicePolicies getServicePolicyDeltas(String serviceName, Long lastKnownVersion) throws Exception; - - ServicePolicies getServicePolicies(String serviceName, Long lastKnownVersion) throws Exception; - - RangerPolicy getPolicyFromEventTime(String eventTimeStr, Long policyId); - - void setPopulateExistingBaseFields(Boolean populateExistingBaseFields); - - Boolean getPopulateExistingBaseFields(); - - 
RangerSecurityZone getSecurityZone(Long id) throws Exception; - - RangerSecurityZone getSecurityZone(String name) throws Exception; - - long getPoliciesCount(final String serviceName); - - Map getServiceConfigForPlugin(Long serviceId); -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/StoredServiceResource.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/StoredServiceResource.java deleted file mode 100644 index e8badc86cb..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/StoredServiceResource.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.model.RangerPolicy; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.util.Map; - -@JsonAutoDetect(fieldVisibility=JsonAutoDetect.Visibility.ANY) -@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) -@JsonIgnoreProperties(ignoreUnknown=true) -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) - -public class StoredServiceResource implements java.io.Serializable { - private final Map resourceElements; - private final String ownerName; - private final Map additionalInfo; - - public StoredServiceResource() { - this(null, null, null); - } - - public StoredServiceResource(Map resourceElements, String ownerName, Map additionalInfo) { - this.resourceElements = resourceElements; - this.ownerName = ownerName; - this.additionalInfo = additionalInfo; - } - - public Map getResourceElements() { - return resourceElements; - } - public String getOwnerName() { - return ownerName; - } - public Map getAdditionalInfo() { - return additionalInfo; - } -} \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagPredicateUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagPredicateUtil.java deleted file mode 100644 index f011b9ed32..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagPredicateUtil.java +++ /dev/null @@ -1,383 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
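[Editor's note: a hedged sketch of the incremental-refresh pattern the deleted ServiceStore interface supports: a caller remembers the last policy version it saw and asks the store only for changes since then. The store instance is a placeholder, and the assumption that getServicePoliciesIfUpdated() returns null when nothing changed, plus the ServicePolicies.getPolicyVersion() accessor, follow the conventional Ranger contract rather than anything stated in this patch.]

import org.apache.atlas.plugin.store.ServiceStore;
import org.apache.atlas.plugin.util.ServicePolicies;

public class PolicyRefreshExample {
    private final ServiceStore store;      // injected implementation (placeholder)
    private Long lastKnownVersion = -1L;   // -1 means "never downloaded"

    public PolicyRefreshExample(ServiceStore store) {
        this.store = store;
    }

    public ServicePolicies refresh(String serviceName) throws Exception {
        // Assumed contract: null when nothing changed since lastKnownVersion,
        // otherwise the full policies (or deltas, depending on the implementation).
        ServicePolicies updated = store.getServicePoliciesIfUpdated(serviceName, lastKnownVersion, false);

        if (updated != null && updated.getPolicyVersion() != null) {
            lastKnownVersion = updated.getPolicyVersion();
        }
        return updated;
    }
}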
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.Predicate; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.RangerServiceResource; -import org.apache.atlas.plugin.model.RangerTag; -import org.apache.atlas.plugin.model.RangerTagDef; -import org.apache.atlas.plugin.model.RangerTagResourceMap; -import org.apache.atlas.plugin.util.SearchFilter; - -import java.util.List; - -public class TagPredicateUtil extends AbstractPredicateUtil { - - public TagPredicateUtil() { super(); } - - @Override - public void addPredicates(SearchFilter filter, List predicates) { - super.addPredicates(filter, predicates); - - addPredicateForTagDefId(filter.getParam(SearchFilter.TAG_DEF_ID), predicates); - addPredicateForTagDefGuid(filter.getParam(SearchFilter.TAG_DEF_GUID), predicates); - - addPredicateForTagId(filter.getParam(SearchFilter.TAG_ID), predicates); - addPredicateForTagGuid(filter.getParam(SearchFilter.TAG_GUID), predicates); - addPredicateForTagType(filter.getParam(SearchFilter.TAG_TYPE), predicates); - - addPredicateForResourceId(filter.getParam(SearchFilter.TAG_RESOURCE_ID), predicates); - addPredicateForResourceGuid(filter.getParam(SearchFilter.TAG_RESOURCE_GUID), predicates); - addPredicateForServiceResourceServiceName(filter.getParam(SearchFilter.TAG_RESOURCE_SERVICE_NAME), predicates); - addPredicateForResourceSignature(filter.getParam(SearchFilter.TAG_RESOURCE_SIGNATURE), predicates); - - addPredicateForTagResourceMapId(filter.getParam(SearchFilter.TAG_MAP_ID), predicates); - } - - private Predicate addPredicateForTagDefId(final String id, List predicates) { - if (StringUtils.isEmpty(id)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTagDef) { - RangerTagDef tagDef = (RangerTagDef) object; - - ret = StringUtils.equals(id, tagDef.getId().toString()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagDefGuid(final String guid, List predicates) { - if (StringUtils.isEmpty(guid)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTagDef) { - RangerTagDef tagDef = (RangerTagDef) object; - - ret = StringUtils.equals(guid, tagDef.getGuid()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagId(final String id, List predicates) { - if (StringUtils.isEmpty(id)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTag) { - RangerTag tag = (RangerTag) object; - - ret = StringUtils.equals(id, tag.getId().toString()); - } else if (object 
instanceof RangerTagResourceMap) { - RangerTagResourceMap tagResourceMap = (RangerTagResourceMap) object; - ret = StringUtils.equals(id, tagResourceMap.getTagId().toString()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagGuid(final String guid, List predicates) { - if (StringUtils.isEmpty(guid)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTag) { - RangerTag tag = (RangerTag) object; - - ret = StringUtils.equals(guid, tag.getGuid()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagType(final String type, List predicates) { - if (StringUtils.isEmpty(type)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTagDef) { - RangerTagDef tagDef = (RangerTagDef) object; - - ret = StringUtils.equals(type, tagDef.getName()); - } else if (object instanceof RangerTag) { - RangerTag tag = (RangerTag) object; - - ret = StringUtils.equals(type, tag.getType()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForResourceId(final String id, List predicates) { - if (StringUtils.isEmpty(id)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerServiceResource) { - RangerServiceResource resource = (RangerServiceResource) object; - - ret = StringUtils.equals(id, resource.getId().toString()); - } else if(object instanceof RangerTagResourceMap) { - RangerTagResourceMap tagResourceMap = (RangerTagResourceMap)object; - - ret = StringUtils.equals(id, tagResourceMap.getId().toString()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForResourceGuid(final String id, List predicates) { - if (StringUtils.isEmpty(id)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerServiceResource) { - RangerServiceResource resource = (RangerServiceResource) object; - - ret = StringUtils.equals(id, resource.getGuid()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForServiceResourceServiceName(final String serviceName, List predicates) { - if (serviceName == null || StringUtils.isEmpty(serviceName)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerServiceResource) { - RangerServiceResource resource = (RangerServiceResource) object; - ret = StringUtils.equals(resource.getServiceName(), serviceName); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate 
addPredicateForResourceSignature(final String signature, List predicates) { - if (StringUtils.isEmpty(signature)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerServiceResource) { - RangerServiceResource resource = (RangerServiceResource) object; - - ret = StringUtils.equals(signature, resource.getResourceSignature()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } - - private Predicate addPredicateForTagResourceMapId(final String id, List predicates) { - if (StringUtils.isEmpty(id)) { - return null; - } - - Predicate ret = new Predicate() { - @Override - public boolean evaluate(Object object) { - - boolean ret = false; - - if (object == null) { - return ret; - } - - if (object instanceof RangerTagResourceMap) { - RangerTagResourceMap tagResourceMap = (RangerTagResourceMap) object; - ret = StringUtils.equals(id, tagResourceMap.getId().toString()); - } - - return ret; - } - }; - - if (predicates != null) { - predicates.add(ret); - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagStore.java deleted file mode 100644 index baa9b05060..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagStore.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.store; - -import org.apache.atlas.plugin.model.*; -import org.apache.atlas.plugin.util.SearchFilter; -import org.apache.atlas.plugin.util.ServiceTags; - -import java.util.List; - -/** - * Interface to backing store for the top-level TAG model objects - */ - -public interface TagStore { - void init() throws Exception; - - void setServiceStore(ServiceStore svcStore); - - ServiceStore getServiceStore(); - - RangerTagDef createTagDef(RangerTagDef tagDef) throws Exception; - - RangerTagDef updateTagDef(RangerTagDef TagDef) throws Exception; - - void deleteTagDefByName(String name) throws Exception; - - void deleteTagDef(Long id) throws Exception; - - RangerTagDef getTagDef(Long id) throws Exception; - - RangerTagDef getTagDefByGuid(String guid) throws Exception; - - RangerTagDef getTagDefByName(String name) throws Exception; - - List getTagDefs(SearchFilter filter) throws Exception; - - PList getPaginatedTagDefs(SearchFilter filter) throws Exception; - - List getTagTypes() throws Exception; - - - RangerTag createTag(RangerTag tag) throws Exception; - - RangerTag updateTag(RangerTag tag) throws Exception; - - void deleteTag(Long id) throws Exception; - - RangerTag getTag(Long id) throws Exception; - - RangerTag getTagByGuid(String guid) throws Exception; - - List getTagIdsForResourceId(Long resourceId) throws Exception; - - List getTagsByType(String name) throws Exception; - - List getTagsForResourceId(Long resourceId) throws Exception; - - List getTagsForResourceGuid(String resourceGuid) throws Exception; - - List getTags(SearchFilter filter) throws Exception; - - PList getPaginatedTags(SearchFilter filter) throws Exception; - - - RangerServiceResource createServiceResource(RangerServiceResource resource) throws Exception; - - RangerServiceResource updateServiceResource(RangerServiceResource resource) throws Exception; - - void refreshServiceResource(Long resourceId) throws Exception; - - void deleteServiceResource(Long id) throws Exception; - - void deleteServiceResourceByGuid(String guid) throws Exception; - - RangerServiceResource getServiceResource(Long id) throws Exception; - - RangerServiceResource getServiceResourceByGuid(String guid) throws Exception; - - List getServiceResourcesByService(String serviceName) throws Exception; - - List getServiceResourceGuidsByService(String serviceName) throws Exception; - - RangerServiceResource getServiceResourceByServiceAndResourceSignature(String serviceName, String resourceSignature) throws Exception; - - List getServiceResources(SearchFilter filter) throws Exception; - - PList getPaginatedServiceResources(SearchFilter filter) throws Exception; - - - RangerTagResourceMap createTagResourceMap(RangerTagResourceMap tagResourceMap) throws Exception; - - void deleteTagResourceMap(Long id) throws Exception; - - RangerTagResourceMap getTagResourceMap(Long id) throws Exception; - - RangerTagResourceMap getTagResourceMapByGuid(String guid) throws Exception; - - List getTagResourceMapsForTagId(Long tagId) throws Exception; - - List getTagResourceMapsForTagGuid(String tagGuid) throws Exception; - - List getTagResourceMapsForResourceId(Long resourceId) throws Exception; - - List getTagResourceMapsForResourceGuid(String resourceGuid) throws Exception; - - RangerTagResourceMap getTagResourceMapForTagAndResourceId(Long tagId, Long resourceId) throws Exception; - - RangerTagResourceMap getTagResourceMapForTagAndResourceGuid(String tagGuid, String resourceGuid) throws Exception; - - List getTagResourceMaps(SearchFilter 
filter) throws Exception; - - PList getPaginatedTagResourceMaps(SearchFilter filter) throws Exception; - - - ServiceTags getServiceTagsIfUpdated(String serviceName, Long lastKnownVersion, boolean needsBackwardCompatibility) throws Exception; - ServiceTags getServiceTags(String serviceName, Long lastKnownVersion) throws Exception; - ServiceTags getServiceTagsDelta(String serviceName, Long lastKnownVersion) throws Exception; - - - Long getTagVersion(String serviceName); - - void deleteAllTagObjectsForService(String serviceName) throws Exception; - - boolean isInPlaceTagUpdateSupported(); - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagValidator.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagValidator.java deleted file mode 100644 index 0b966fc80d..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/TagValidator.java +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.atlas.plugin.model.*; - -import java.util.List; - -public class TagValidator { - private TagStore tagStore; - - public TagValidator() {} - - public void setTagStore(TagStore tagStore) { - this.tagStore = tagStore; - } - - public RangerTagDef preCreateTagDef(final RangerTagDef tagDef, boolean updateIfExists) throws Exception { - String name = tagDef.getName(); - - if (StringUtils.isBlank(name)) { - throw new Exception("TagDef has no name"); - } - - RangerTagDef existing = tagStore.getTagDefByName(name); - - return existing; - } - - public RangerTag preCreateTag(final RangerTag tag) throws Exception { - if(StringUtils.isBlank(tag.getType()) ) { - throw new Exception("Tag has no type"); - } - - RangerTag ret = null; - - String guid = tag.getGuid(); - if (! 
StringUtils.isBlank(guid)) { - ret = tagStore.getTagByGuid(guid); - } - - return ret; - } - - public void preUpdateTag(final Long id, final RangerTag tag) throws Exception { - if (StringUtils.isBlank(tag.getType())) { - throw new Exception("Tag has no type"); - } - - if (id == null) { - throw new Exception("Invalid/null id"); - } - - RangerTag existing = tagStore.getTag(id); - - if (existing == null) { - throw new Exception("Attempt to update nonexistant tag, id=" + id); - } - - if (!StringUtils.equals(tag.getType(), existing.getType())) { - throw new Exception("Attempt to change the tag-type"); - } - - tag.setId(existing.getId()); - tag.setGuid(existing.getGuid()); - } - - public void preUpdateTagByGuid(String guid, final RangerTag tag) throws Exception { - if (StringUtils.isBlank(tag.getType())) { - throw new Exception("Tag has no type"); - } - - RangerTag existing = tagStore.getTagByGuid(guid); - if (existing == null) { - throw new Exception("Attempt to update nonexistent tag, guid=" + guid); - } - - if (!StringUtils.equals(tag.getType(), existing.getType())) { - throw new Exception("Attempt to change the tag-type"); - } - - tag.setId(existing.getId()); - tag.setGuid(existing.getGuid()); - } - - public RangerTag preDeleteTag(Long id) throws Exception { - if (id == null) { - throw new Exception("Invalid/null id"); - } - - RangerTag existing = tagStore.getTag(id); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent tag, id=" + id); - } - - List associations = tagStore.getTagResourceMapsForTagId(existing.getId()); - if (CollectionUtils.isNotEmpty(associations)) { - throw new Exception("Attempt to delete tag which is associated with a service-resource, id=" + id); - } - return existing; - } - - public RangerTag preDeleteTagByGuid(String guid) throws Exception { - RangerTag exiting = tagStore.getTagByGuid(guid); - - if (exiting == null) { - throw new Exception("Attempt to delete nonexistent tag, guid=" + guid); - } - - List associations = tagStore.getTagResourceMapsForTagId(exiting.getId()); - if (CollectionUtils.isNotEmpty(associations)) { - throw new Exception("Attempt to delete tag which is associated with a service-resource, guid=" + guid); - } - return exiting; - } - - public RangerServiceResource preCreateServiceResource(RangerServiceResource resource) throws Exception { - RangerServiceResource ret = null; - - if (StringUtils.isBlank(resource.getServiceName()) || MapUtils.isEmpty(resource.getResourceElements())) { - throw new Exception("No serviceName or resource in RangerServiceResource"); - } - - String guid = resource.getGuid(); - if (! 
StringUtils.isBlank(guid)) { - ret = tagStore.getServiceResourceByGuid(guid); - } - - if (ret == null) { - RangerServiceResourceSignature serializer = new RangerServiceResourceSignature(resource); - resource.setResourceSignature(serializer.getSignature()); - } - - return ret; - } - - public void preUpdateServiceResource(Long id, RangerServiceResource resource) throws Exception { - if (StringUtils.isBlank(resource.getServiceName()) || MapUtils.isEmpty(resource.getResourceElements())) { - throw new Exception("No serviceName or resource in RangerServiceResource"); - } - - if (id == null) { - throw new Exception("Invalid/null id"); - } - - RangerServiceResource existing = tagStore.getServiceResource(id); - if (existing == null) { - throw new Exception("Attempt to update nonexistent resource, id=" + id); - } - - if (!StringUtils.equals(existing.getServiceName(), resource.getServiceName())) { - throw new Exception("Attempt to change service-name for existing service-resource"); - } - - RangerServiceResourceSignature serializer = new RangerServiceResourceSignature(resource); - - resource.setId(existing.getId()); - resource.setGuid(existing.getGuid()); - resource.setResourceSignature(serializer.getSignature()); - } - - public void preUpdateServiceResourceByGuid(String guid, RangerServiceResource resource) throws Exception { - if (StringUtils.isBlank(resource.getServiceName()) || MapUtils.isEmpty(resource.getResourceElements())) { - throw new Exception("No serviceName or resource in RangerServiceResource"); - } - - RangerServiceResource existing = tagStore.getServiceResourceByGuid(guid); - if (existing == null) { - throw new Exception("Attempt to update nonexistent resource, guid=" + guid); - } - - if (!StringUtils.equals(existing.getServiceName(), resource.getServiceName())) { - throw new Exception("Attempt to change service-name for existing service-resource"); - } - - RangerServiceResourceSignature serializer = new RangerServiceResourceSignature(resource); - - resource.setId(existing.getId()); - resource.setGuid(guid); - resource.setResourceSignature(serializer.getSignature()); - } - - public RangerServiceResource preDeleteServiceResource(Long id) throws Exception { - RangerServiceResource existing = tagStore.getServiceResource(id); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent resource, id=" + id); - } - - List associations = tagStore.getTagResourceMapsForResourceId(existing.getId()); - if (CollectionUtils.isNotEmpty(associations)) { - throw new Exception("Attempt to delete serviceResource which is associated with a tag, id=" + id); - } - - return existing; - } - - public RangerServiceResource preDeleteServiceResourceByGuid(String guid, boolean deleteReferences) throws Exception { - RangerServiceResource existing = tagStore.getServiceResourceByGuid(guid); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent resource, guid=" + guid); - } - - List associations = tagStore.getTagResourceMapsForResourceId(existing.getId()); - if (CollectionUtils.isNotEmpty(associations) && !deleteReferences) { - throw new Exception("Attempt to delete serviceResource which is associated with a tag, guid=" + guid); - } - - return existing; - } - - public RangerTagResourceMap preCreateTagResourceMap(String tagGuid, String resourceGuid) throws Exception { - if (StringUtils.isBlank(resourceGuid) || StringUtils.isBlank(tagGuid)) { - throw new Exception("Both resourceGuid and resourceId need to be non-empty"); - } - - RangerTagResourceMap existing = 
tagStore.getTagResourceMapForTagAndResourceGuid(tagGuid, resourceGuid); - - if (existing != null) { - throw new Exception("Attempt to create existing association between resourceId=" + resourceGuid + " and tagId=" + tagGuid); - } - - RangerServiceResource existingServiceResource = tagStore.getServiceResourceByGuid(resourceGuid); - - if(existingServiceResource == null) { - throw new Exception("No resource found for guid=" + resourceGuid); - } - - RangerTag existingTag = tagStore.getTagByGuid(tagGuid); - - if(existingTag == null) { - throw new Exception("No tag found for guid=" + tagGuid); - } - - RangerTagResourceMap newTagResourceMap = new RangerTagResourceMap(); - newTagResourceMap.setResourceId(existingServiceResource.getId()); - newTagResourceMap.setTagId(existingTag.getId()); - - return newTagResourceMap; - } - - public RangerTagResourceMap preCreateTagResourceMapByIds(Long tagId, Long resourceId) throws Exception { - RangerTagResourceMap existing = tagStore.getTagResourceMapForTagAndResourceId(tagId, resourceId); - - if (existing != null) { - throw new Exception("Attempt to create existing association between resourceId=" + resourceId + " and tagId=" + tagId); - } - - RangerServiceResource existingServiceResource = tagStore.getServiceResource(resourceId); - - if(existingServiceResource == null) { - throw new Exception("No resource found for id=" + resourceId); - } - - RangerTag existingTag = tagStore.getTag(tagId); - - if(existingTag == null) { - throw new Exception("No tag found for id=" + tagId); - } - - RangerTagResourceMap newTagResourceMap = new RangerTagResourceMap(); - newTagResourceMap.setResourceId(resourceId); - newTagResourceMap.setTagId(tagId); - - return newTagResourceMap; - } - - public RangerTagResourceMap preDeleteTagResourceMap(Long id) throws Exception { - RangerTagResourceMap existing = tagStore.getTagResourceMap(id); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent tagResourceMap(id=" + id + ")"); - } - - return existing; - } - - public RangerTagResourceMap preDeleteTagResourceMapByGuid(String guid) throws Exception { - RangerTagResourceMap existing = tagStore.getTagResourceMapByGuid(guid); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent tagResourceMap(guid=" + guid + ")"); - } - - return existing; - } - - public RangerTagResourceMap preDeleteTagResourceMap(String tagGuid, String resourceGuid) throws Exception { - RangerTagResourceMap existing = tagStore.getTagResourceMapForTagAndResourceGuid(tagGuid, resourceGuid); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent association between resourceId=" + resourceGuid + " and tagId=" + tagGuid); - } - - return existing; - } - - public RangerTagResourceMap preDeleteTagResourceMapByIds(Long tagId, Long resourceId) throws Exception { - RangerTagResourceMap existing = tagStore.getTagResourceMapForTagAndResourceId(tagId, resourceId); - - if (existing == null) { - throw new Exception("Attempt to delete nonexistent association between resourceId=" + resourceId + " and tagId=" + tagId); - } - - return existing; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/file/GeolocationFileStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/file/GeolocationFileStore.java deleted file mode 100644 index df82e4f323..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/store/file/GeolocationFileStore.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Licensed to the Apache 
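[Editor's note: a hedged sketch of how the deleted TagValidator pairs with TagStore: each pre*() method validates input and resolves any already-existing object before the corresponding store mutation. The createOrReuse() helper and the way the store is wired in are illustrative, not part of this patch.]

import org.apache.atlas.plugin.model.RangerTag;
import org.apache.atlas.plugin.store.TagStore;
import org.apache.atlas.plugin.store.TagValidator;

public class TagCreateExample {
    public static RangerTag createOrReuse(TagStore tagStore, RangerTag tag) throws Exception {
        TagValidator validator = new TagValidator();
        validator.setTagStore(tagStore);

        // Throws if the tag has no type; returns a non-null tag when its GUID already exists.
        RangerTag existing = validator.preCreateTag(tag);

        return existing != null ? existing : tagStore.createTag(tag);
    }
}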
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.store.file; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.geo.GeolocationMetadata; -import org.apache.atlas.plugin.geo.RangerGeolocationData; -import org.apache.atlas.plugin.geo.RangerGeolocationDatabase; -import org.apache.atlas.plugin.store.GeolocationStore; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.util.HashMap; -import java.util.Map; - -public class GeolocationFileStore implements GeolocationStore { - private static final Log LOG = LogFactory.getLog(GeolocationFileStore.class); - - public static final String GeoLineCommentIdentifier = "#"; - public static final Character GeoFieldsSeparator = ','; - - public static final String PROP_GEOLOCATION_FILE_LOCATION = "FilePath"; - public static final String PROP_GEOLOCATION_FILE_REINIT = "ForceRead"; - public static final String PROP_GEOLOCATION_IP_IN_DOT_FORMAT = "IPInDotFormat"; - - private static Map geolocationDBMap = new HashMap<>(); - - private RangerGeolocationDatabase geolocationDatabase; - - private boolean isMetalineProcessed; - private boolean useDotFormat; - - @Override - public void init(final Map context) { - - String filePathToGeolocationFile = context.get(PROP_GEOLOCATION_FILE_LOCATION); - - if (StringUtils.isBlank(filePathToGeolocationFile)) { - filePathToGeolocationFile = "/etc/ranger/data/geo.txt"; - } - - String reinit = context.get(PROP_GEOLOCATION_FILE_REINIT); - boolean reinitialize = reinit == null || Boolean.parseBoolean(reinit); - - String ipInDotFormat = context.get(PROP_GEOLOCATION_IP_IN_DOT_FORMAT); - useDotFormat = ipInDotFormat == null || Boolean.parseBoolean(ipInDotFormat); - - - if (LOG.isDebugEnabled()) { - LOG.debug("GeolocationFileStore.init() - Geolocation file location=" + filePathToGeolocationFile); - LOG.debug("GeolocationFileStore.init() - Reinitialize flag =" + reinitialize); - LOG.debug("GeolocationFileStore.init() - UseDotFormat flag =" + useDotFormat); - } - - RangerGeolocationDatabase database = geolocationDBMap.get(filePathToGeolocationFile); - - if (database == null || reinitialize) { - RangerGeolocationDatabase newDatabase = build(filePathToGeolocationFile); - if (newDatabase != null) { - geolocationDBMap.put(filePathToGeolocationFile, newDatabase); - database = newDatabase; - } else { - LOG.error("GeolocationFileStore.init() - Could not build database. 
Using old database if present."); - } - } - geolocationDatabase = database; - - if (geolocationDatabase == null) { - LOG.error("GeolocationFileStore.init() - Cannot build Geolocation database from file " + filePathToGeolocationFile); - } - - } - - @Override - public RangerGeolocationDatabase getGeoDatabase() { - return geolocationDatabase; - } - - @Override - public final RangerGeolocationData getGeoLocation(final String ipAddress) { - RangerGeolocationData ret = null; - - RangerGeolocationDatabase database = geolocationDatabase; // init() may get called when getGeolocation is half-executed - - if (database != null) { - - long start = 0L, end = 0L; - - start = System.currentTimeMillis(); - ret = database.find(ipAddress); - end = System.currentTimeMillis(); - - if (LOG.isDebugEnabled()) { - if (ret == null) { - LOG.debug("GeolocationFileStore.getGeolocation() - " + ipAddress + " not found. Search time = " + (end - start) + " milliseconds"); - } else { - LOG.debug("GeolocationFileStore.getGeolocation() - " + ipAddress + " found. Search time = " + (end - start) + " milliseconds"); - - for (String attrName : database.getMetadata().getLocationDataItemNames()) { - LOG.debug("GeolocationFileStore.getGeolocation() - IPAddress[" + attrName + "]=" + database.getValue(ret, attrName) + ", "); - } - - } - } - } else { - LOG.error("GeolocationFileStore.getGeolocation() - GeoLocationDatabase is not initialized correctly."); - } - - return ret; - } - - private Reader getReader(String dataFileName) throws IOException { - Reader ret = null; - - File f = new File(dataFileName); - - if(f.exists() && f.canRead()) { - LOG.info("GeolocationFileStore: reading location data from file '" + dataFileName + "'"); - - ret = new FileReader(dataFileName); - } else { - InputStream inStr = this.getClass().getResourceAsStream(dataFileName); - - if(inStr != null) { - LOG.info("GeolocationFileStore: reading location data from resource '" + dataFileName + "'"); - - ret = new InputStreamReader(inStr); - } - } - - if(ret == null) { - throw new FileNotFoundException(dataFileName); - } - - return ret; - } - - RangerGeolocationDatabase build(String dataFileName) { - - RangerGeolocationDatabase database = null; - - BufferedReader bufferedReader = null; - long start = 0L, end = 0L; - - start = System.currentTimeMillis(); - - try { - bufferedReader = new BufferedReader(getReader(dataFileName)); - - database = new RangerGeolocationDatabase(); - - String line; - int lineNumber = 0; - isMetalineProcessed = false; - - while(( line = bufferedReader.readLine()) != null) { - lineNumber++; - if (!processLine(lineNumber, line, database)) { - LOG.error("RangerGeolocationDatabaseBuilder.build() - Invalid geo-specification - " + lineNumber + ":" + line); - database = null; - break; - } - } - - bufferedReader.close(); - bufferedReader = null; - } - catch(FileNotFoundException ex) { - LOG.error("RangerGeolocationDatabaseBuilder.build() - Unable to open file '" + dataFileName + "'"); - } - catch(IOException ex) { - LOG.error("RangerGeolocationDatabaseBuilder.build() - Error reading file '" + dataFileName + "', " + ex); - } - finally { - if (bufferedReader != null) { - try { - bufferedReader.close(); - } - catch (Exception exception) { - // Ignore - } - } - } - - end = System.currentTimeMillis(); - - if (LOG.isDebugEnabled()) { - LOG.debug("RangerGeolocationDatabaseBuilder.build() - Time taken for reading file = " + (end - start) + " milliseconds"); - } - - if (database != null) { - database.optimize(); - } - - return database; - } - - private 
boolean processLine(int lineNumber, String line, RangerGeolocationDatabase database) { - - boolean ret = true; - - line = line.trim(); - - if (!line.startsWith(GeoLineCommentIdentifier)) { - String fields[] = StringUtils.split(line, GeoFieldsSeparator); - if (fields != null) { - if (!isMetalineProcessed) { - GeolocationMetadata metadata = GeolocationMetadata.create(fields, lineNumber); - if (metadata != null) { - database.setMetadata(metadata); - isMetalineProcessed = true; - } else { - LOG.error("GeolocationFileStore.processLine() - Invalid metadata specification " + lineNumber + ":" + line); - ret = false; - } - } else { - RangerGeolocationData data = RangerGeolocationData.create(fields, lineNumber, useDotFormat); - if (data != null) { - database.getData().insert(data); - } else { - LOG.error("GeolocationFileStore.processLine() - Invalid data specification " + lineNumber + ":" + line); - } - } - } else { - LOG.error("GeolocationFileStore.processLine() - Invalid line, skipping.." + lineNumber + ":" + line); - } - } - return ret; - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRequest.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRequest.java index 09cf851927..7ac0b76d33 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRequest.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRequest.java @@ -20,7 +20,7 @@ package org.apache.atlas.plugin.util; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRoleRequest.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRoleRequest.java index 0f3811fde6..8d3d083d72 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRoleRequest.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/GrantRevokeRoleRequest.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.util; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/JsonUtilsV2.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/JsonUtilsV2.java deleted file mode 100644 index 9632166079..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/JsonUtilsV2.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.util; - - -import org.apache.htrace.shaded.fasterxml.jackson.core.type.TypeReference; -import org.apache.htrace.shaded.fasterxml.jackson.databind.ObjectMapper; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class JsonUtilsV2 { - - static private final ThreadLocal mapper = new ThreadLocal() { - @Override - protected ObjectMapper initialValue() { - return new ObjectMapper(); - } - }; - - static public ObjectMapper getMapper() { - return mapper.get(); - } - - static public Map jsonToMap(String jsonStr) throws Exception { - final Map ret; - - if (jsonStr == null || jsonStr.isEmpty()) { - ret = new HashMap<>(); - } else { - ret = getMapper().readValue(jsonStr, new TypeReference>() {}); - } - - return ret; - } - - static public String mapToJson(Map map) throws Exception { - return getMapper().writeValueAsString(map); - } - - static public String listToJson(List list) throws Exception { - return getMapper().writeValueAsString(list); - } - - static public String objToJson(Serializable obj) throws Exception { - return getMapper().writeValueAsString(obj); - } - - static public T jsonToObj(String json, Class tClass) throws Exception { - return getMapper().readValue(json, tClass); - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/PolicyRefresher.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/PolicyRefresher.java index aae09a7d26..b444ebb092 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/PolicyRefresher.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/PolicyRefresher.java @@ -26,8 +26,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.atlas.admin.client.RangerAdminClient; -import org.apache.atlas.authorization.hadoop.config.RangerPluginConfig; +import org.apache.atlas.authorization.config.RangerPluginConfig; import org.apache.atlas.plugin.policyengine.RangerPluginContext; import org.apache.atlas.plugin.service.RangerBasePlugin; @@ -49,7 +48,6 @@ public class PolicyRefresher extends Thread { private final RangerBasePlugin plugIn; private final String serviceType; private final String serviceName; - private final RangerAdminClient rangerAdmin; private final AtlasAuthAdminClient atlasAuthAdminClient; private final RangerRolesProvider rolesProvider; private final RangerUserStoreProvider userStoreProvider; @@ -88,8 +86,6 @@ public PolicyRefresher(RangerBasePlugin plugIn) { this.cacheFileName = cacheFilename; - rangerAdmin = getRangerAdminClient(); - Gson gson = null; try { gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").create(); @@ -132,13 +128,6 @@ public String getServiceName() { return serviceName; } - /** - * @return the rangerAdmin - */ - public RangerAdminClient getRangerAdminClient() { - return rangerAdmin; - } - public long getLastActivationTimeInMillis() { return lastActivationTimeInMillis; } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPluginCapability.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPluginCapability.java index a80b495f1b..dcd073ce6d 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPluginCapability.java +++ 
b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPluginCapability.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.util; -import org.apache.atlas.authorization.utils.JsonUtils; +import org.apache.atlas.authorization.utils.RangerUtil; import java.util.ArrayList; import java.util.Arrays; @@ -137,7 +137,7 @@ public List compare(RangerPluginCapability other) { @Override public String toString() { List capabilities = toStrings(pluginCapabilities); - return JsonUtils.objectToJson(capabilities); + return RangerUtil.objectToJson(capabilities); } public static String getBaseRangerCapabilities() { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPolicyDeltaUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPolicyDeltaUtil.java index e3cb0f3697..372c16e6cf 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPolicyDeltaUtil.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerPolicyDeltaUtil.java @@ -26,7 +26,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.atlas.plugin.model.RangerPolicy; import org.apache.atlas.plugin.model.RangerPolicyDelta; -import org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; +import org.apache.atlas.plugin.store.ServiceDefsUtil; import java.util.ArrayList; import java.util.Arrays; @@ -63,7 +63,7 @@ public static List applyDeltas(List policies, List deltas, String compo final String serviceType = delta.getServiceType(); final String policyType = delta.getPolicyType(); - if (serviceType == null || (!serviceType.equals(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME) && + if (serviceType == null || (!serviceType.equals(ServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME) && !serviceType.equals(componentServiceType))) { isValid = false; } else if (StringUtils.isEmpty(policyType) || (!RangerPolicy.POLICY_TYPE_ACCESS.equals(policyType) diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java deleted file mode 100644 index 94af3510e4..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRESTClient.java +++ /dev/null @@ -1,652 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.util; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientHandlerException; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.ClientConfig; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter; -import com.sun.jersey.client.urlconnection.HTTPSProperties; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.hadoop.utils.RangerCredentialProvider; -import org.apache.atlas.authorization.utils.StringUtil; -import org.codehaus.jackson.jaxrs.JacksonJsonProvider; - -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.KeyManager; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLSession; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; -import javax.ws.rs.core.Cookie; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; -import java.security.UnrecoverableKeyException; -import java.security.cert.CertificateException; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - - -public class RangerRESTClient { - private static final Log LOG = LogFactory.getLog(RangerRESTClient.class); - - public static final String RANGER_PROP_POLICYMGR_URL = "ranger.service.store.rest.url"; - public static final String RANGER_PROP_POLICYMGR_SSLCONFIG_FILENAME = "ranger.service.store.rest.ssl.config.file"; - - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE = "xasecure.policymgr.clientssl.keystore"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE = "xasecure.policymgr.clientssl.keystore.type"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.keystore.credential.file"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL_ALIAS = "sslKeyStore"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE_DEFAULT = "jks"; - - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE = "xasecure.policymgr.clientssl.truststore"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE = "xasecure.policymgr.clientssl.truststore.type"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.truststore.credential.file"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL_ALIAS = "sslTrustStore"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE_DEFAULT = "jks"; - - public static final String RANGER_SSL_KEYMANAGER_ALGO_TYPE = KeyManagerFactory.getDefaultAlgorithm(); - public static final String RANGER_SSL_TRUSTMANAGER_ALGO_TYPE = TrustManagerFactory.getDefaultAlgorithm(); - public static final String RANGER_SSL_CONTEXT_ALGO_TYPE = "TLS"; - - private String mUrl; - private String mSslConfigFileName; - 
private String mUsername; - private String mPassword; - private boolean mIsSSL; - - private String mKeyStoreURL; - private String mKeyStoreAlias; - private String mKeyStoreFile; - private String mKeyStoreType; - private String mTrustStoreURL; - private String mTrustStoreAlias; - private String mTrustStoreFile; - private String mTrustStoreType; - private Gson gsonBuilder; - private int mRestClientConnTimeOutMs; - private int mRestClientReadTimeOutMs; - private int lastKnownActiveUrlIndex; - - private final List configuredURLs; - - private volatile Client client; - - - public RangerRESTClient(String url, String sslConfigFileName, Configuration config) { - mUrl = url; - mSslConfigFileName = sslConfigFileName; - configuredURLs = StringUtil.getURLs(mUrl); - - setLastKnownActiveUrlIndex((new Random()).nextInt(getConfiguredURLs().size())); - - init(config); - } - - public String getUrl() { - return mUrl; - } - - public void setUrl(String url) { - this.mUrl = url; - } - - public String getUsername() { - return mUsername; - } - - public String getPassword() { - return mPassword; - } - - public int getRestClientConnTimeOutMs() { - return mRestClientConnTimeOutMs; - } - - public void setRestClientConnTimeOutMs(int mRestClientConnTimeOutMs) { - this.mRestClientConnTimeOutMs = mRestClientConnTimeOutMs; - } - - public int getRestClientReadTimeOutMs() { - return mRestClientReadTimeOutMs; - } - - public void setRestClientReadTimeOutMs(int mRestClientReadTimeOutMs) { - this.mRestClientReadTimeOutMs = mRestClientReadTimeOutMs; - } - - public void setBasicAuthInfo(String username, String password) { - mUsername = username; - mPassword = password; - } - - public WebResource getResource(String relativeUrl) { - WebResource ret = getClient().resource(getUrl() + relativeUrl); - - return ret; - } - - public String toJson(Object obj) { - return gsonBuilder.toJson(obj); - } - - public T fromJson(String json, Class cls) { - return gsonBuilder.fromJson(json, cls); - } - - public Client getClient() { - // result saves on access time when client is built at the time of the call - Client result = client; - if(result == null) { - synchronized(this) { - result = client; - if(result == null) { - client = result = buildClient(); - } - } - } - - return result; - } - - private Client buildClient() { - Client client = null; - - if (mIsSSL) { - KeyManager[] kmList = getKeyManagers(); - TrustManager[] tmList = getTrustManagers(); - SSLContext sslContext = getSSLContext(kmList, tmList); - ClientConfig config = new DefaultClientConfig(); - - config.getClasses().add(JacksonJsonProvider.class); // to handle List<> unmarshalling - - HostnameVerifier hv = new HostnameVerifier() { - public boolean verify(String urlHostName, SSLSession session) { - return session.getPeerHost().equals(urlHostName); - } - }; - - config.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(hv, sslContext)); - - client = Client.create(config); - } - - if(client == null) { - ClientConfig config = new DefaultClientConfig(); - - config.getClasses().add(JacksonJsonProvider.class); // to handle List<> unmarshalling - - client = Client.create(config); - } - - if(StringUtils.isNotEmpty(mUsername) && StringUtils.isNotEmpty(mPassword)) { - client.addFilter(new HTTPBasicAuthFilter(mUsername, mPassword)); - } - - // Set Connection Timeout and ReadTime for the PolicyRefresh - client.setConnectTimeout(mRestClientConnTimeOutMs); - client.setReadTimeout(mRestClientReadTimeOutMs); - - return client; - } - - public void resetClient(){ - client = 
null; - } - - private void init(Configuration config) { - try { - gsonBuilder = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ").create(); - } catch(Throwable excp) { - LOG.fatal("RangerRESTClient.init(): failed to create GsonBuilder object", excp); - } - - mIsSSL = StringUtils.containsIgnoreCase(mUrl, "https"); - - if (mIsSSL) { - - InputStream in = null; - - try { - in = getFileInputStream(mSslConfigFileName); - - if (in != null) { - config.addResource(in); - } - - mKeyStoreURL = config.get(RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL); - mKeyStoreAlias = RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL_ALIAS; - mKeyStoreType = config.get(RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE, RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE_DEFAULT); - mKeyStoreFile = config.get(RANGER_POLICYMGR_CLIENT_KEY_FILE); - - mTrustStoreURL = config.get(RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL); - mTrustStoreAlias = RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL_ALIAS; - mTrustStoreType = config.get(RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE, RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE_DEFAULT); - mTrustStoreFile = config.get(RANGER_POLICYMGR_TRUSTSTORE_FILE); - } catch (IOException ioe) { - LOG.error("Unable to load SSL Config FileName: [" + mSslConfigFileName + "]", ioe); - } finally { - close(in, mSslConfigFileName); - } - - } - } - - private KeyManager[] getKeyManagers() { - KeyManager[] kmList = null; - - String keyStoreFilepwd = getCredential(mKeyStoreURL, mKeyStoreAlias); - - kmList = getKeyManagers(mKeyStoreFile,keyStoreFilepwd); - return kmList; - } - - public KeyManager[] getKeyManagers(String keyStoreFile, String keyStoreFilePwd) { - KeyManager[] kmList = null; - - if (StringUtils.isNotEmpty(keyStoreFile) && StringUtils.isNotEmpty(keyStoreFilePwd)) { - InputStream in = null; - - try { - in = getFileInputStream(keyStoreFile); - - if (in != null) { - KeyStore keyStore = KeyStore.getInstance(mKeyStoreType); - - keyStore.load(in, keyStoreFilePwd.toCharArray()); - - KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(RANGER_SSL_KEYMANAGER_ALGO_TYPE); - - keyManagerFactory.init(keyStore, keyStoreFilePwd.toCharArray()); - - kmList = keyManagerFactory.getKeyManagers(); - } else { - LOG.error("Unable to obtain keystore from file [" + keyStoreFile + "]"); - throw new IllegalStateException("Unable to find keystore file :" + keyStoreFile); - } - } catch (KeyStoreException e) { - LOG.error("Unable to obtain from KeyStore :" + e.getMessage(), e); - throw new IllegalStateException("Unable to init keystore:" + e.getMessage(), e); - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is NOT available in the environment", e); - throw new IllegalStateException("SSL algorithm is NOT available in the environment :" + e.getMessage(), e); - } catch (CertificateException e) { - LOG.error("Unable to obtain the requested certification ", e); - throw new IllegalStateException("Unable to obtain the requested certification :" + e.getMessage(), e); - } catch (FileNotFoundException e) { - LOG.error("Unable to find the necessary SSL Keystore Files", e); - throw new IllegalStateException("Unable to find keystore file :" + keyStoreFile + ", error :" + e.getMessage(), e); - } catch (IOException e) { - LOG.error("Unable to read the necessary SSL Keystore Files", e); - throw new IllegalStateException("Unable to read keystore file :" + keyStoreFile + ", error :" + e.getMessage(), e); - } catch (UnrecoverableKeyException e) { - LOG.error("Unable to recover the key from keystore", e); - throw new IllegalStateException("Unable 
to recover the key from keystore :" + keyStoreFile+", error :" + e.getMessage(), e); - } finally { - close(in, keyStoreFile); - } - } - - return kmList; - } - - private TrustManager[] getTrustManagers() { - TrustManager[] tmList = null; - if (StringUtils.isNotEmpty(mTrustStoreURL) && StringUtils.isNotEmpty(mTrustStoreAlias)) { - String trustStoreFilepwd = getCredential(mTrustStoreURL, mTrustStoreAlias); - if (StringUtils.isNotEmpty(trustStoreFilepwd)) { - tmList = getTrustManagers(mTrustStoreFile, trustStoreFilepwd); - } - } - return tmList; - } - - public TrustManager[] getTrustManagers(String trustStoreFile, String trustStoreFilepwd) { - TrustManager[] tmList = null; - - if (StringUtils.isNotEmpty(trustStoreFile) && StringUtils.isNotEmpty(trustStoreFilepwd)) { - InputStream in = null; - - try { - in = getFileInputStream(trustStoreFile); - - if (in != null) { - KeyStore trustStore = KeyStore.getInstance(mTrustStoreType); - - trustStore.load(in, trustStoreFilepwd.toCharArray()); - - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(RANGER_SSL_TRUSTMANAGER_ALGO_TYPE); - - trustManagerFactory.init(trustStore); - - tmList = trustManagerFactory.getTrustManagers(); - } else { - LOG.error("Unable to obtain truststore from file [" + trustStoreFile + "]"); - throw new IllegalStateException("Unable to find truststore file :" + trustStoreFile); - } - } catch (KeyStoreException e) { - LOG.error("Unable to obtain from KeyStore", e); - throw new IllegalStateException("Unable to init keystore:" + e.getMessage(), e); - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is NOT available in the environment :" + e.getMessage(), e); - throw new IllegalStateException("SSL algorithm is NOT available in the environment :" + e.getMessage(), e); - } catch (CertificateException e) { - LOG.error("Unable to obtain the requested certification :" + e.getMessage(), e); - throw new IllegalStateException("Unable to obtain the requested certification :" + e.getMessage(), e); - } catch (FileNotFoundException e) { - LOG.error("Unable to find the necessary SSL TrustStore File:" + trustStoreFile, e); - throw new IllegalStateException("Unable to find trust store file :" + trustStoreFile + ", error :" + e.getMessage(), e); - } catch (IOException e) { - LOG.error("Unable to read the necessary SSL TrustStore Files :" + trustStoreFile, e); - throw new IllegalStateException("Unable to read the trust store file :" + trustStoreFile + ", error :" + e.getMessage(), e); - } finally { - close(in, trustStoreFile); - } - } - - return tmList; - } - - protected SSLContext getSSLContext(KeyManager[] kmList, TrustManager[] tmList) { - if (tmList == null) { - try { - String algo = TrustManagerFactory.getDefaultAlgorithm() ; - TrustManagerFactory tmf = TrustManagerFactory.getInstance(algo) ; - tmf.init((KeyStore)null) ; - tmList = tmf.getTrustManagers() ; - } - catch(NoSuchAlgorithmException | KeyStoreException | IllegalStateException e) { - LOG.error("Unable to get the default SSL TrustStore for the JVM",e); - tmList = null; - } - } - Validate.notNull(tmList, "TrustManager is not specified"); - try { - SSLContext sslContext = SSLContext.getInstance(RANGER_SSL_CONTEXT_ALGO_TYPE); - - sslContext.init(kmList, tmList, new SecureRandom()); - - return sslContext; - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is not available in the environment", e); - throw new IllegalStateException("SSL algorithm is not available in the environment: " + e.getMessage(), e); - } catch (KeyManagementException 
e) { - LOG.error("Unable to initials the SSLContext", e); - throw new IllegalStateException("Unable to initials the SSLContex: " + e.getMessage(), e); - } - } - - private String getCredential(String url, String alias) { - return RangerCredentialProvider.getInstance().getCredentialString(url, alias); - } - - private InputStream getFileInputStream(String fileName) throws IOException { - InputStream in = null; - - if(StringUtils.isNotEmpty(fileName)) { - File f = new File(fileName); - - if (f.exists()) { - in = new FileInputStream(f); - } - else { - in = ClassLoader.getSystemResourceAsStream(fileName); - } - } - - return in; - } - - private void close(InputStream str, String filename) { - if (str != null) { - try { - str.close(); - } catch (IOException excp) { - LOG.error("Error while closing file: [" + filename + "]", excp); - } - } - } - - public ClientResponse get(String relativeUrl, Map params) throws Exception { - ClientResponse finalResponse = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = getClient().resource(configuredURLs.get(currentIndex) + relativeUrl); - webResource = setQueryParams(webResource, params); - - finalResponse = webResource.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON).get(ClientResponse.class); - - if (finalResponse != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException ex) { - LOG.warn("Failed to communicate with Ranger Admin, URL : " + configuredURLs.get(currentIndex)); - processException(index, ex); - } - } - return finalResponse; - } - - public ClientResponse get(String relativeUrl, Map params, Cookie sessionId) throws Exception{ - ClientResponse finalResponse = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = createWebResourceForCookieAuth(currentIndex, relativeUrl); - webResource = setQueryParams(webResource, params); - WebResource.Builder br = webResource.getRequestBuilder().cookie(sessionId); - finalResponse = br.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON).get(ClientResponse.class); - - if (finalResponse != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException ex) { - LOG.warn("Failed to communicate with Ranger Admin, URL : "+configuredURLs.get(currentIndex)); - processException(index, ex); - } - } - return finalResponse; - } - - public ClientResponse post(String relativeUrl, Map params, Object obj) throws Exception { - ClientResponse finalResponse = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = getClient().resource(configuredURLs.get(currentIndex) + relativeUrl); - webResource = setQueryParams(webResource, params); - finalResponse = webResource.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON).post(ClientResponse.class, toJson(obj)); - if (finalResponse != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException ex) 
{ - LOG.warn("Failed to communicate with Ranger Admin, URL : " + configuredURLs.get(currentIndex)); - processException(index, ex); - } - } - return finalResponse; - } - - public ClientResponse delete(String relativeUrl, Map params) throws Exception { - ClientResponse finalResponse = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = getClient().resource(configuredURLs.get(currentIndex) + relativeUrl); - webResource = setQueryParams(webResource, params); - - finalResponse = webResource.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON).delete(ClientResponse.class); - if (finalResponse != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException ex) { - LOG.warn("Failed to communicate with Ranger Admin, URL : " + configuredURLs.get(currentIndex)); - processException(index, ex); - } - } - return finalResponse; - } - - public ClientResponse put(String relativeUrl, Map params, Object obj) throws Exception { - ClientResponse finalResponse = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = getClient().resource(configuredURLs.get(currentIndex) + relativeUrl); - webResource = setQueryParams(webResource, params); - finalResponse = webResource.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON).put(ClientResponse.class, toJson(obj)); - if (finalResponse != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException ex) { - LOG.warn("Failed to communicate with Ranger Admin, URL : " + configuredURLs.get(currentIndex)); - processException(index, ex); - } - } - return finalResponse; - } - - public ClientResponse put(String relativeURL, Object request, Cookie sessionId) throws Exception { - ClientResponse response = null; - int startIndex = this.lastKnownActiveUrlIndex; - int currentIndex = 0; - - for (int index = 0; index < configuredURLs.size(); index++) { - try { - currentIndex = (startIndex + index) % configuredURLs.size(); - - WebResource webResource = createWebResourceForCookieAuth(currentIndex, relativeURL); - WebResource.Builder br = webResource.getRequestBuilder().cookie(sessionId); - response = br.accept(RangerRESTUtils.REST_EXPECTED_MIME_TYPE).type(RangerRESTUtils.REST_MIME_TYPE_JSON) - .put(ClientResponse.class, toJson(request)); - if (response != null) { - setLastKnownActiveUrlIndex(currentIndex); - break; - } - } catch (ClientHandlerException e) { - LOG.warn("Failed to communicate with Ranger Admin, URL : " + configuredURLs.get(currentIndex)); - processException(index, e); - } - } - return response; - } - - protected static WebResource setQueryParams(WebResource webResource, Map params) { - WebResource ret = webResource; - if (webResource != null && params != null) { - Set> entrySet= params.entrySet(); - for (Map.Entry entry : entrySet) { - ret = ret.queryParam(entry.getKey(), entry.getValue()); - } - } - return ret; - } - - protected void setLastKnownActiveUrlIndex(int lastKnownActiveUrlIndex) { - this.lastKnownActiveUrlIndex = lastKnownActiveUrlIndex; - } - - protected WebResource createWebResourceForCookieAuth(int currentIndex, String relativeURL) { - 
Client cookieClient = getClient(); - cookieClient.removeAllFilters(); - WebResource ret = cookieClient.resource(configuredURLs.get(currentIndex) + relativeURL); - return ret; - } - - protected void processException(int index, ClientHandlerException e) throws Exception { - if (index == configuredURLs.size() - 1) { - LOG.error("Failed to communicate with all Ranger Admin's URL's : [ " + configuredURLs + " ]"); - throw e; - } - } - - public int getLastKnownActiveUrlIndex() { - return lastKnownActiveUrlIndex; - } - - public List getConfiguredURLs() { - return configuredURLs; - } - - public boolean isSSL() { - return mIsSSL; - } - - public void setSSL(boolean mIsSSL) { - this.mIsSSL = mIsSSL; - } - - protected void setClient(Client client) { - this.client = client; - } - - protected void setKeyStoreType(String mKeyStoreType) { - this.mKeyStoreType = mKeyStoreType; - } - - protected void setTrustStoreType(String mTrustStoreType) { - this.mTrustStoreType = mTrustStoreType; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRequestedResources.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRequestedResources.java index fa4272aa67..999d2ecd54 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRequestedResources.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRequestedResources.java @@ -19,8 +19,8 @@ package org.apache.atlas.plugin.util; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.commons.collections.CollectionUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.policyengine.RangerAccessResource; import org.apache.atlas.plugin.policyresourcematcher.RangerPolicyResourceMatcher; @@ -31,7 +31,7 @@ import java.util.List; import java.util.Map; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) /*@JsonAutoDetect(getterVisibility= JsonAutoDetect.Visibility.NONE, setterVisibility= JsonAutoDetect.Visibility.NONE, fieldVisibility= JsonAutoDetect.Visibility.ANY) @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL ) @JsonIgnoreProperties(ignoreUnknown=true)*/ diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java index 083871f1e2..dc474fe3bc 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRoles.java @@ -19,7 +19,7 @@ package org.apache.atlas.plugin.util; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.atlas.plugin.model.RangerRole; import javax.xml.bind.annotation.XmlAccessType; @@ -29,7 +29,7 @@ import java.util.Date; import java.util.Set; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerRoles implements Serializable { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRolesProvider.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRolesProvider.java index 4607d6192a..dc44402046 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRolesProvider.java +++ 
b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerRolesProvider.java @@ -26,7 +26,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.admin.client.RangerAdminClient; import org.apache.atlas.plugin.service.RangerBasePlugin; import java.io.File; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerServiceNotFoundException.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerServiceNotFoundException.java index e1c836bc8b..9bd4c1d505 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerServiceNotFoundException.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerServiceNotFoundException.java @@ -22,17 +22,9 @@ import org.apache.commons.lang.StringUtils; public class RangerServiceNotFoundException extends Exception { - static private final String formatString = "\"RANGER_ERROR_SERVICE_NOT_FOUND: ServiceName=%s\""; + public RangerServiceNotFoundException(String serviceName) { super(serviceName); } - public static final String buildExceptionMsg(String serviceName) { - return String.format(formatString, serviceName); - } - public static final void throwExceptionIfServiceNotFound(String serviceName, String exceptionMsg) throws RangerServiceNotFoundException { - String expectedExceptionMsg = buildExceptionMsg(serviceName); - if (StringUtils.startsWith(exceptionMsg, expectedExceptionMsg)) { - throw new RangerServiceNotFoundException(serviceName); - } - } + } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerSslHelper.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerSslHelper.java deleted file mode 100644 index 187185b268..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerSslHelper.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.plugin.util; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.atlas.authorization.hadoop.utils.RangerCredentialProvider; -import org.apache.atlas.authorization.utils.StringUtil; - -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.KeyManager; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLSession; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; -import java.security.UnrecoverableKeyException; -import java.security.cert.CertificateException; - -public class RangerSslHelper { - private static final Log LOG = LogFactory.getLog(RangerSslHelper.class); - - static final String RANGER_POLICYMGR_CLIENT_KEY_FILE = "xasecure.policymgr.clientssl.keystore"; - static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE = "xasecure.policymgr.clientssl.keystore.type"; - static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.keystore.credential.file"; - static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL_ALIAS = "sslKeyStore"; - static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE_DEFAULT = "jks"; - - static final String RANGER_POLICYMGR_TRUSTSTORE_FILE = "xasecure.policymgr.clientssl.truststore"; - static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE = "xasecure.policymgr.clientssl.truststore.type"; - static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.truststore.credential.file"; - static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL_ALIAS = "sslTrustStore"; - static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE_DEFAULT = "jks"; - - static final String RANGER_SSL_KEYMANAGER_ALGO_TYPE = KeyManagerFactory.getDefaultAlgorithm(); - static final String RANGER_SSL_TRUSTMANAGER_ALGO_TYPE = TrustManagerFactory.getDefaultAlgorithm(); - static final String RANGER_SSL_CONTEXT_ALGO_TYPE = "TLS"; - - private String mKeyStoreURL; - private String mKeyStoreAlias; - private String mKeyStoreFile; - private String mKeyStoreType; - private String mTrustStoreURL; - private String mTrustStoreAlias; - private String mTrustStoreFile; - private String mTrustStoreType; - - final static HostnameVerifier _Hv = new HostnameVerifier() { - - @Override - public boolean verify(String urlHostName, SSLSession session) { - return session.getPeerHost().equals(urlHostName); - } - }; - - final String mSslConfigFileName; - - public RangerSslHelper(String sslConfigFileName) { - mSslConfigFileName = sslConfigFileName; - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerSslHelper(" + mSslConfigFileName + ")"); - } - - } - - public SSLContext createContext() { - readConfig(); - KeyManager[] kmList = getKeyManagers(); - TrustManager[] tmList = getTrustManagers(); - SSLContext sslContext = getSSLContext(kmList, tmList); - return sslContext; - } - - public HostnameVerifier getHostnameVerifier() { - return _Hv; - } - - void readConfig() { - InputStream in = null; - - try { - Configuration conf = new Configuration(); - - in = getFileInputStream(mSslConfigFileName); - - if (in != null) { - 
conf.addResource(in); - } - - mKeyStoreURL = conf.get(RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL); - mKeyStoreAlias = RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL_ALIAS; - mKeyStoreType = conf.get(RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE, RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE_DEFAULT); - mKeyStoreFile = conf.get(RANGER_POLICYMGR_CLIENT_KEY_FILE); - - mTrustStoreURL = conf.get(RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL); - mTrustStoreAlias = RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL_ALIAS; - mTrustStoreType = conf.get(RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE, RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE_DEFAULT); - mTrustStoreFile = conf.get(RANGER_POLICYMGR_TRUSTSTORE_FILE); - - if (LOG.isDebugEnabled()) { - LOG.debug(toString()); - } - } - catch(IOException ioe) { - LOG.error("Unable to load SSL Config FileName: [" + mSslConfigFileName + "]", ioe); - } - finally { - close(in, mSslConfigFileName); - } - } - - private KeyManager[] getKeyManagers() { - KeyManager[] kmList = null; - - String keyStoreFilepwd = getCredential(mKeyStoreURL, mKeyStoreAlias); - - if (!StringUtil.isEmpty(mKeyStoreFile) && !StringUtil.isEmpty(keyStoreFilepwd)) { - InputStream in = null; - - try { - in = getFileInputStream(mKeyStoreFile); - - if (in != null) { - KeyStore keyStore = KeyStore.getInstance(mKeyStoreType); - - keyStore.load(in, keyStoreFilepwd.toCharArray()); - - KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(RANGER_SSL_KEYMANAGER_ALGO_TYPE); - - keyManagerFactory.init(keyStore, keyStoreFilepwd.toCharArray()); - - kmList = keyManagerFactory.getKeyManagers(); - } else { - LOG.error("Unable to obtain keystore from file [" + mKeyStoreFile + "]"); - } - } catch (KeyStoreException e) { - LOG.error("Unable to obtain from KeyStore", e); - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is available in the environment", e); - } catch (CertificateException e) { - LOG.error("Unable to obtain the requested certification ", e); - } catch (FileNotFoundException e) { - LOG.error("Unable to find the necessary SSL Keystore and TrustStore Files", e); - } catch (IOException e) { - LOG.error("Unable to read the necessary SSL Keystore and TrustStore Files", e); - } catch (UnrecoverableKeyException e) { - LOG.error("Unable to recover the key from keystore", e); - } finally { - close(in, mKeyStoreFile); - } - } - - return kmList; - } - - private TrustManager[] getTrustManagers() { - TrustManager[] tmList = null; - - String trustStoreFilepwd = getCredential(mTrustStoreURL, mTrustStoreAlias); - - if (!StringUtil.isEmpty(mTrustStoreFile) && !StringUtil.isEmpty(trustStoreFilepwd)) { - InputStream in = null; - - try { - in = getFileInputStream(mTrustStoreFile); - - if (in != null) { - KeyStore trustStore = KeyStore.getInstance(mTrustStoreType); - - trustStore.load(in, trustStoreFilepwd.toCharArray()); - - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(RANGER_SSL_TRUSTMANAGER_ALGO_TYPE); - - trustManagerFactory.init(trustStore); - - tmList = trustManagerFactory.getTrustManagers(); - } else { - LOG.error("Unable to obtain keystore from file [" + mTrustStoreFile + "]"); - } - } catch (KeyStoreException e) { - LOG.error("Unable to obtain from KeyStore", e); - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is available in the environment", e); - } catch (CertificateException e) { - LOG.error("Unable to obtain the requested certification ", e); - } catch (FileNotFoundException e) { - LOG.error("Unable to find the necessary SSL Keystore and TrustStore Files", e); - 
} catch (IOException e) { - LOG.error("Unable to read the necessary SSL Keystore and TrustStore Files", e); - } finally { - close(in, mTrustStoreFile); - } - } - - return tmList; - } - - private SSLContext getSSLContext(KeyManager[] kmList, TrustManager[] tmList) { - try { - if(tmList != null) { - SSLContext sslContext = SSLContext.getInstance(RANGER_SSL_CONTEXT_ALGO_TYPE); - - sslContext.init(kmList, tmList, new SecureRandom()); - - return sslContext; - } - } catch (NoSuchAlgorithmException e) { - LOG.error("SSL algorithm is available in the environment", e); - } catch (Exception e) { - LOG.error("Unable to initialize the SSLContext", e); - } - - return null; - } - - private String getCredential(String url, String alias) { - return RangerCredentialProvider.getInstance().getCredentialString(url, alias); - } - - private InputStream getFileInputStream(String fileName) throws IOException { - InputStream in = null; - - if(! StringUtil.isEmpty(fileName)) { - File f = new File(fileName); - - if (f.exists()) { - in = new FileInputStream(f); - } - else { - in = ClassLoader.getSystemResourceAsStream(fileName); - } - } - - return in; - } - - private void close(InputStream str, String filename) { - if (str != null) { - try { - str.close(); - } catch (IOException excp) { - LOG.error("Error while closing file: [" + filename + "]", excp); - } - } - } - - @Override - public String toString() { - return "keyStoreAlias=" + mKeyStoreAlias + ", " - + "keyStoreFile=" + mKeyStoreFile + ", " - + "keyStoreType="+ mKeyStoreType + ", " - + "keyStoreURL=" + mKeyStoreURL + ", " - + "trustStoreAlias=" + mTrustStoreAlias + ", " - + "trustStoreFile=" + mTrustStoreFile + ", " - + "trustStoreType=" + mTrustStoreType + ", " - + "trustStoreURL=" + mTrustStoreURL - ; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java index 7318648cf8..5af0ee64c4 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/RangerUserStore.java @@ -22,7 +22,7 @@ import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.model.GroupInfo; import org.apache.atlas.plugin.model.UserInfo; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceDefUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceDefUtil.java index 39e0e236e8..ce1b4bf0d7 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceDefUtil.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceDefUtil.java @@ -19,6 +19,7 @@ package org.apache.atlas.plugin.util; +import org.apache.atlas.authorization.utils.RangerAtlasConstants; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; @@ -29,8 +30,7 @@ import org.apache.atlas.plugin.model.RangerServiceDef.RangerDataMaskTypeDef; import org.apache.atlas.plugin.model.RangerServiceDef.RangerResourceDef; import org.apache.atlas.plugin.policyengine.RangerPluginContext; -import org.apache.atlas.plugin.store.AbstractServiceStore; -import 
org.apache.atlas.plugin.store.EmbeddedServiceDefsUtil; +import org.apache.atlas.plugin.store.ServiceDefsUtil; import java.util.ArrayList; import java.util.Collection; @@ -48,7 +48,7 @@ public static boolean getOption_enableDenyAndExceptionsInPolicies(RangerServiceD if(serviceDef != null) { Configuration config = pluginContext != null ? pluginContext.getConfig() : null; boolean enableDenyAndExceptionsInPoliciesHiddenOption = config == null || config.getBoolean("ranger.servicedef.enableDenyAndExceptionsInPolicies", true); - boolean defaultValue = enableDenyAndExceptionsInPoliciesHiddenOption || StringUtils.equalsIgnoreCase(serviceDef.getName(), EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME); + boolean defaultValue = enableDenyAndExceptionsInPoliciesHiddenOption || StringUtils.equalsIgnoreCase(serviceDef.getName(), ServiceDefsUtil.EMBEDDED_SERVICEDEF_TAG_NAME); ret = ServiceDefUtil.getBooleanValue(serviceDef.getOptions(), RangerServiceDef.OPTION_ENABLE_DENY_AND_EXCEPTIONS_IN_POLICIES, defaultValue); } @@ -196,7 +196,7 @@ public static RangerServiceDef normalizeAccessTypeDefs(RangerServiceDef serviceD if (CollectionUtils.isNotEmpty(accessTypeDefs)) { - String prefix = componentType + AbstractServiceStore.COMPONENT_ACCESSTYPE_SEPARATOR; + String prefix = componentType + RangerAtlasConstants.COMPONENT_ACCESSTYPE_SEPARATOR; List unneededAccessTypeDefs = null; @@ -232,7 +232,7 @@ public static RangerServiceDef normalizeAccessTypeDefs(RangerServiceDef serviceD accessTypeDef.setImpliedGrants(newImpliedGrants); } - } else if (StringUtils.contains(accessType, AbstractServiceStore.COMPONENT_ACCESSTYPE_SEPARATOR)) { + } else if (StringUtils.contains(accessType, RangerAtlasConstants.COMPONENT_ACCESSTYPE_SEPARATOR)) { if(unneededAccessTypeDefs == null) { unneededAccessTypeDefs = new ArrayList<>(); } diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java index 547349c8f5..c6777e7020 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServicePolicies.java @@ -20,10 +20,10 @@ package org.apache.atlas.plugin.util; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.commons.collections.MapUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.model.RangerPolicy; import org.apache.atlas.plugin.model.RangerPolicyDelta; import org.apache.atlas.plugin.model.RangerServiceDef; @@ -40,7 +40,7 @@ import java.util.List; import java.util.Map; -@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class ServicePolicies implements java.io.Serializable { @@ -184,7 +184,7 @@ public String toString() { public void setPolicyDeltas(List policyDeltas) { this.policyDeltas = policyDeltas; } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public static class TagPolicies implements java.io.Serializable { @@ -302,7 +302,7 @@ public String toString() { } } - @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL) @XmlRootElement 
@XmlAccessorType(XmlAccessType.FIELD) public static class SecurityZoneInfo implements java.io.Serializable { diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceTags.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceTags.java index a57d4d2600..3118534e9a 100644 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceTags.java +++ b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/ServiceTags.java @@ -20,7 +20,8 @@ package org.apache.atlas.plugin.util; -import org.apache.htrace.shaded.fasterxml.jackson.annotation.JsonInclude; + +import com.fasterxml.jackson.annotation.JsonInclude; import org.apache.atlas.plugin.model.RangerServiceResource; import org.apache.atlas.plugin.model.RangerTag; import org.apache.atlas.plugin.model.RangerTagDef; diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/TimedEventUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/TimedEventUtil.java deleted file mode 100644 index 65ad8b66a5..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/TimedEventUtil.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.atlas.plugin.util; - -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; - -public class TimedEventUtil{ - - public static void runWithTimeout(final Runnable runnable, long timeout, TimeUnit timeUnit) throws Exception { - timedTask(new Callable() { - @Override - public Object call() throws Exception { - runnable.run(); - return null; - } - }, timeout, timeUnit); - } - - public static T timedTask(Callable callableObj, long timeout, - TimeUnit timeUnit) throws Exception{ - - return callableObj.call(); - - /* - final ExecutorService executor = Executors.newSingleThreadExecutor(); - final Future future = executor.submit(callableObj); - executor.shutdownNow(); - - try { - return future.get(timeout, timeUnit); - } catch (TimeoutException | InterruptedException | ExecutionException e) { - if(logger.isDebugEnabled()){ - logger.debug("Error executing task", e); - } - Throwable t = e.getCause(); - if (t instanceof Error) { - throw (Error) t; - } else if (t instanceof Exception) { - throw (Exception) e; - } else { - throw new IllegalStateException(t); - } - } - */ - - } - - -} \ No newline at end of file diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/URLEncoderUtil.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/URLEncoderUtil.java deleted file mode 100644 index aaa1843426..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/URLEncoderUtil.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.util; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; - -public class URLEncoderUtil { - - public static String encodeURIParam(String s) throws UnsupportedEncodingException { - - String ret = null; - - try { - ret = URLEncoder.encode(s, "UTF-8").replaceAll("\\+", "%20"); - } catch (UnsupportedEncodingException e) { - throw e; - } - - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/XMLUtils.java b/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/XMLUtils.java deleted file mode 100644 index 85e6e0eb94..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/plugin/util/XMLUtils.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.plugin.util; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -import javax.xml.XMLConstants; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.InputStream; -import java.util.Map; - -public class XMLUtils { - - private static final Logger LOG = LoggerFactory.getLogger(XMLUtils.class); - - private static final String XMLCONFIG_PROPERTY_TAGNAME = "property"; - private static final String XMLCONFIG_NAME_TAGNAME = "name"; - private static final String XMLCONFIG_VALUE_TAGNAME = "value"; - - public static void loadConfig(String configFileName, Map properties) { - try (InputStream input = getFileInputStream(configFileName)) { - loadConfig(input, properties); - } catch (Exception e) { - LOG.error("Error loading : ", e); - } - } - - public static void loadConfig(InputStream input, Map properties) { - try { - DocumentBuilderFactory xmlDocumentBuilderFactory = DocumentBuilderFactory.newInstance(); - xmlDocumentBuilderFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); - xmlDocumentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); - xmlDocumentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); - xmlDocumentBuilderFactory.setIgnoringComments(true); - xmlDocumentBuilderFactory.setNamespaceAware(true); - - DocumentBuilder xmlDocumentBuilder = xmlDocumentBuilderFactory.newDocumentBuilder(); - Document xmlDocument = xmlDocumentBuilder.parse(input); - xmlDocument.getDocumentElement().normalize(); - - NodeList nList = xmlDocument.getElementsByTagName(XMLCONFIG_PROPERTY_TAGNAME); - - for (int temp = 0; temp < nList.getLength(); temp++) { - - Node nNode = nList.item(temp); - - if (nNode.getNodeType() == Node.ELEMENT_NODE) { - - Element eElement = (Element) nNode; - - String propertyName = ""; - String propertyValue = ""; - if (eElement.getElementsByTagName(XMLCONFIG_NAME_TAGNAME).item(0) != null) { - propertyName = eElement.getElementsByTagName(XMLCONFIG_NAME_TAGNAME) - .item(0).getTextContent().trim(); - } - if (eElement.getElementsByTagName(XMLCONFIG_VALUE_TAGNAME).item(0) != null) { - propertyValue = eElement.getElementsByTagName(XMLCONFIG_VALUE_TAGNAME) - .item(0).getTextContent().trim(); - } - - if (properties.get(propertyName) != null) { - properties.remove(propertyName); - } - - properties.put(propertyName, propertyValue); - - } - } - - } catch (Exception e) { - LOG.error("Error loading : ", e); - } - } - - private static InputStream getFileInputStream(String path) throws FileNotFoundException { - - InputStream ret = null; - - // Guard against path traversal attacks - String sanitizedPath = new File(path).getName(); - if ("".equals(sanitizedPath)) { - return null; - } - File f = new File(sanitizedPath); - - if (f.exists()) { - ret = new FileInputStream(f); - } else { - ret = 
XMLUtils.class.getResourceAsStream(path); - - if (ret == null) { - if (! path.startsWith("/")) { - ret = XMLUtils.class.getResourceAsStream("/" + path); - } - } - - if (ret == null) { - ret = ClassLoader.getSystemClassLoader().getResourceAsStream(path); - if (ret == null) { - if (! path.startsWith("/")) { - ret = ClassLoader.getSystemResourceAsStream("/" + path); - } - } - } - } - - return ret; - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/atlas/services/tag/RangerServiceTag.java b/auth-agents-common/src/main/java/org/apache/atlas/services/tag/RangerServiceTag.java deleted file mode 100644 index 55ee56ef7f..0000000000 --- a/auth-agents-common/src/main/java/org/apache/atlas/services/tag/RangerServiceTag.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.services.tag; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.service.RangerBaseService; -import org.apache.atlas.plugin.service.ResourceLookupContext; -import org.apache.atlas.plugin.store.TagStore; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.plugin.policyengine.RangerPolicyEngine.GROUP_PUBLIC; - -public class RangerServiceTag extends RangerBaseService { - - private static final Log LOG = LogFactory.getLog(RangerServiceTag.class); - - public static final String TAG_RESOURCE_NAME = "tag"; - public static final String RANGER_TAG_NAME_EXPIRES_ON = "EXPIRES_ON"; - public static final String RANGER_TAG_EXPIRY_CONDITION_NAME = "accessed-after-expiry"; - - private TagStore tagStore; - - public RangerServiceTag() { - super(); - } - - @Override - public void init(RangerServiceDef serviceDef, RangerService service) { - super.init(serviceDef, service); - } - - public void setTagStore(TagStore tagStore) { - this.tagStore = tagStore; - } - - @Override - public Map validateConfig() throws Exception { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceTag.validateConfig(" + serviceName + " )"); - } - - Map ret = new HashMap<>(); - - ret.put("connectivityStatus", true); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceTag.validateConfig(" + serviceName + " ): " + ret); - } - - return ret; - } - - @Override - public List lookupResource(ResourceLookupContext context) throws Exception { - 
if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceTag.lookupResource(" + context + ")"); - } - - List ret = new ArrayList<>(); - - if (context != null && StringUtils.equals(context.getResourceName(), TAG_RESOURCE_NAME)) { - try { - List tags = tagStore != null ? tagStore.getTagTypes() : null; - - if(CollectionUtils.isNotEmpty(tags)) { - List valuesToExclude = MapUtils.isNotEmpty(context.getResources()) ? context.getResources().get(TAG_RESOURCE_NAME) : null; - - if(CollectionUtils.isNotEmpty(valuesToExclude)) { - tags.removeAll(valuesToExclude); - } - - String valueToMatch = context.getUserInput(); - - if(StringUtils.isNotEmpty(valueToMatch)) { - if(! valueToMatch.endsWith("*")) { - valueToMatch += "*"; - } - - for (String tag : tags) { - if(FilenameUtils.wildcardMatch(tag, valueToMatch)) { - ret.add(tag); - } - } - } - } - } catch (Exception excp) { - LOG.error("RangerServiceTag.lookupResource()", excp); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceTag.lookupResource(): tag count=" + ret.size()); - } - - return ret; - } - - @Override - public List getDefaultRangerPolicies() throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceTag.getDefaultRangerPolicies() "); - } - - List ret = new ArrayList(); - - boolean isConditionDefFound = false; - - List policyConditionDefs = serviceDef.getPolicyConditions(); - - if (CollectionUtils.isNotEmpty(policyConditionDefs)) { - for (RangerServiceDef.RangerPolicyConditionDef conditionDef : policyConditionDefs) { - if (conditionDef.getName().equals(RANGER_TAG_EXPIRY_CONDITION_NAME)) { - isConditionDefFound = true; - break; - } - } - } - - if (isConditionDefFound) { - - ret = super.getDefaultRangerPolicies(); - String tagResourceName = null; - if (!serviceDef.getResources().isEmpty()) { - tagResourceName = serviceDef.getResources().get(0).getName(); - - for (RangerPolicy defaultPolicy : ret) { - - RangerPolicy.RangerPolicyResource tagPolicyResource = defaultPolicy.getResources().get(tagResourceName); - - if (tagPolicyResource != null) { - - String value = RANGER_TAG_NAME_EXPIRES_ON; - - tagPolicyResource.setValue(value); - defaultPolicy.setName(value); - defaultPolicy.setDescription("Policy for data with " + value + " tag"); - - List defaultPolicyItems = defaultPolicy.getPolicyItems(); - - for (RangerPolicy.RangerPolicyItem defaultPolicyItem : defaultPolicyItems) { - - List groups = new ArrayList(); - groups.add(GROUP_PUBLIC); - defaultPolicyItem.setGroups(groups); - - List policyItemConditions = new ArrayList(); - List values = new ArrayList(); - values.add("yes"); - RangerPolicy.RangerPolicyItemCondition policyItemCondition = new RangerPolicy.RangerPolicyItemCondition(RANGER_TAG_EXPIRY_CONDITION_NAME, values); - policyItemConditions.add(policyItemCondition); - - defaultPolicyItem.setConditions(policyItemConditions); - defaultPolicyItem.setDelegateAdmin(Boolean.FALSE); - } - - defaultPolicy.setDenyPolicyItems(defaultPolicyItems); - defaultPolicy.setPolicyItems(null); - } - } - } - } else { - LOG.error("RangerServiceTag.getDefaultRangerPolicies() - Cannot create default TAG policy: Cannot get tagPolicyConditionDef with name=" + RANGER_TAG_EXPIRY_CONDITION_NAME); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceTag.getDefaultRangerPolicies() : " + ret); - } - return ret; - } -} diff --git a/auth-agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java b/auth-agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java deleted file 
mode 100644 index 414ac34e78..0000000000 --- a/auth-agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.hadoop.security; - -import javax.security.auth.Subject; -import javax.security.auth.callback.CallbackHandler; -import javax.security.auth.login.LoginException; -import javax.security.auth.spi.LoginModule; -import java.util.Map; - -public class KrbPasswordSaverLoginModule implements LoginModule { - public static final String USERNAME_PARAM = "javax.security.auth.login.name"; - public static final String PASSWORD_PARAM = "javax.security.auth.login.password"; - - @SuppressWarnings("rawtypes") - private Map sharedState; - - public KrbPasswordSaverLoginModule() { - } - - @Override - public boolean abort() throws LoginException { - return true; - } - - @Override - public boolean commit() throws LoginException { - return true; - } - - @SuppressWarnings("unchecked") - @Override - public void initialize(Subject subject, CallbackHandler callbackhandler, Map sharedMap, Map options) { - - this.sharedState = sharedMap; - - String userName = (options != null) ? (String)options.get(USERNAME_PARAM) : null; - if (userName != null) { - this.sharedState.put(USERNAME_PARAM,userName); - } - String password = (options != null) ? (String)options.get(PASSWORD_PARAM) : null; - - if (password != null) { - this.sharedState.put(PASSWORD_PARAM,password.toCharArray()); - } - } - - @Override - public boolean login() throws LoginException { - return true; - } - - @Override - public boolean logout() throws LoginException { - return true; - } - -} diff --git a/auth-agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java b/auth-agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java deleted file mode 100644 index 39ac3476a6..0000000000 --- a/auth-agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.hadoop.security; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; -import org.apache.hadoop.security.authentication.util.KerberosName; -import org.apache.hadoop.security.authentication.util.KerberosUtil; -import org.apache.hadoop.util.StringUtils; - -import javax.security.auth.Subject; -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag; -import javax.security.auth.login.LoginContext; -import javax.security.auth.login.LoginException; -import java.io.File; -import java.io.IOException; -import java.security.Principal; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -public class SecureClientLogin { - private static final Log LOG = LogFactory.getLog(SecureClientLogin.class); - public static final String HOSTNAME_PATTERN = "_HOST"; - - public synchronized static Subject loginUserFromKeytab(String user, String path) throws IOException { - try { - Subject subject = new Subject(); - SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(true, user, path); - LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf); - subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login)); - login.login(); - return login.getSubject(); - } catch (LoginException le) { - throw new IOException("Login failure for " + user + " from keytab " + path, le); - } - } - - public synchronized static Subject loginUserFromKeytab(String user, String path, String nameRules) throws IOException { - try { - Subject subject = new Subject(); - SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(true, user, path); - LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf); - KerberosName.setRules(nameRules); - subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login)); - login.login(); - return login.getSubject(); - } catch (LoginException le) { - throw new IOException("Login failure for " + user + " from keytab " + path, le); - } - } - - public synchronized static Subject loginUserWithPassword(String user, String password) throws IOException { - try { - Subject subject = new Subject(); - SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(false, user, password); - LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf); - subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login)); - login.login(); - return login.getSubject(); - } catch (LoginException le) { - throw new IOException("Login failure for " + user + " using password ****", le); - } - } - - public synchronized static Subject login(String user) throws IOException { - Subject subject = new Subject(); - subject.getPrincipals().add(new User(user)); - return subject; - } - - public static Set getUserPrincipals(Subject aSubject) { - if (aSubject != null) { - Set list = aSubject.getPrincipals(User.class); - if (list != null) { - Set ret = new HashSet<>(); - ret.addAll(list); - return ret; - } else { - return null; - } - } else { - return null; - } - } - - public static Principal createUserPrincipal(String aLoginName) { - return new User(aLoginName); - } - - 
public static boolean isKerberosCredentialExists(String principal, String keytabPath){ - boolean isValid = false; - if (keytabPath != null && !keytabPath.isEmpty()) { - File keytabFile = new File(keytabPath); - if (!keytabFile.exists()) { - LOG.warn(keytabPath + " doesn't exist."); - } else if (!keytabFile.canRead()) { - LOG.warn("Unable to read " + keytabPath + ". Please check the file access permissions for user"); - }else{ - isValid = true; - } - } else { - LOG.warn("Can't find keyTab Path : "+keytabPath); - } - if (!(principal != null && !principal.isEmpty() && isValid)) { - isValid = false; - LOG.warn("Can't find principal : "+principal); - } - return isValid; - } - - public static String getPrincipal(String principalConfig, String hostName) throws IOException { - String[] components = getComponents(principalConfig); - if (components == null || components.length != 3 || !HOSTNAME_PATTERN.equals(components[1])) { - return principalConfig; - } else { - if (hostName == null) { - throw new IOException("Can't replace " + HOSTNAME_PATTERN + " pattern since client ranger.service.host is null"); - } - return replacePattern(components, hostName); - } - } - - private static String[] getComponents(String principalConfig) { - if (principalConfig == null) - return null; - return principalConfig.split("[/@]"); - } - - private static String replacePattern(String[] components, String hostname) - throws IOException { - String fqdn = hostname; - if (fqdn == null || fqdn.isEmpty() || "0.0.0.0".equals(fqdn)) { - fqdn = java.net.InetAddress.getLocalHost().getCanonicalHostName(); - } - return components[0] + "/" + StringUtils.toLowerCase(fqdn) + "@" + components[2]; - } -} - -class SecureClientLoginConfiguration extends javax.security.auth.login.Configuration { - private Map kerberosOptions = new HashMap<>(); - private boolean usePassword; - - public SecureClientLoginConfiguration(boolean useKeyTab, String principal, String credential) { - kerberosOptions.put("principal", principal); - kerberosOptions.put("debug", "false"); - if (useKeyTab) { - kerberosOptions.put("useKeyTab", "true"); - kerberosOptions.put("keyTab", credential); - kerberosOptions.put("doNotPrompt", "true"); - } else { - usePassword = true; - kerberosOptions.put("useKeyTab", "false"); - kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, principal); - kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, credential); - kerberosOptions.put("doNotPrompt", "false"); - kerberosOptions.put("useFirstPass", "true"); - kerberosOptions.put("tryFirstPass","false"); - } - kerberosOptions.put("storeKey", "true"); - kerberosOptions.put("refreshKrb5Config", "true"); - } - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String appName) { - AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, kerberosOptions); - if (usePassword) { - AppConfigurationEntry KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions); - return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN }; - } - else { - return new AppConfigurationEntry[] { KEYTAB_KERBEROS_LOGIN }; - } - } - - -} diff --git a/auth-agents-cred/pom.xml b/auth-agents-cred/pom.xml deleted file mode 100644 index d2f5773230..0000000000 --- a/auth-agents-cred/pom.xml +++ /dev/null @@ -1,52 +0,0 @@ - - - - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - - 4.0.0 - - 
auth-agents-cred - - - 8 - 8 - - - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - net.minidev - json-smart - - - - - - - \ No newline at end of file diff --git a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/CredentialsProviderUtil.java b/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/CredentialsProviderUtil.java deleted file mode 100644 index 277c15e603..0000000000 --- a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/CredentialsProviderUtil.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.authorization.credutils; - -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.KerberosCredentials; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.config.AuthSchemes; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.atlas.authorization.credutils.kerberos.KerberosCredentialsProvider; -import org.apache.atlas.authorization.credutils.kerberos.KeytabJaasConf; -import org.ietf.jgss.GSSCredential; -import org.ietf.jgss.GSSException; -import org.ietf.jgss.GSSManager; -import org.ietf.jgss.GSSName; -import org.ietf.jgss.Oid; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.Subject; -import javax.security.auth.kerberos.KerberosPrincipal; -import javax.security.auth.kerberos.KerberosTicket; -import javax.security.auth.login.Configuration; -import javax.security.auth.login.LoginContext; -import java.math.BigDecimal; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.Collections; -import java.util.Date; -import java.util.Set; - -public class CredentialsProviderUtil { - private static final Logger logger = LoggerFactory.getLogger(CredentialsProviderUtil.class); - private static final Oid SPNEGO_OID = getSpnegoOid(); - private static final String CRED_CONF_NAME = "ESClientLoginConf"; - public static long ticketExpireTime80 = 0; - - private static Oid getSpnegoOid() { - Oid oid = null; - try { - oid = new Oid("1.3.6.1.5.5.2"); - } catch (GSSException gsse) { - throw new RuntimeException(gsse); - } - return oid; - } - - public static KerberosCredentialsProvider getKerberosCredentials(String user, String password){ - KerberosCredentialsProvider credentialsProvider = new KerberosCredentialsProvider(); - final GSSManager gssManager = GSSManager.getInstance(); - try { - final GSSName gssUserPrincipalName = gssManager.createName(user, GSSName.NT_USER_NAME); - Subject subject = 
login(user, password); - final AccessControlContext acc = AccessController.getContext(); - final GSSCredential credential = doAsPrivilegedWrapper(subject, - (PrivilegedExceptionAction) () -> gssManager.createCredential(gssUserPrincipalName, - GSSCredential.DEFAULT_LIFETIME, SPNEGO_OID, GSSCredential.INITIATE_ONLY), - acc); - credentialsProvider.setCredentials( - new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO), - new KerberosCredentials(credential)); - } catch (GSSException e) { - logger.error("GSSException:", e); - throw new RuntimeException(e); - } catch (PrivilegedActionException e) { - logger.error("PrivilegedActionException:", e); - throw new RuntimeException(e); - } - return credentialsProvider; - } - - public static synchronized KerberosTicket getTGT(Subject subject) { - Set tickets = subject.getPrivateCredentials(KerberosTicket.class); - for(KerberosTicket ticket: tickets) { - KerberosPrincipal server = ticket.getServer(); - if (server.getName().equals("krbtgt/" + server.getRealm() + "@" + server.getRealm())) { - if (logger.isDebugEnabled()) { - logger.debug("Client principal is \"" + ticket.getClient().getName() + "\"."); - logger.debug("Server principal is \"" + ticket.getServer().getName() + "\"."); - } - return ticket; - } - } - return null; - } - - public static Boolean ticketWillExpire(KerberosTicket ticket){ - long ticketExpireTime = ticket.getEndTime().getTime(); - long currrentTime = new Date().getTime(); - if (logger.isDebugEnabled()) { - logger.debug("TicketExpireTime is:" + ticketExpireTime); - logger.debug("currrentTime is:" + currrentTime); - } - if (ticketExpireTime80 == 0) { - long timeDiff = ticketExpireTime - currrentTime; - long timeDiff20 = Math.round(Float.parseFloat(BigDecimal.valueOf(timeDiff * 0.2).toPlainString())); - ticketExpireTime80 = ticketExpireTime - timeDiff20; - } - if (logger.isDebugEnabled()) { - logger.debug("ticketExpireTime80 is:" + ticketExpireTime80); - } - if (currrentTime > ticketExpireTime80) { - if (logger.isDebugEnabled()) { - logger.debug("Current time is more than 80% of Ticket Expire Time!!"); - } - ticketExpireTime80 = 0; - return true; - } - return false; - } - - public static synchronized Subject login(String userPrincipalName, String keytabPath) throws PrivilegedActionException { - Subject sub = AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - final Subject subject = new Subject(false, Collections.singleton(new KerberosPrincipal(userPrincipalName)), - Collections.emptySet(), Collections.emptySet()); - Configuration conf = new KeytabJaasConf(userPrincipalName, keytabPath, false); - - LoginContext loginContext = new LoginContext(CRED_CONF_NAME, subject, null, conf); - loginContext.login(); - return loginContext.getSubject(); - }); - return sub; - } - - - static T doAsPrivilegedWrapper(final Subject subject, final PrivilegedExceptionAction action, final AccessControlContext acc) - throws PrivilegedActionException { - try { - return AccessController.doPrivileged((PrivilegedExceptionAction) () -> Subject.doAsPrivileged(subject, action, acc)); - } catch (PrivilegedActionException pae) { - if (pae.getCause() instanceof PrivilegedActionException) { - throw (PrivilegedActionException) pae.getCause(); - } - throw pae; - } - } - - public static CredentialsProvider getBasicCredentials(String user, String password) { - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new 
UsernamePasswordCredentials(user, password)); - return credentialsProvider; - } - -} diff --git a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/AbstractJaasConf.java b/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/AbstractJaasConf.java deleted file mode 100644 index fddb8a5e6a..0000000000 --- a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/AbstractJaasConf.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.authorization.credutils.kerberos; - -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -public abstract class AbstractJaasConf extends Configuration { - private final String userPrincipalName; - private final boolean enableDebugLogs; - - public AbstractJaasConf(final String userPrincipalName, final boolean enableDebugLogs) { - this.userPrincipalName = userPrincipalName; - this.enableDebugLogs = enableDebugLogs; - } - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(final String name) { - final Map options = new HashMap<>(); - options.put("principal", userPrincipalName); - options.put("isInitiator", Boolean.TRUE.toString()); - options.put("storeKey", Boolean.TRUE.toString()); - options.put("debug", Boolean.toString(enableDebugLogs)); - addOptions(options); - return new AppConfigurationEntry[] { new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule", - AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, Collections.unmodifiableMap(options)) }; - } - - abstract void addOptions(Map options); -} diff --git a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KerberosCredentialsProvider.java b/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KerberosCredentialsProvider.java deleted file mode 100644 index 1a87548ce6..0000000000 --- a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KerberosCredentialsProvider.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.authorization.credutils.kerberos; - -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.Credentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.config.AuthSchemes; - -public class KerberosCredentialsProvider implements CredentialsProvider { - private AuthScope authScope; - private Credentials credentials; - - @Override - public void setCredentials(AuthScope authscope, Credentials credentials) { - if (authscope.getScheme().regionMatches(true, 0, AuthSchemes.SPNEGO, 0, AuthSchemes.SPNEGO.length()) == false) { - throw new IllegalArgumentException("Only " + AuthSchemes.SPNEGO + " auth scheme is supported in AuthScope"); - } - this.authScope = authscope; - this.credentials = credentials; - } - - @Override - public Credentials getCredentials(AuthScope authscope) { - assert this.authScope != null && authscope != null; - return authscope.match(this.authScope) > -1 ? this.credentials : null; - } - - @Override - public void clear() { - this.authScope = null; - this.credentials = null; - } - -} diff --git a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KeytabJaasConf.java b/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KeytabJaasConf.java deleted file mode 100644 index 2b0efb70a2..0000000000 --- a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/credutils/kerberos/KeytabJaasConf.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.authorization.credutils.kerberos; - -import java.util.Map; - -public class KeytabJaasConf extends AbstractJaasConf { - private final String keytabFilePath; - - public KeytabJaasConf(final String userPrincipalName, final String keytabFilePath, final boolean enableDebugLogs) { - super(userPrincipalName, enableDebugLogs); - this.keytabFilePath = keytabFilePath; - } - - public void addOptions(final Map options) { - options.put("useKeyTab", Boolean.TRUE.toString()); - options.put("keyTab", keytabFilePath); - options.put("doNotPrompt", Boolean.TRUE.toString()); - } - -} diff --git a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/hadoop/utils/RangerCredentialProvider.java b/auth-agents-cred/src/main/java/org/apache/atlas/authorization/hadoop/utils/RangerCredentialProvider.java deleted file mode 100644 index 58d84ba78e..0000000000 --- a/auth-agents-cred/src/main/java/org/apache/atlas/authorization/hadoop/utils/RangerCredentialProvider.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.authorization.hadoop.utils; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.alias.CredentialProvider; -import org.apache.hadoop.security.alias.CredentialProviderFactory; - -import java.util.List; - - -public final class RangerCredentialProvider { - - private static final Log LOG = LogFactory.getLog(RangerCredentialProvider.class); - - private static final RangerCredentialProvider CRED_PROVIDER = new RangerCredentialProvider(); - - protected RangerCredentialProvider() { - // - } - - public static RangerCredentialProvider getInstance() { - return CRED_PROVIDER; - } - - public String getCredentialString(String url, String alias) { - if (url != null && alias != null) { - List providers = getCredentialProviders(url); - if (providers != null) { - for (CredentialProvider provider : providers) { - try { - CredentialProvider.CredentialEntry credEntry = provider.getCredentialEntry(alias); - if (credEntry != null && credEntry.getCredential() != null) { - return new String(credEntry.getCredential()); - } - } catch (Exception ie) { - LOG.error("Unable to get the Credential Provider from the Configuration", ie); - } - } - } - } - return null; - } - - List getCredentialProviders(String url) { - if (url != null) { - try { - Configuration conf = new Configuration(); - conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, url); - return CredentialProviderFactory.getProviders(conf); - } catch (Exception ie) { - LOG.error("Unable to get the Credential Provider from the Configuration", ie); - } - } - return null; - } - -} diff --git a/auth-audits/pom.xml b/auth-audits/pom.xml index ba0d53f0f8..ea7a6297e4 100644 --- a/auth-audits/pom.xml +++ b/auth-audits/pom.xml @@ -29,67 +29,33 @@ auth-audits - 8 - 8 - 2.9.0 - 1.5.8 + 17 + 17 - org.apache.atlas - auth-agents-cred - ${project.version} + org.elasticsearch.client + elasticsearch-rest-high-level-client + ${elasticsearch.version} - commons-lang - commons-lang - ${commons-lang.version} + com.fasterxml.jackson.core + jackson-databind + ${jackson.databind.version} + - org.apache.solr - solr-solrj - ${solr.version} - - - io.netty - * - - - org.eclipse.jetty.http2 - * - - - org.apache.commons - commons-math3 - - - commons-io - commons-io - - - org.apache.httpcomponents - * - - - org.apache.zookeeper - * - - - org.codehaus.woodstox - * - - - org.eclipse.jetty - * - - + org.apache.commons + commons-lang3 + 3.4 + - org.elasticsearch.client - elasticsearch-rest-high-level-client - ${elasticsearch.version} + com.fasterxml.jackson.core + jackson-annotations + ${jackson.version} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/destination/ElasticSearchAuditDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/destination/ElasticSearchAuditDestination.java index 114d3fe4eb..d28464ed83 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/destination/ElasticSearchAuditDestination.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/destination/ElasticSearchAuditDestination.java @@ -19,21 +19,15 @@ package org.apache.atlas.audit.destination; -import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpHost; -import org.apache.http.auth.AuthSchemeProvider; import org.apache.http.client.CredentialsProvider; -import 
org.apache.http.client.config.AuthSchemes; -import org.apache.http.config.Lookup; -import org.apache.http.config.RegistryBuilder; -import org.apache.http.impl.auth.SPNegoSchemeFactory; import org.apache.atlas.audit.model.AuditEventBase; import org.apache.atlas.audit.model.AuthzAuditEvent; import org.apache.atlas.audit.provider.MiscUtil; -import org.apache.atlas.authorization.credutils.CredentialsProviderUtil; -import org.apache.atlas.authorization.credutils.kerberos.KerberosCredentialsProvider; +import org.apache.atlas.audit.utils.CredentialsProviderUtil; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -44,14 +38,9 @@ import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.RestHighLevelClient; -import javax.security.auth.Subject; -import javax.security.auth.kerberos.KerberosTicket; -import java.io.File; -import java.security.PrivilegedActionException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Date; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -78,7 +67,6 @@ public class ElasticSearchAuditDestination extends AuditDestination { private int port; private String password; private String hosts; - private Subject subject; public ElasticSearchAuditDestination() { propPrefix = CONFIG_PREFIX; @@ -185,21 +173,6 @@ synchronized RestHighLevelClient getClient() { } } } - if (subject != null) { - KerberosTicket ticket = CredentialsProviderUtil.getTGT(subject); - try { - if (new Date().getTime() > ticket.getEndTime().getTime()){ - client = null; - CredentialsProviderUtil.ticketExpireTime80 = 0; - newClient(); - } else if (CredentialsProviderUtil.ticketWillExpire(ticket)) { - subject = CredentialsProviderUtil.login(user, password); - } - } catch (PrivilegedActionException e) { - LOG.error("PrivilegedActionException:", e); - throw new RuntimeException(e); - } - } return client; } @@ -212,22 +185,12 @@ public static RestClientBuilder getRestClientBuilder(String urls, String protoco .toArray(i -> new HttpHost[i]) ); if (StringUtils.isNotBlank(user) && StringUtils.isNotBlank(password) && !user.equalsIgnoreCase("NONE") && !password.equalsIgnoreCase("NONE")) { - if (password.contains("keytab") && new File(password).exists()) { - final KerberosCredentialsProvider credentialsProvider = - CredentialsProviderUtil.getKerberosCredentials(user, password); - Lookup authSchemeRegistry = RegistryBuilder.create() - .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory()).build(); - restClientBuilder.setHttpClientConfigCallback(clientBuilder -> { - clientBuilder.setDefaultCredentialsProvider(credentialsProvider); - clientBuilder.setDefaultAuthSchemeRegistry(authSchemeRegistry); - return clientBuilder; - }); - } else { - final CredentialsProvider credentialsProvider = + + final CredentialsProvider credentialsProvider = CredentialsProviderUtil.getBasicCredentials(user, password); - restClientBuilder.setHttpClientConfigCallback(clientBuilder -> + restClientBuilder.setHttpClientConfigCallback(clientBuilder -> clientBuilder.setDefaultCredentialsProvider(credentialsProvider)); - } + } else { LOG.error("ElasticSearch Credentials not provided!!"); final CredentialsProvider credentialsProvider = null; @@ -239,9 +202,6 @@ public static RestClientBuilder getRestClientBuilder(String urls, String protoco private RestHighLevelClient newClient() { try { - if (StringUtils.isNotBlank(user) && 
StringUtils.isNotBlank(password) && password.contains("keytab") && new File(password).exists()) { - subject = CredentialsProviderUtil.login(user, password); - } RestClientBuilder restClientBuilder = getRestClientBuilder(hosts, protocol, user, password, port); RestHighLevelClient restHighLevelClient = new RestHighLevelClient(restClientBuilder); diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/destination/FileAuditDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/destination/FileAuditDestination.java deleted file mode 100644 index 1bbc04edde..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/destination/FileAuditDestination.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.destination; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.provider.MiscUtil; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.List; -import java.util.Properties; - -/** - * This class write the logs to local file - */ -public class FileAuditDestination extends AuditDestination { - private static final Log logger = LogFactory - .getLog(FileAuditDestination.class); - - public static final String PROP_FILE_LOCAL_DIR = "dir"; - public static final String PROP_FILE_LOCAL_FILE_NAME_FORMAT = "filename.format"; - public static final String PROP_FILE_FILE_ROLLOVER = "file.rollover.sec"; - - String baseFolder = null; - String fileFormat = null; - int fileRolloverSec = 24 * 60 * 60; // In seconds - private String logFileNameFormat; - - boolean initDone = false; - - private File logFolder; - PrintWriter logWriter = null; - - private Date fileCreateTime = null; - - private String currentFileName; - - private boolean isStopped = false; - - @Override - public void init(Properties prop, String propPrefix) { - super.init(prop, propPrefix); - - // Initialize properties for this class - // Initial folder and file properties - String logFolderProp = MiscUtil.getStringProperty(props, propPrefix - + "." + PROP_FILE_LOCAL_DIR); - logFileNameFormat = MiscUtil.getStringProperty(props, propPrefix + "." - + PROP_FILE_LOCAL_FILE_NAME_FORMAT); - fileRolloverSec = MiscUtil.getIntProperty(props, propPrefix + "." - + PROP_FILE_FILE_ROLLOVER, fileRolloverSec); - - if (logFolderProp == null || logFolderProp.isEmpty()) { - logger.error("File destination folder is not configured. Please set " - + propPrefix - + "." - + PROP_FILE_LOCAL_DIR - + ". 
name=" - + getName()); - return; - } - logFolder = new File(logFolderProp); - if (!logFolder.isDirectory()) { - logFolder.mkdirs(); - if (!logFolder.isDirectory()) { - logger.error("FileDestination folder not found and can't be created. folder=" - + logFolder.getAbsolutePath() + ", name=" + getName()); - return; - } - } - logger.info("logFolder=" + logFolder + ", name=" + getName()); - - if (logFileNameFormat == null || logFileNameFormat.isEmpty()) { - logFileNameFormat = "%app-type%_ranger_audit.log"; - } - - logger.info("logFileNameFormat=" + logFileNameFormat + ", destName=" - + getName()); - - initDone = true; - } - - @Override - synchronized public boolean logJSON(Collection events) { - logStatusIfRequired(); - addTotalCount(events.size()); - - if (isStopped) { - logError("log() called after stop was requested. name=" + getName()); - addDeferredCount(events.size()); - return false; - } - - try { - PrintWriter out = getLogFileStream(); - for (String event : events) { - out.println(event); - } - out.flush(); - } catch (Throwable t) { - addDeferredCount(events.size()); - logError("Error writing to log file.", t); - return false; - } - addSuccessCount(events.size()); - return true; - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.ranger.audit.provider.AuditProvider#log(java.util.Collection) - */ - @Override - public boolean log(Collection events) { - if (isStopped) { - addTotalCount(events.size()); - addDeferredCount(events.size()); - logError("log() called after stop was requested. name=" + getName()); - return false; - } - List jsonList = new ArrayList(); - for (AuditEventBase event : events) { - try { - jsonList.add(MiscUtil.stringify(event)); - } catch (Throwable t) { - addTotalCount(1); - addFailedCount(1); - logFailedEvent(event); - logger.error("Error converting to JSON. event=" + event); - } - } - return logJSON(jsonList); - - } - - /* - * (non-Javadoc) - * - * @see org.apache.ranger.audit.provider.AuditProvider#start() - */ - @Override - public void start() { - // Nothing to do here. We will open the file when the first log request - // comes - } - - @Override - synchronized public void stop() { - isStopped = true; - if (logWriter != null) { - try { - logWriter.flush(); - logWriter.close(); - } catch (Throwable t) { - logger.error("Error on closing log writter. Exception will be ignored. name=" - + getName() + ", fileName=" + currentFileName); - } - logWriter = null; - } - logStatus(); - } - - // Helper methods in this class - synchronized private PrintWriter getLogFileStream() throws Exception { - closeFileIfNeeded(); - - // Either there are no open log file or the previous one has been rolled - // over - if (logWriter == null) { - Date currentTime = new Date(); - // Create a new file - String fileName = MiscUtil.replaceTokens(logFileNameFormat, - currentTime.getTime()); - File outLogFile = new File(logFolder, fileName); - if (outLogFile.exists()) { - // Let's try to get the next available file - int i = 0; - while (true) { - i++; - int lastDot = fileName.lastIndexOf('.'); - String baseName = fileName.substring(0, lastDot); - String extension = fileName.substring(lastDot); - String newFileName = baseName + "." + i + extension; - File newLogFile = new File(logFolder, newFileName); - if (!newLogFile.exists()) { - // Move the file - if (!outLogFile.renameTo(newLogFile)) { - logger.error("Error renameing file. " + outLogFile - + " to " + newLogFile); - } - break; - } - } - } - if (!outLogFile.exists()) { - logger.info("Creating new file. 
destName=" + getName() - + ", fileName=" + fileName); - // Open the file - logWriter = new PrintWriter(new BufferedWriter(new FileWriter( - outLogFile))); - } else { - logWriter = new PrintWriter(new BufferedWriter(new FileWriter( - outLogFile, true))); - } - fileCreateTime = new Date(); - currentFileName = outLogFile.getPath(); - } - return logWriter; - } - - private void closeFileIfNeeded() { - if (logWriter == null) { - return; - } - if (System.currentTimeMillis() - fileCreateTime.getTime() > fileRolloverSec * 1000) { - logger.info("Closing file. Rolling over. name=" + getName() - + ", fileName=" + currentFileName); - try { - logWriter.flush(); - logWriter.close(); - } catch (Throwable t) { - logger.error("Error on closing log writter. Exception will be ignored. name=" - + getName() + ", fileName=" + currentFileName); - } - logWriter = null; - currentFileName = null; - } - } - -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/destination/HDFSAuditDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/destination/HDFSAuditDestination.java deleted file mode 100644 index 1e048ea107..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/destination/HDFSAuditDestination.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.atlas.audit.destination; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.provider.AuditWriterFactory; -import org.apache.atlas.audit.provider.MiscUtil; -import org.apache.atlas.audit.utils.RangerAuditWriter; - -import java.io.File; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -/** - * This class write the logs to local file - */ -public class HDFSAuditDestination extends AuditDestination { - private static final Log logger = LogFactory - .getLog(HDFSAuditDestination.class); - - private Map auditConfigs = null; - private String auditProviderName = null; - private RangerAuditWriter auditWriter = null; - private boolean initDone = false; - private boolean isStopped = false; - - @Override - public void init(Properties prop, String propPrefix) { - super.init(prop, propPrefix); - this.auditProviderName = getName(); - this.auditConfigs = configProps; - - try { - this.auditWriter = getWriter(); - this.initDone = true; - } catch (Exception e) { - logger.error("Error while getting Audit writer", e); - } - } - - @Override - synchronized public boolean logJSON(final Collection events) { - logStatusIfRequired(); - addTotalCount(events.size()); - - if (!initDone) { - addDeferredCount(events.size()); - return false; - } - if (isStopped) { - addDeferredCount(events.size()); - logError("log() called after stop was requested. name=" + getName()); - return false; - } - try { - boolean ret = auditWriter.log(events); - if (!ret) { - addDeferredCount(events.size()); - return false; - } - } catch (Throwable t) { - addDeferredCount(events.size()); - logError("Error writing to log file.", t); - return false; - } finally { - logger.info("Flushing HDFS audit. Event Size:" + events.size()); - if (auditWriter != null) { - flush(); - } - } - addSuccessCount(events.size()); - return true; - } - - @Override - synchronized public boolean logFile(final File file) { - logStatusIfRequired(); - if (!initDone) { - return false; - } - if (isStopped) { - logError("log() called after stop was requested. name=" + getName()); - return false; - } - - try { - boolean ret = auditWriter.logFile(file); - if (!ret) { - return false; - } - } catch (Throwable t) { - logError("Error writing to log file.", t); - return false; - } finally { - logger.info("Flushing HDFS audit. File:" + file.getAbsolutePath() + file.getName()); - if (auditWriter != null) { - flush(); - } - } - return true; - } - - @Override - public void flush() { - logger.info("Flush called. name=" + getName()); - MiscUtil.executePrivilegedAction(new PrivilegedAction() { - @Override - public Void run() { - auditWriter.flush(); - return null; - } - }); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.ranger.audit.provider.AuditProvider#log(java.util.Collection) - */ - @Override - public boolean log(Collection events) { - if (isStopped) { - logStatusIfRequired(); - addTotalCount(events.size()); - addDeferredCount(events.size()); - logError("log() called after stop was requested. name=" + getName()); - return false; - } - List jsonList = new ArrayList(); - for (AuditEventBase event : events) { - try { - jsonList.add(MiscUtil.stringify(event)); - } catch (Throwable t) { - logger.error("Error converting to JSON. 
event=" + event); - addTotalCount(1); - addFailedCount(1); - logFailedEvent(event); - } - } - return logJSON(jsonList); - - } - - /* - * (non-Javadoc) - * - * @see org.apache.ranger.audit.provider.AuditProvider#start() - */ - @Override - public void start() { - // Nothing to do here. We will open the file when the first log request - // comes - } - - @Override - synchronized public void stop() { - auditWriter.stop(); - logStatus(); - isStopped = true; - } - - public RangerAuditWriter getWriter() throws Exception { - AuditWriterFactory auditWriterFactory = AuditWriterFactory.getInstance(); - auditWriterFactory.init(props, propPrefix, auditProviderName, auditConfigs); - return auditWriterFactory.getAuditWriter(); - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/destination/Log4JAuditDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/destination/Log4JAuditDestination.java deleted file mode 100644 index cb3c80db28..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/destination/Log4JAuditDestination.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.destination; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.provider.MiscUtil; - -import java.util.Collection; -import java.util.Properties; - -public class Log4JAuditDestination extends AuditDestination { - private static final Log logger = LogFactory - .getLog(Log4JAuditDestination.class); - - private static Log auditLogger = null; - - public static final String PROP_LOG4J_LOGGER = "logger"; - public static final String DEFAULT_LOGGER_PREFIX = "ranger.audit"; - private String loggerName = null; - - public Log4JAuditDestination() { - logger.info("Log4JAuditDestination() called."); - - } - - @Override - public void init(Properties prop, String propPrefix) { - super.init(prop, propPrefix); - loggerName = MiscUtil.getStringProperty(props, propPrefix + "." - + PROP_LOG4J_LOGGER); - if (loggerName == null || loggerName.isEmpty()) { - loggerName = DEFAULT_LOGGER_PREFIX + "." + getName(); - logger.info("Logger property " + propPrefix + "." - + PROP_LOG4J_LOGGER + " was not set. Constructing default=" - + loggerName); - } - logger.info("Logger name for " + getName() + " is " + loggerName); - auditLogger = LogFactory.getLog(loggerName); - logger.info("Done initializing logger for audit. 
name=" + getName() - + ", loggerName=" + loggerName); - } - - - @Override - public void stop() { - super.stop(); - logStatus(); - } - - @Override - public boolean log(AuditEventBase event) { - if (!auditLogger.isInfoEnabled()) { - logStatusIfRequired(); - addTotalCount(1); - return true; - } - - if (event != null) { - String eventStr = MiscUtil.stringify(event); - logJSON(eventStr); - } - return true; - } - - @Override - public boolean log(Collection events) { - if (!auditLogger.isInfoEnabled()) { - logStatusIfRequired(); - addTotalCount(events.size()); - return true; - } - - for (AuditEventBase event : events) { - log(event); - } - return true; - } - - @Override - public boolean logJSON(String event) { - logStatusIfRequired(); - addTotalCount(1); - if (!auditLogger.isInfoEnabled()) { - return true; - } - - if (event != null) { - auditLogger.info(event); - addSuccessCount(1); - } - return true; - } - - @Override - public boolean logJSON(Collection events) { - if (!auditLogger.isInfoEnabled()) { - logStatusIfRequired(); - addTotalCount(events.size()); - return true; - } - - for (String event : events) { - logJSON(event); - } - return false; - } - -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/model/AuthzAuditEvent.java b/auth-audits/src/main/java/org/apache/atlas/audit/model/AuthzAuditEvent.java index f0bae5480c..b0eda51b9a 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/model/AuthzAuditEvent.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/model/AuthzAuditEvent.java @@ -19,8 +19,8 @@ package org.apache.atlas.audit.model; -import com.google.gson.annotations.SerializedName; -import org.apache.commons.lang.StringUtils; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.commons.lang3.StringUtils; import java.util.Date; import java.util.HashSet; @@ -32,96 +32,96 @@ public class AuthzAuditEvent extends AuditEventBase { protected static final int MAX_ACTION_FIELD_SIZE = 1800; protected static final int MAX_REQUEST_DATA_FIELD_SIZE = 1800; - @SerializedName("repoType") + @JsonProperty("repoType") protected int repositoryType = 0; - @SerializedName("repo") + @JsonProperty("repo") protected String repositoryName = null; - @SerializedName("reqUser") + @JsonProperty("reqUser") protected String user = null; - @SerializedName("reqEntityGuid") + @JsonProperty("reqEntityGuid") protected String entityGuid = null; - @SerializedName("evtTime") + @JsonProperty("evtTime") protected Date eventTime = new Date(); - @SerializedName("access") + @JsonProperty("access") protected String accessType = null; - @SerializedName("resource") + @JsonProperty("resource") protected String resourcePath = null; - @SerializedName("resType") + @JsonProperty("resType") protected String resourceType = null; - @SerializedName("action") + @JsonProperty("action") protected String action = null; - @SerializedName("result") + @JsonProperty("result") protected short accessResult = 0; // 0 - DENIED; 1 - ALLOWED; HTTP return // code - @SerializedName("agent") + @JsonProperty("agent") protected String agentId = null; - @SerializedName("policyId") + @JsonProperty("policyId") protected String policyId = "-1"; - @SerializedName("reason") + @JsonProperty("reason") protected String resultReason = null; - @SerializedName("enforcer") + @JsonProperty("enforcer") protected String aclEnforcer = null; - @SerializedName("sess") + @JsonProperty("sess") protected String sessionId = null; - @SerializedName("cliType") + @JsonProperty("cliType") protected String clientType = null; - 
@SerializedName("cliIP") + @JsonProperty("cliIP") protected String clientIP = null; - @SerializedName("reqData") + @JsonProperty("reqData") protected String requestData = null; - @SerializedName("agentHost") + @JsonProperty("agentHost") protected String agentHostname = null; - @SerializedName("logType") + @JsonProperty("logType") protected String logType = null; - @SerializedName("id") + @JsonProperty("id") protected String eventId = null; /** * This to ensure order within a session. Order not guaranteed across * processes and hosts */ - @SerializedName("seq_num") + @JsonProperty("seq_num") protected long seqNum = 0; - @SerializedName("event_count") + @JsonProperty("event_count") protected long eventCount = 1; - @SerializedName("event_dur_ms") + @JsonProperty("event_dur_ms") protected long eventDurationMS = 0; - @SerializedName("tags") + @JsonProperty("tags") protected Set tags = new HashSet<>(); - @SerializedName("additional_info") + @JsonProperty("additional_info") protected String additionalInfo; - @SerializedName("cluster_name") + @JsonProperty("cluster_name") protected String clusterName; - @SerializedName("zone_name") + @JsonProperty("zone_name") protected String zoneName; - @SerializedName("policy_version") + @JsonProperty("policy_version") protected Long policyVersion; public AuthzAuditEvent() { diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/model/EnumRepositoryType.java b/auth-audits/src/main/java/org/apache/atlas/audit/model/EnumRepositoryType.java deleted file mode 100644 index 4deeaccc8f..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/model/EnumRepositoryType.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.atlas.audit.model; - -public final class EnumRepositoryType { - - public static final int HDFS = 1; - - public static final int HBASE = 2; - - public static final int HIVE = 3; - - public static final int XAAGENT = 4; - - public static final int KNOX = 5; - - public static final int STORM = 6; - - -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditMessageException.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditMessageException.java deleted file mode 100644 index 041d443e7e..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditMessageException.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.provider; - -/** - * This exception should be thrown only when there is an error in the message - * itself. E.g. invalid field type, etc. Don't throw this exception if there is - * a transient error - */ -public class AuditMessageException extends Exception { - - private static final long serialVersionUID = 1L; - - public AuditMessageException() { - } - - /** - * @param message - */ - public AuditMessageException(String message) { - super(message); - } - - /** - * @param cause - */ - public AuditMessageException(Throwable cause) { - super(cause); - } - - /** - * @param message - * @param cause - */ - public AuditMessageException(String message, Throwable cause) { - super(message, cause); - } - - /** - * @param message - * @param cause - * @param enableSuppression - * @param writableStackTrace - */ - public AuditMessageException(String message, Throwable cause, - boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditProviderFactory.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditProviderFactory.java index c5b04a760c..b3e0619a17 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditProviderFactory.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditProviderFactory.java @@ -20,9 +20,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.ShutdownHookManager; import org.apache.atlas.audit.destination.*; -import org.apache.atlas.audit.provider.hdfs.HdfsAuditProvider; import org.apache.atlas.audit.queue.AuditAsyncQueue; import org.apache.atlas.audit.queue.AuditBatchQueue; import org.apache.atlas.audit.queue.AuditFileQueue; @@ -130,15 +128,6 @@ public synchronized void init(Properties props, String appType) { return; } - boolean isAuditToHdfsEnabled = MiscUtil.getBooleanProperty(props, - AUDIT_HDFS_IS_ENABLED_PROP, false); - boolean isAuditToLog4jEnabled = MiscUtil.getBooleanProperty(props, - AUDIT_LOG4J_IS_ENABLED_PROP, false); - boolean isAuditToKafkaEnabled = MiscUtil.getBooleanProperty(props, - AUDIT_KAFKA_IS_ENABLED_PROP, false); - boolean isAuditToSolrEnabled = MiscUtil.getBooleanProperty(props, - AUDIT_SOLR_IS_ENABLED_PROP, false); - boolean isAuditFileCacheProviderEnabled = MiscUtil.getBooleanProperty(props, AUDIT_IS_FILE_CACHE_PROVIDER_ENABLE_PROP, false); @@ -154,11 +143,9 @@ public synchronized void init(Properties props, String appType) { // Process new audit configurations List destNameList = new ArrayList(); - for (Object propNameObj : props.keySet()) { + for (Object propNameObj : props.keySet().stream().filter(x-> x.toString().startsWith(AUDIT_DEST_BASE)).toArray()) { String propName = propNameObj.toString(); - if (!propName.startsWith(AUDIT_DEST_BASE)) { - continue; - } + String 
destName = propName.substring(AUDIT_DEST_BASE.length() + 1); List splits = MiscUtil.toArray(destName, "."); if (splits.size() > 1) { @@ -279,10 +266,7 @@ public synchronized void init(Properties props, String appType) { } } else { LOG.info("No v3 audit configuration found. Trying v2 audit configurations"); - if (!isEnabled - || !(isAuditToHdfsEnabled - || isAuditToKafkaEnabled || isAuditToLog4jEnabled - || isAuditToSolrEnabled || providers.size() == 0)) { + if (!isEnabled || providers.size() == 0) { LOG.info("AuditProviderFactory: Audit not enabled.."); mProvider = getDefaultProvider(); @@ -290,75 +274,6 @@ public synchronized void init(Properties props, String appType) { return; } - if (isAuditToHdfsEnabled) { - LOG.info("HdfsAuditProvider is enabled"); - - HdfsAuditProvider hdfsProvider = new HdfsAuditProvider(); - - boolean isAuditToHdfsAsync = MiscUtil.getBooleanProperty(props, - HdfsAuditProvider.AUDIT_HDFS_IS_ASYNC_PROP, false); - - if (isAuditToHdfsAsync) { - int maxQueueSize = MiscUtil.getIntProperty(props, - HdfsAuditProvider.AUDIT_HDFS_MAX_QUEUE_SIZE_PROP, - AUDIT_ASYNC_MAX_QUEUE_SIZE_DEFAULT); - int maxFlushInterval = MiscUtil - .getIntProperty( - props, - HdfsAuditProvider.AUDIT_HDFS_MAX_FLUSH_INTERVAL_PROP, - AUDIT_ASYNC_MAX_FLUSH_INTERVAL_DEFAULT); - - AsyncAuditProvider asyncProvider = new AsyncAuditProvider( - "HdfsAuditProvider", maxQueueSize, - maxFlushInterval, hdfsProvider); - - providers.add(asyncProvider); - } else { - providers.add(hdfsProvider); - } - } - - /*if (isAuditToKafkaEnabled) { - LOG.info("KafkaAuditProvider is enabled"); - KafkaAuditProvider kafkaProvider = new KafkaAuditProvider(); - kafkaProvider.init(props); - - if (kafkaProvider.isAsync()) { - AsyncAuditProvider asyncProvider = new AsyncAuditProvider( - "MyKafkaAuditProvider", 1000, 1000, kafkaProvider); - providers.add(asyncProvider); - } else { - providers.add(kafkaProvider); - } - }*/ - - - if (isAuditToLog4jEnabled) { - Log4jAuditProvider log4jProvider = new Log4jAuditProvider(); - - boolean isAuditToLog4jAsync = MiscUtil.getBooleanProperty( - props, Log4jAuditProvider.AUDIT_LOG4J_IS_ASYNC_PROP, - false); - - if (isAuditToLog4jAsync) { - int maxQueueSize = MiscUtil.getIntProperty(props, - Log4jAuditProvider.AUDIT_LOG4J_MAX_QUEUE_SIZE_PROP, - AUDIT_ASYNC_MAX_QUEUE_SIZE_DEFAULT); - int maxFlushInterval = MiscUtil - .getIntProperty( - props, - Log4jAuditProvider.AUDIT_LOG4J_MAX_FLUSH_INTERVAL_PROP, - AUDIT_ASYNC_MAX_FLUSH_INTERVAL_DEFAULT); - - AsyncAuditProvider asyncProvider = new AsyncAuditProvider( - "Log4jAuditProvider", maxQueueSize, - maxFlushInterval, log4jProvider); - - providers.add(asyncProvider); - } else { - providers.add(log4jProvider); - } - } if (providers.size() == 0) { mProvider = getDefaultProvider(); } else if (providers.size() == 1) { @@ -400,16 +315,8 @@ private AuditHandler getProviderFromConfig(Properties props, + ", propertyPrefix=" + propPrefix, e); } } else { - if (providerName.equalsIgnoreCase("file")) { - provider = new FileAuditDestination(); - } else if (providerName.equalsIgnoreCase("hdfs")) { - provider = new HDFSAuditDestination(); - } else if (providerName.equalsIgnoreCase("elasticsearch")) { + if (providerName.equalsIgnoreCase("elasticsearch")) { provider = new ElasticSearchAuditDestination(); - } /*else if (providerName.equalsIgnoreCase("kafka")) { - provider = new KafkaAuditProvider(); - }*/ else if (providerName.equalsIgnoreCase("log4j")) { - provider = new Log4JAuditDestination(); } else if (providerName.equalsIgnoreCase("batch")) { provider = 
getAuditProvider(props, propPrefix, consumer); } else if (providerName.equalsIgnoreCase("async")) { @@ -462,7 +369,7 @@ private void installJvmSutdownHook(Properties props) { jvmShutdownHook = new JVMShutdownHook(mProvider, shutdownHookMaxWaitSeconds); String appType = this.componentAppType; if (appType != null && !hbaseAppTypes.contains(appType)) { - ShutdownHookManager.get().addShutdownHook(jvmShutdownHook, RANGER_AUDIT_SHUTDOWN_HOOK_PRIORITY); + Runtime.getRuntime().addShutdownHook(jvmShutdownHook); } } diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditWriterFactory.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditWriterFactory.java deleted file mode 100644 index b9a98d5b4e..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/AuditWriterFactory.java +++ /dev/null @@ -1,117 +0,0 @@ -package org.apache.atlas.audit.provider; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.utils.RangerAuditWriter; - -import java.util.Map; -import java.util.Properties; - -public class AuditWriterFactory { - private static final Log logger = LogFactory.getLog(AuditWriterFactory.class); - public static final String AUDIT_FILETYPE_DEFAULT = "json"; - public static final String AUDIT_JSON_FILEWRITER_IMPL = "org.apache.ranger.audit.utils.RangerJSONAuditWriter"; - public static final String AUDIT_ORC_FILEWRITER_IMPL = "org.apache.ranger.audit.utils.RangerORCAuditWriter"; - - public Map auditConfigs = null; - public Properties props = null; - public String propPrefix = null; - public String auditProviderName = null; - public RangerAuditWriter auditWriter = null; - private static volatile AuditWriterFactory me = null; - - public static AuditWriterFactory getInstance() { - AuditWriterFactory auditWriter = me; - if (auditWriter == null) { - synchronized (AuditWriterFactory.class) { - auditWriter = me; - if (auditWriter == null) { - me = auditWriter = new AuditWriterFactory(); - } - } - } - return auditWriter; - } - - public void init(Properties props, String propPrefix, String auditProviderName, Map auditConfigs) throws Exception { - if (logger.isDebugEnabled()) { - logger.debug("==> AuditWriterFactory.init()"); - } - this.props = props; - this.propPrefix = propPrefix; - this.auditProviderName = auditProviderName; - this.auditConfigs = auditConfigs; - String auditFileType = MiscUtil.getStringProperty(props, propPrefix + ".filetype", AUDIT_FILETYPE_DEFAULT); - String writerClass = MiscUtil.getStringProperty(props, propPrefix + ".filewriter.impl"); - - auditWriter = StringUtils.isEmpty(writerClass) ? 
createWriter(getDefaultWriter(auditFileType)) : createWriter(writerClass); - - if (auditWriter != null) { - auditWriter.init(props, propPrefix, auditProviderName, auditConfigs); - } - - if (logger.isDebugEnabled()) { - logger.debug("<== AuditWriterFactory.init() :" + auditWriter.getClass().getName()); - } - } - - public RangerAuditWriter createWriter(String writerClass) throws Exception { - if (logger.isDebugEnabled()) { - logger.debug("==> AuditWriterFactory.createWriter()"); - } - RangerAuditWriter ret = null; - try { - Class cls = (Class) Class.forName(writerClass); - ret = cls.newInstance(); - } catch (Exception e) { - throw e; - } - if (logger.isDebugEnabled()) { - logger.debug("<== AuditWriterFactory.createWriter()"); - } - return ret; - } - - public String getDefaultWriter(String auditFileType) { - if (logger.isDebugEnabled()) { - logger.debug("==> AuditWriterFactory.getDefaultWriter()"); - } - String ret = null; - switch (auditFileType) { - case "orc": - ret = AUDIT_ORC_FILEWRITER_IMPL; - break; - case "json": - ret = AUDIT_JSON_FILEWRITER_IMPL; - break; - } - if (logger.isDebugEnabled()) { - logger.debug("<== AuditWriterFactory.getDefaultWriter() :" + ret); - } - return ret; - } - - public RangerAuditWriter getAuditWriter(){ - return this.auditWriter; - } -} \ No newline at end of file diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/BaseAuditHandler.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/BaseAuditHandler.java index 2cf7a43f46..f68d96f7a3 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/BaseAuditHandler.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/provider/BaseAuditHandler.java @@ -18,14 +18,11 @@ */ package org.apache.atlas.audit.provider; -import com.google.gson.GsonBuilder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.atlas.audit.model.AuditEventBase; import org.apache.atlas.audit.model.AuthzAuditEvent; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.TrustManagerFactory; import java.io.File; import java.util.ArrayList; import java.util.Collection; @@ -41,22 +38,6 @@ public abstract class BaseAuditHandler implements AuditHandler { static final String AUDIT_LOG_FAILURE_REPORT_MIN_INTERVAL_PROP = "xasecure.audit.log.failure.report.min.interval.ms"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE = "xasecure.policymgr.clientssl.keystore"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE = "xasecure.policymgr.clientssl.keystore.type"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.keystore.credential.file"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_CREDENTIAL_ALIAS = "sslKeyStore"; - public static final String RANGER_POLICYMGR_CLIENT_KEY_FILE_TYPE_DEFAULT = "jks"; - - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE = "xasecure.policymgr.clientssl.truststore"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE = "xasecure.policymgr.clientssl.truststore.type"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL = "xasecure.policymgr.clientssl.truststore.credential.file"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_CREDENTIAL_ALIAS = "sslTrustStore"; - public static final String RANGER_POLICYMGR_TRUSTSTORE_FILE_TYPE_DEFAULT = "jks"; - - public static final String RANGER_SSL_KEYMANAGER_ALGO_TYPE = KeyManagerFactory.getDefaultAlgorithm(); - public static 
final String RANGER_SSL_TRUSTMANAGER_ALGO_TYPE = TrustManagerFactory.getDefaultAlgorithm(); - public static final String RANGER_SSL_CONTEXT_ALGO_TYPE = "TLS"; - public static final String PROP_CONFIG = "config"; private int mLogFailureReportMinIntervalInMs = 60 * 1000; @@ -75,9 +56,6 @@ public abstract class BaseAuditHandler implements AuditHandler { protected String providerName = null; protected String parentPath = null; - protected int failedRetryTimes = 3; - protected int failedRetrySleep = 3 * 1000; - int errorLogIntervalMS = 30 * 1000; // Every 30 seconds long lastErrorLogMS = 0; @@ -130,14 +108,6 @@ public void init(Properties props, String basePropertyName) { } LOG.info("providerName=" + getName()); - try { - new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").create(); - } catch (Throwable excp) { - LOG.warn( - "Log4jAuditProvider.init(): failed to create GsonBuilder object. events will be formated using toString(), instead of Json", - excp); - } - mLogFailureReportMinIntervalInMs = MiscUtil.getIntProperty(props, AUDIT_LOG_FAILURE_REPORT_MIN_INTERVAL_PROP, 60 * 1000); diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/BufferedAuditProvider.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/BufferedAuditProvider.java deleted file mode 100644 index eeee82b3a3..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/BufferedAuditProvider.java +++ /dev/null @@ -1,120 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.audit.provider; - -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.model.AuthzAuditEvent; - -import java.util.Collection; - -public abstract class BufferedAuditProvider extends BaseAuditHandler { - private LogBuffer mBuffer = null; - private LogDestination mDestination = null; - - @Override - public boolean log(AuditEventBase event) { - if (event instanceof AuthzAuditEvent) { - AuthzAuditEvent authzEvent = (AuthzAuditEvent) event; - - if (authzEvent.getAgentHostname() == null) { - authzEvent.setAgentHostname(MiscUtil.getHostname()); - } - - if (authzEvent.getLogType() == null) { - authzEvent.setLogType("RangerAudit"); - } - - if (authzEvent.getEventId() == null) { - authzEvent.setEventId(MiscUtil.generateUniqueId()); - } - } - - if (!mBuffer.add(event)) { - logFailedEvent(event); - return false; - } - return true; - } - - @Override - public boolean log(Collection events) { - boolean ret = true; - for (AuditEventBase event : events) { - ret = log(event); - if (!ret) { - break; - } - } - return ret; - } - - @Override - public boolean logJSON(String event) { - AuditEventBase eventObj = MiscUtil.fromJson(event, - AuthzAuditEvent.class); - return log(eventObj); - } - - @Override - public boolean logJSON(Collection events) { - boolean ret = true; - for (String event : events) { - ret = logJSON(event); - if (!ret) { - break; - } - } - return ret; - } - - @Override - public void start() { - mBuffer.start(mDestination); - } - - @Override - public void stop() { - mBuffer.stop(); - } - - @Override - public void waitToComplete() { - } - - @Override - public void waitToComplete(long timeout) { - } - - @Override - public void flush() { - } - - protected LogBuffer getBuffer() { - return mBuffer; - } - - protected LogDestination getDestination() { - return mDestination; - } - - protected void setBufferAndDestination(LogBuffer buffer, - LogDestination destination) { - mBuffer = buffer; - mDestination = destination; - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/DebugTracer.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/DebugTracer.java deleted file mode 100644 index 914d56b638..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/DebugTracer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.audit.provider; - -public interface DebugTracer { - void debug(String msg); - void debug(String msg, Throwable excp); - void info(String msg); - void info(String msg, Throwable excp); - void warn(String msg); - void warn(String msg, Throwable excp); - void error(String msg); - void error(String msg, Throwable excp); -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LocalFileLogBuffer.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/LocalFileLogBuffer.java deleted file mode 100644 index 291203ea5d..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LocalFileLogBuffer.java +++ /dev/null @@ -1,695 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.atlas.audit.provider; - -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileFilter; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.io.Writer; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.Comparator; -import java.util.TreeSet; - - -public class LocalFileLogBuffer implements LogBuffer { - private String mDirectory = null; - private String mFile = null; - private int mFlushIntervalSeconds = 1 * 60; - private int mFileBufferSizeBytes = 8 * 1024; - private String mEncoding = null; - private boolean mIsAppend = true; - private int mRolloverIntervalSeconds = 10 * 60; - private String mArchiveDirectory = null; - private int mArchiveFileCount = 10; - private DebugTracer mLogger = null; - - private Writer mWriter = null; - private String mBufferFilename = null; - private long mNextRolloverTime = 0; - private long mNextFlushTime = 0; - private int mFileOpenRetryIntervalInMs = 60 * 1000; - private long mNextFileOpenRetryTime = 0; - - private DestinationDispatcherThread mDispatcherThread = null; - - public LocalFileLogBuffer(DebugTracer tracer) { - mLogger = tracer; - } - - public String getDirectory() { - return mDirectory; - } - - public void setDirectory(String directory) { - mDirectory = directory; - } - - public String getFile() { - return mFile; - } - - public void setFile(String file) { - mFile = file; - } - - public int getFileBufferSizeBytes() { - return mFileBufferSizeBytes; - } - - public void setFileBufferSizeBytes(int fileBufferSizeBytes) { - mFileBufferSizeBytes = fileBufferSizeBytes; - } - - public int getFlushIntervalSeconds() { - return 
mFlushIntervalSeconds; - } - - public void setFlushIntervalSeconds(int flushIntervalSeconds) { - mFlushIntervalSeconds = flushIntervalSeconds; - } - - public String getEncoding() { - return mEncoding; - } - - public void setEncoding(String encoding) { - mEncoding = encoding; - } - - public boolean getIsAppend() { - return mIsAppend; - } - - public void setIsAppend(boolean isAppend) { - mIsAppend = isAppend; - } - - public int getRolloverIntervalSeconds() { - return mRolloverIntervalSeconds; - } - - public void setRolloverIntervalSeconds(int rolloverIntervalSeconds) { - mRolloverIntervalSeconds = rolloverIntervalSeconds; - } - - public String getArchiveDirectory() { - return mArchiveDirectory; - } - - public void setArchiveDirectory(String archiveDirectory) { - mArchiveDirectory = archiveDirectory; - } - - public int getArchiveFileCount() { - return mArchiveFileCount; - } - - public void setArchiveFileCount(int archiveFileCount) { - mArchiveFileCount = archiveFileCount; - } - - - @Override - public void start(LogDestination destination) { - mLogger.debug("==> LocalFileLogBuffer.start()"); - - mDispatcherThread = new DestinationDispatcherThread(this, destination, mLogger); - - mDispatcherThread.setDaemon(true); - - mDispatcherThread.start(); - - mLogger.debug("<== LocalFileLogBuffer.start()"); - } - - @Override - public void stop() { - mLogger.debug("==> LocalFileLogBuffer.stop()"); - - DestinationDispatcherThread dispatcherThread = mDispatcherThread; - mDispatcherThread = null; - - if(dispatcherThread != null && dispatcherThread.isAlive()) { - dispatcherThread.stopThread(); - - try { - dispatcherThread.join(); - } catch (InterruptedException e) { - mLogger.warn("LocalFileLogBuffer.stop(): failed in waiting for DispatcherThread", e); - } - } - - closeFile(); - - mLogger.debug("<== LocalFileLogBuffer.stop()"); - } - - @Override - public boolean isAvailable() { - return mWriter != null; - } - - @Override - public boolean add(T log) { - boolean ret = false; - - String msg = MiscUtil.stringify(log); - - if(msg.contains(MiscUtil.LINE_SEPARATOR)) { - msg = msg.replace(MiscUtil.LINE_SEPARATOR, MiscUtil.ESCAPE_STR + MiscUtil.LINE_SEPARATOR); - } - - synchronized(this) { - checkFileStatus(); - - Writer writer = mWriter; - - if(writer != null) { - try { - writer.write(msg + MiscUtil.LINE_SEPARATOR); - - if(mFileBufferSizeBytes == 0) { - writer.flush(); - } - - ret = true; - } catch(IOException excp) { - mLogger.warn("LocalFileLogBuffer.add(): write failed", excp); - - closeFile(); - } - } - } - - return ret; - } - - @Override - public boolean isEmpty() { - return mDispatcherThread == null || mDispatcherThread.isIdle(); - } - - private synchronized void openFile() { - mLogger.debug("==> LocalFileLogBuffer.openFile()"); - - long now = System.currentTimeMillis(); - - closeFile(); - - if(mNextFileOpenRetryTime <= now) { - try { - mNextRolloverTime = MiscUtil.getNextRolloverTime(mNextRolloverTime, (mRolloverIntervalSeconds * 1000L)); - - long startTime = MiscUtil.getRolloverStartTime(mNextRolloverTime, (mRolloverIntervalSeconds * 1000L)); - - mBufferFilename = MiscUtil.replaceTokens(mDirectory + File.separator + mFile, startTime); - - MiscUtil.createParents(new File(mBufferFilename)); - - FileOutputStream ostream = null; - try { - ostream = new FileOutputStream(mBufferFilename, mIsAppend); - } catch(Exception excp) { - mLogger.warn("LocalFileLogBuffer.openFile(): failed to open file " + mBufferFilename, excp); - } - - if(ostream != null) { - mWriter = createWriter(ostream); - - if(mWriter != null) { - 
mLogger.debug("LocalFileLogBuffer.openFile(): opened file " + mBufferFilename); - - mNextFlushTime = System.currentTimeMillis() + (mFlushIntervalSeconds * 1000L); - } else { - mLogger.warn("LocalFileLogBuffer.openFile(): failed to open file for write " + mBufferFilename); - - mBufferFilename = null; - } - } - } finally { - if(mWriter == null) { - mNextFileOpenRetryTime = now + mFileOpenRetryIntervalInMs; - } - } - } - - mLogger.debug("<== LocalFileLogBuffer.openFile()"); - } - - private synchronized void closeFile() { - mLogger.debug("==> LocalFileLogBuffer.closeFile()"); - - Writer writer = mWriter; - - mWriter = null; - - if(writer != null) { - try { - writer.flush(); - writer.close(); - } catch(IOException excp) { - mLogger.warn("LocalFileLogBuffer: failed to close file " + mBufferFilename, excp); - } - - if(mDispatcherThread != null) { - mDispatcherThread.addLogfile(mBufferFilename); - } - } - - mLogger.debug("<== LocalFileLogBuffer.closeFile()"); - } - - private void rollover() { - mLogger.debug("==> LocalFileLogBuffer.rollover()"); - - closeFile(); - - openFile(); - - mLogger.debug("<== LocalFileLogBuffer.rollover()"); - } - - private void checkFileStatus() { - long now = System.currentTimeMillis(); - - if(now > mNextRolloverTime) { - rollover(); - } else if(mWriter == null) { - openFile(); - } else if(now > mNextFlushTime) { - try { - mNextFlushTime = now + (mFlushIntervalSeconds * 1000L); - - mWriter.flush(); - } catch (IOException excp) { - mLogger.warn("LocalFileLogBuffer: failed to flush to file " + mBufferFilename, excp); - } - } - } - - private Writer createWriter(OutputStream os ) { - Writer writer = null; - - if(os != null) { - if(mEncoding != null) { - try { - writer = new OutputStreamWriter(os, mEncoding); - } catch(UnsupportedEncodingException excp) { - mLogger.warn("LocalFileLogBuffer: failed to create output writer for file " + mBufferFilename, excp); - } - } - - if(writer == null) { - writer = new OutputStreamWriter(os); - } - - if(mFileBufferSizeBytes > 0 && writer != null) { - writer = new BufferedWriter(writer, mFileBufferSizeBytes); - } - } - - return writer; - } - - boolean isCurrentFilename(String filename) { - return filename != null && filename.equals(mBufferFilename); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - - sb.append("LocalFileLogBuffer {"); - sb.append("Directory=").append(mDirectory).append("; "); - sb.append("File=").append(mFile).append("; "); - sb.append("RolloverIntervaSeconds=").append(mRolloverIntervalSeconds).append("; "); - sb.append("ArchiveDirectory=").append(mArchiveDirectory).append("; "); - sb.append("ArchiveFileCount=").append(mArchiveFileCount); - sb.append("}"); - - return sb.toString(); - } - -} - -class DestinationDispatcherThread extends Thread { - private TreeSet mCompletedLogfiles = new TreeSet(); - private boolean mStopThread = false; - private LocalFileLogBuffer mFileLogBuffer = null; - private LogDestination mDestination = null; - private DebugTracer mLogger = null; - - private String mCurrentLogfile = null; - - public DestinationDispatcherThread(LocalFileLogBuffer fileLogBuffer, LogDestination destination, DebugTracer tracer) { - super(DestinationDispatcherThread.class.getSimpleName() + "-" + System.currentTimeMillis()); - - mLogger = tracer; - - mFileLogBuffer = fileLogBuffer; - mDestination = destination; - - setDaemon(true); - } - - public void addLogfile(String filename) { - mLogger.debug("==> DestinationDispatcherThread.addLogfile(" + filename + ")"); - - if(filename != null) 
{ - synchronized(mCompletedLogfiles) { - mCompletedLogfiles.add(filename); - mCompletedLogfiles.notify(); - } - } - - mLogger.debug("<== DestinationDispatcherThread.addLogfile(" + filename + ")"); - } - - public void stopThread() { - mStopThread = true; - } - - public boolean isIdle() { - synchronized(mCompletedLogfiles) { - return mCompletedLogfiles.isEmpty() && mCurrentLogfile == null; - } - } - - @Override - public void run() { - UserGroupInformation loginUser = null; - - try { - loginUser = UserGroupInformation.getLoginUser(); - } catch (IOException excp) { - mLogger.error("DestinationDispatcherThread.run(): failed to get login user details. Audit files will not be sent to HDFS destination", excp); - } - - if(loginUser == null) { - mLogger.error("DestinationDispatcherThread.run(): failed to get login user. Audit files will not be sent to HDFS destination"); - - return; - } - - loginUser.doAs(new PrivilegedAction() { - @Override - public Integer run() { - doRun(); - - return 0; - } - }); - } - - private void doRun() { - init(); - - mDestination.start(); - - long pollIntervalInMs = 1000L; - - while(! mStopThread) { - synchronized(mCompletedLogfiles) { - while(mCompletedLogfiles.isEmpty() && !mStopThread) { - try { - mCompletedLogfiles.wait(pollIntervalInMs); - } catch(InterruptedException excp) { - throw new RuntimeException("DestinationDispatcherThread.run(): failed to wait for log file", excp); - } - } - - mCurrentLogfile = mCompletedLogfiles.pollFirst(); - } - - if(mCurrentLogfile != null) { - sendCurrentFile(); - } - } - - mDestination.stop(); - } - - private void init() { - mLogger.debug("==> DestinationDispatcherThread.init()"); - - String dirName = MiscUtil.replaceTokens(mFileLogBuffer.getDirectory(), 0); - - if(dirName != null) { - File directory = new File(dirName); - - if(directory.exists() && directory.isDirectory()) { - File[] files = directory.listFiles(); - - if(files != null) { - for(File file : files) { - if(file.exists() && file.isFile() && file.canRead()) { - String filename = file.getAbsolutePath(); - if(! 
mFileLogBuffer.isCurrentFilename(filename)) { - addLogfile(filename); - } - } - } - } - } - } - - mLogger.debug("<== DestinationDispatcherThread.init()"); - } - - private boolean sendCurrentFile() { - mLogger.debug("==> DestinationDispatcherThread.sendCurrentFile()"); - - boolean ret = false; - - long destinationPollIntervalInMs = 1000L; - - BufferedReader reader = openCurrentFile(); - try { - while(!mStopThread) { - String log = getNextStringifiedLog(reader); - - if(log == null) { // reached end-of-file - ret = true; - - break; - } - - try { - // loop until log is sent successfully - while(!mStopThread && !mDestination.sendStringified(log)) { - try { - Thread.sleep(destinationPollIntervalInMs); - } catch(InterruptedException excp) { - throw new RuntimeException("LocalFileLogBuffer.sendCurrentFile(" + mCurrentLogfile + "): failed while waiting for destination to be available", excp); - } - } - } catch ( AuditMessageException msgError) { - mLogger.error("Error in log message:" + log); - //If there is error in log message, then it will be skipped - } - } - } finally { - closeCurrentFile(reader); - } - - if(!mStopThread) { - mDestination.flush(); - archiveCurrentFile(); - } - - mLogger.debug("<== DestinationDispatcherThread.sendCurrentFile()"); - - return ret; - } - - private String getNextStringifiedLog(BufferedReader mReader) { - String log = null; - - if(mReader != null) { - try { - while(true) { - String line = mReader.readLine(); - - if(line == null) { // reached end-of-file - break; - } - - if(line.endsWith(MiscUtil.ESCAPE_STR)) { - line = line.substring(0, line.length() - MiscUtil.ESCAPE_STR.length()); - - if(log == null) { - log = line; - } else { - log += MiscUtil.LINE_SEPARATOR; - log += line; - } - - continue; - } else { - if(log == null) { - log = line; - } else { - log += line; - } - break; - } - } - } catch (IOException excp) { - mLogger.warn("getNextStringifiedLog.getNextLog(): failed to read from file " + mCurrentLogfile, excp); - } - } - - return log; - } - - private BufferedReader openCurrentFile() { - mLogger.debug("==> openCurrentFile(" + mCurrentLogfile + ")"); - BufferedReader mReader = null; - - if(mCurrentLogfile != null) { - try { - FileInputStream inStr = new FileInputStream(mCurrentLogfile); - - InputStreamReader strReader = createReader(inStr); - - if(strReader != null) { - mReader = new BufferedReader(strReader); - } - } catch(FileNotFoundException excp) { - mLogger.warn("openNextFile(): error while opening file " + mCurrentLogfile, excp); - } - } - - mLogger.debug("<== openCurrentFile(" + mCurrentLogfile + ")"); - return mReader; - } - - private void closeCurrentFile(BufferedReader mReader) { - mLogger.debug("==> closeCurrentFile(" + mCurrentLogfile + ")"); - - if(mReader != null) { - try { - mReader.close(); - } catch(IOException excp) { - // ignore - } - } - - mLogger.debug("<== closeCurrentFile(" + mCurrentLogfile + ")"); - } - - private void archiveCurrentFile() { - if(mCurrentLogfile != null) { - File logFile = new File(mCurrentLogfile); - String archiveDirName = MiscUtil.replaceTokens(mFileLogBuffer.getArchiveDirectory(), 0); - String archiveFilename = archiveDirName + File.separator +logFile.getName(); - - try { - if(logFile.exists()) { - File archiveFile = new File(archiveFilename); - - MiscUtil.createParents(archiveFile); - - if(! logFile.renameTo(archiveFile)) { - // TODO: renameTo() does not work in all cases. 
in case of failure, copy the file contents to the destination and delete the file - mLogger.warn("archiving failed to move file: " + mCurrentLogfile + " ==> " + archiveFilename); - } - - File archiveDir = new File(archiveDirName); - File[] files = archiveDir.listFiles(new FileFilter() { - @Override - public boolean accept(File f) { - return f.isFile(); - } - }); - - int numOfFilesToDelete = files == null ? 0 : (files.length - mFileLogBuffer.getArchiveFileCount()); - - if(numOfFilesToDelete > 0) { - Arrays.sort(files, new Comparator() { - @Override - public int compare(File f1, File f2) { - return (int)(f1.lastModified() - f2.lastModified()); - } - }); - - for(int i = 0; i < numOfFilesToDelete; i++) { - if(! files[i].delete()) { - mLogger.warn("archiving failed to delete file: " + files[i].getAbsolutePath()); - } - } - } - } - } catch(Exception excp) { - mLogger.warn("archiveCurrentFile(): faile to move " + mCurrentLogfile + " to archive location " + archiveFilename, excp); - } - } - mCurrentLogfile = null; - } - - private InputStreamReader createReader(InputStream iStr) { - InputStreamReader reader = null; - - if(iStr != null) { - String encoding = mFileLogBuffer.getEncoding(); - - if(encoding != null) { - try { - reader = new InputStreamReader(iStr, encoding); - } catch(UnsupportedEncodingException excp) { - mLogger.warn("createReader(): failed to create input reader.", excp); - } - } - - if(reader == null) { - reader = new InputStreamReader(iStr); - } - } - - return reader; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - - sb.append("DestinationDispatcherThread {"); - sb.append("ThreadName=").append(this.getName()).append("; "); - sb.append("CompletedLogfiles.size()=").append(mCompletedLogfiles.size()).append("; "); - sb.append("StopThread=").append(mStopThread).append("; "); - sb.append("CurrentLogfile=").append(mCurrentLogfile); - sb.append("}"); - - return sb.toString(); - } -} - diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jAuditProvider.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jAuditProvider.java deleted file mode 100644 index 97a8a0d246..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jAuditProvider.java +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.audit.provider; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.destination.AuditDestination; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.model.AuthzAuditEvent; - -import java.util.Collection; -import java.util.Properties; - - -public class Log4jAuditProvider extends AuditDestination { - - private static final Log LOG = LogFactory.getLog(Log4jAuditProvider.class); - private static final Log AUDITLOG = LogFactory.getLog("xaaudit." + Log4jAuditProvider.class.getName()); - - public static final String AUDIT_LOG4J_IS_ASYNC_PROP = "xasecure.audit.log4j.is.async"; - public static final String AUDIT_LOG4J_MAX_QUEUE_SIZE_PROP = "xasecure.audit.log4j.async.max.queue.size"; - public static final String AUDIT_LOG4J_MAX_FLUSH_INTERVAL_PROP = "xasecure.audit.log4j.async.max.flush.interval.ms"; - - - public Log4jAuditProvider() { - LOG.info("Log4jAuditProvider: creating.."); - } - - @Override - public void init(Properties props) { - LOG.info("Log4jAuditProvider.init()"); - - super.init(props); - } - - @Override - public boolean log(AuditEventBase event) { - if(! AUDITLOG.isInfoEnabled()) - return true; - - if(event != null) { - String eventStr = MiscUtil.stringify(event); - AUDITLOG.info(eventStr); - } - return true; - } - - @Override - public boolean log(Collection events) { - for (AuditEventBase event : events) { - log(event); - } - return true; - } - - @Override - public boolean logJSON(String event) { - AuditEventBase eventObj = MiscUtil.fromJson(event, - AuthzAuditEvent.class); - return log(eventObj); - } - - @Override - public boolean logJSON(Collection events) { - for (String event : events) { - logJSON(event); - } - return true; - } - - @Override - public void start() { - // intentionally left empty - } - - @Override - public void stop() { - // intentionally left empty - } - - - - -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jTracer.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jTracer.java deleted file mode 100644 index 32767287af..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/Log4jTracer.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.audit.provider; - -import org.apache.commons.logging.Log; - -public class Log4jTracer implements DebugTracer { - private Log mLogger = null; - - public Log4jTracer(Log logger) { - mLogger = logger; - } - - public void debug(String msg) { - mLogger.debug(msg); - } - - public void debug(String msg, Throwable excp) { - mLogger.debug(msg, excp); - } - - public void info(String msg) { - mLogger.info(msg); - } - - public void info(String msg, Throwable excp) { - mLogger.info(msg, excp); - } - - public void warn(String msg) { - mLogger.warn(msg); - } - - public void warn(String msg, Throwable excp) { - mLogger.warn(msg, excp); - } - - public void error(String msg) { - mLogger.error(msg); - } - - public void error(String msg, Throwable excp) { - mLogger.error(msg, excp); - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LogDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/LogDestination.java deleted file mode 100644 index 644200a27a..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/LogDestination.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.atlas.audit.provider; - -import org.apache.atlas.audit.model.AuditEventBase; - -public interface LogDestination { - void start(); - - void stop(); - - boolean isAvailable(); - - boolean send(AuditEventBase log) throws AuditMessageException; - - boolean send(AuditEventBase[] logs) throws AuditMessageException; - - boolean sendStringified(String log) throws AuditMessageException; - - boolean sendStringified(String[] logs) throws AuditMessageException; - - boolean flush(); - - /** - * Name for the destination - * - * @return - */ - String getName(); -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/MiscUtil.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/MiscUtil.java index 93974cb92b..38920904ff 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/MiscUtil.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/provider/MiscUtil.java @@ -16,50 +16,27 @@ */ package org.apache.atlas.audit.provider; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authentication.util.KerberosName; -import org.apache.hadoop.security.authentication.util.KerberosUtil; -import org.apache.atlas.authorization.hadoop.utils.RangerCredentialProvider; - -import javax.security.auth.Subject; -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; -import javax.security.auth.login.LoginContext; +import org.apache.atlas.audit.utils.AuthObjectUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; -import java.io.IOException; import java.net.InetAddress; import java.rmi.dgc.VMID; -import java.security.Principal; -import java.security.PrivilegedAction; -import java.security.PrivilegedExceptionAction; import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Calendar; -import java.util.Collections; -import java.util.Date; -import java.util.GregorianCalendar; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; -import java.util.Set; import java.util.StringTokenizer; -import java.util.TimeZone; import java.util.UUID; -import java.util.regex.Pattern; -import static org.apache.hadoop.util.PlatformName.IBM_JAVA; public class MiscUtil { - private static final Log logger = LogFactory.getLog(MiscUtil.class); + private static final Logger logger = LoggerFactory.getLogger(MiscUtil.class); public static final String TOKEN_START = "%"; public static final String TOKEN_END = "%"; @@ -69,34 +46,15 @@ public class MiscUtil { public static final String TOKEN_TIME = "time:"; public static final String TOKEN_PROPERTY = "property:"; public static final String TOKEN_ENV = "env:"; - public static final String ESCAPE_STR = "\\"; private static final VMID sJvmID = new VMID(); - public static String LINE_SEPARATOR = System.getProperty("line.separator"); - - private static Gson sGsonBuilder = null; private static String sApplicationType = null; - private static UserGroupInformation ugiLoginUser = null; - private static Subject subjectLoginUser = null; private static String local_hostname = null; private static Map logHistoryList = new Hashtable(); private static int logInterval = 
30000; // 30 seconds - static { - try { - sGsonBuilder = new GsonBuilder().setDateFormat( - "yyyy-MM-dd HH:mm:ss.SSS").create(); - } catch (Throwable excp) { - logger.warn( - "failed to create GsonBuilder object. stringify() will return obj.toString(), instead of Json", - excp); - } - - initLocalHost(); - } - public static String replaceTokens(String str, long time) { if (str == null) { return str; @@ -309,10 +267,8 @@ public static String stringify(T log) { if (log != null) { if (log instanceof String) { ret = (String) log; - } else if (MiscUtil.sGsonBuilder != null) { - ret = MiscUtil.sGsonBuilder.toJson(log); } else { - ret = log.toString(); + ret = AuthObjectUtil.toJson(log); } } @@ -320,7 +276,7 @@ public static String stringify(T log) { } static public T fromJson(String jsonStr, Class clazz) { - return sGsonBuilder.fromJson(jsonStr, clazz); + return AuthObjectUtil.fromJson(jsonStr, clazz); } public static String getStringProperty(Properties props, String propName) { @@ -444,173 +400,7 @@ public static List toArray(String destListStr, String delim) { return list; } - public static String getCredentialString(String url, String alias) { - if (url != null && alias != null) { - return RangerCredentialProvider.getInstance() - .getCredentialString(url, alias); - } - return null; - } - - public static UserGroupInformation createUGIFromSubject(Subject subject) - throws IOException { - logger.info("SUBJECT " + (subject == null ? "not found" : "found")); - UserGroupInformation ugi = null; - if (subject != null) { - logger.info("SUBJECT.PRINCIPALS.size()=" - + subject.getPrincipals().size()); - Set principals = subject.getPrincipals(); - for (Principal principal : principals) { - logger.info("SUBJECT.PRINCIPAL.NAME=" + principal.getName()); - } - try { - // Do not remove the below statement. The default - // getLoginUser does some initialization which is needed - // for getUGIFromSubject() to work. - UserGroupInformation.getLoginUser(); - logger.info("Default UGI before using new Subject:" - + UserGroupInformation.getLoginUser()); - } catch (Throwable t) { - logger.error(t); - } - ugi = UserGroupInformation.getUGIFromSubject(subject); - logger.info("SUBJECT.UGI.NAME=" + ugi.getUserName() + ", ugi=" - + ugi); - } else { - logger.info("Server username is not available"); - } - return ugi; - } - - /** - * @param newUGI - * @param newSubject - */ - public static void setUGILoginUser(UserGroupInformation newUGI, - Subject newSubject) { - if (newUGI != null) { - UserGroupInformation.setLoginUser(newUGI); - ugiLoginUser = newUGI; - logger.info("Setting UGI=" + newUGI); - } else { - logger.error("UGI is null. Not setting it."); - } - if (newSubject != null) { - logger.info("Setting SUBJECT"); - subjectLoginUser = newSubject; - } - } - - public static UserGroupInformation getUGILoginUser() { - UserGroupInformation ret = ugiLoginUser; - - if (ret == null) { - try { - // Do not cache ugiLoginUser if it is not explicitly set with - // setUGILoginUser. - // It appears that the user represented by - // the returned object is periodically logged out and logged back - // in when the token is scheduled to expire. So it is better - // to get the user object every time from UserGroupInformation class and - // not cache it - ret = getLoginUser(); - } catch (IOException e) { - logger.error("Error getting UGI.", e); - } - } - - if(ret != null) { - try { - ret.checkTGTAndReloginFromKeytab(); - } catch(IOException ioe) { - logger.error("Error renewing TGT and relogin. 
Ignoring Exception, and continuing with the old TGT", ioe); - } - } - - return ret; - } - - /** - * Execute the {@link PrivilegedExceptionAction} on the {@link UserGroupInformation} if it's set, otherwise call it directly - */ - public static X executePrivilegedAction(final PrivilegedExceptionAction action) throws Exception { - final UserGroupInformation ugi = getUGILoginUser(); - if (ugi != null) { - return ugi.doAs(action); - } else { - return action.run(); - } - } - - /** - * Execute the {@link PrivilegedAction} on the {@link UserGroupInformation} if it's set, otherwise call it directly. - */ - public static X executePrivilegedAction(final PrivilegedAction action) { - final UserGroupInformation ugi = getUGILoginUser(); - if (ugi != null) { - return ugi.doAs(action); - } else { - return action.run(); - } - } - - public static Subject getSubjectLoginUser() { - return subjectLoginUser; - } - - public static String getKerberosNamesRules() { - return KerberosName.getRules(); - } - /** - * - * @param principal - * This could be in the format abc/host@domain.com - * @return - */ - static public String getShortNameFromPrincipalName(String principal) { - if (principal == null) { - return null; - } - try { - // Assuming it is kerberos name for now - KerberosName kerbrosName = new KerberosName(principal); - String userName = kerbrosName.getShortName(); - userName = StringUtils.substringBefore(userName, "/"); - userName = StringUtils.substringBefore(userName, "@"); - return userName; - } catch (Throwable t) { - logger.error("Error converting kerberos name. principal=" - + principal + ", KerberosName.rules=" + KerberosName.getRules()); - } - return principal; - } - - /** - * @param userName - * @return - */ - static public Set getGroupsForRequestUser(String userName) { - if (userName != null) { - try { - UserGroupInformation ugi = UserGroupInformation - .createRemoteUser(userName); - String[] groups = ugi.getGroupNames(); - if (groups != null && groups.length > 0) { - Set groupsSet = new java.util.HashSet(); - for (String group : groups) { - groupsSet.add(group); - } - return groupsSet; - } - } catch (Throwable e) { - logErrorMessageByInterval(logger, - "Error getting groups for users. 
userName=" + userName, e); - } - } - return Collections.emptySet(); - } - - static public boolean logErrorMessageByInterval(Log useLogger, + static public boolean logErrorMessageByInterval(Logger useLogger, String message) { return logErrorMessageByInterval(useLogger, message, null); } @@ -620,7 +410,7 @@ static public boolean logErrorMessageByInterval(Log useLogger, * @param message * @param e */ - static public boolean logErrorMessageByInterval(Log useLogger, + static public boolean logErrorMessageByInterval(Logger useLogger, String message, Throwable e) { if (message == null) { return false; @@ -652,198 +442,11 @@ static public boolean logErrorMessageByInterval(Log useLogger, } - public static void setUGIFromJAASConfig(String jaasConfigAppName) throws Exception { - String keytabFile = null; - String principal = null; - UserGroupInformation ugi = null; - if (logger.isDebugEnabled()){ - logger.debug("===> MiscUtil.setUGIFromJAASConfig() jaasConfigAppName: " + jaasConfigAppName); - } - try { - AppConfigurationEntry entries[] = Configuration.getConfiguration().getAppConfigurationEntry(jaasConfigAppName); - if(!ArrayUtils.isEmpty(entries)) { - for (AppConfigurationEntry entry : entries) { - if (entry.getOptions().get("keyTab") != null) { - keytabFile = (String) entry.getOptions().get("keyTab"); - } - if (entry.getOptions().get("principal") != null) { - principal = (String) entry.getOptions().get("principal"); - } - if (!StringUtils.isEmpty(principal) && !StringUtils.isEmpty(keytabFile)) { - break; - } - } - if (!StringUtils.isEmpty(principal) && !StringUtils.isEmpty(keytabFile)) { - // This will login and set the UGI - UserGroupInformation.loginUserFromKeytab(principal, keytabFile); - ugi = UserGroupInformation.getLoginUser(); - } else { - String error_mesage = "Unable to get the principal/keytab from jaasConfigAppName: " + jaasConfigAppName; - logger.error(error_mesage); - throw new Exception(error_mesage); - } - logger.info("MiscUtil.setUGIFromJAASConfig() UGI: " + ugi + " principal: " + principal + " keytab: " + keytabFile); - } else { - logger.warn("JAASConfig file not found! 
Ranger Plugin will not working in a Secure Cluster..."); - } - } catch ( Exception e) { - logger.error("Unable to set UGI for Principal: " + principal + " keytab: " + keytabFile ); - throw e; - } - if (logger.isDebugEnabled()) { - logger.debug("<=== MiscUtil.setUGIFromJAASConfig() jaasConfigAppName: " + jaasConfigAppName + " UGI: " + ugi + " principal: " + principal + " keytab: " + keytabFile); - } - } - - public static void authWithKerberos(String keytab, String principal, - String nameRules) { - - if (keytab == null || principal == null) { - return; - } - Subject serverSubject = new Subject(); - int successLoginCount = 0; - String[] spnegoPrincipals = null; - - try { - if (principal.equals("*")) { - spnegoPrincipals = KerberosUtil.getPrincipalNames(keytab, - Pattern.compile("HTTP/.*")); - if (spnegoPrincipals.length == 0) { - logger.error("No principals found in keytab=" + keytab); - } - } else { - spnegoPrincipals = new String[] { principal }; - } - - if (nameRules != null) { - KerberosName.setRules(nameRules); - } - - boolean useKeytab = true; - if (!useKeytab) { - logger.info("Creating UGI with subject"); - LoginContext loginContext = null; - List loginContexts = new ArrayList(); - for (String spnegoPrincipal : spnegoPrincipals) { - try { - logger.info("Login using keytab " + keytab - + ", for principal " + spnegoPrincipal); - final KerberosConfiguration kerberosConfiguration = new KerberosConfiguration( - keytab, spnegoPrincipal); - loginContext = new LoginContext("", - serverSubject, null, kerberosConfiguration); - loginContext.login(); - successLoginCount++; - logger.info("Login success keytab " + keytab - + ", for principal " + spnegoPrincipal); - loginContexts.add(loginContext); - } catch (Throwable t) { - logger.error("Login failed keytab " + keytab - + ", for principal " + spnegoPrincipal, t); - } - if (successLoginCount > 0) { - logger.info("Total login success count=" - + successLoginCount); - try { - UserGroupInformation - .loginUserFromSubject(serverSubject); - // UserGroupInformation ugi = - // createUGIFromSubject(serverSubject); - // if (ugi != null) { - // setUGILoginUser(ugi, serverSubject); - // } - } catch (Throwable e) { - logger.error("Error creating UGI from subject. subject=" - + serverSubject); - } finally { - if (loginContext != null) { - loginContext.logout(); - } - } - } else { - logger.error("Total logins were successfull from keytab=" - + keytab + ", principal=" + principal); - } - } - } else { - logger.info("Creating UGI from keytab directly. keytab=" - + keytab + ", principal=" + spnegoPrincipals[0]); - UserGroupInformation ugi = UserGroupInformation - .loginUserFromKeytabAndReturnUGI(spnegoPrincipals[0], - keytab); - MiscUtil.setUGILoginUser(ugi, null); - } - - } catch (Throwable t) { - logger.error("Failed to login with given keytab and principal", t); - } - - } - static class LogHistory { long lastLogTime = 0; int counter = 0; } - /** - * Kerberos context configuration for the JDK GSS library. - */ - private static class KerberosConfiguration extends Configuration { - private String keytab; - private String principal; - - public KerberosConfiguration(String keytab, String principal) { - this.keytab = keytab; - this.principal = principal; - } - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String name) { - Map options = new HashMap(); - if (IBM_JAVA) { - options.put("useKeytab", keytab.startsWith("file://") ? 
keytab - : "file://" + keytab); - options.put("principal", principal); - options.put("credsType", "acceptor"); - } else { - options.put("keyTab", keytab); - options.put("principal", principal); - options.put("useKeyTab", "true"); - options.put("storeKey", "true"); - options.put("doNotPrompt", "true"); - options.put("useTicketCache", "true"); - options.put("renewTGT", "true"); - options.put("isInitiator", "false"); - } - options.put("refreshKrb5Config", "true"); - String ticketCache = System.getenv("KRB5CCNAME"); - if (ticketCache != null) { - if (IBM_JAVA) { - options.put("useDefaultCcache", "true"); - // The first value searched when "useDefaultCcache" is used. - System.setProperty("KRB5CCNAME", ticketCache); - options.put("renewTGT", "true"); - options.put("credsType", "both"); - } else { - options.put("ticketCache", ticketCache); - } - } - if (logger.isDebugEnabled()) { - options.put("debug", "true"); - } - - return new AppConfigurationEntry[] { new AppConfigurationEntry( - KerberosUtil.getKrb5LoginModuleName(), - AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, - options), }; - } - } - - public static UserGroupInformation getLoginUser() throws IOException { - return UserGroupInformation.getLoginUser(); - } - private static void initLocalHost() { if ( logger.isDebugEnabled() ) { logger.debug("==> MiscUtil.initLocalHost()"); @@ -858,24 +461,6 @@ private static void initLocalHost() { logger.debug("<== MiscUtil.initLocalHost()"); } } - public static Date getUTCDateForLocalDate(Date date) { - TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT+0"); - Calendar local = Calendar.getInstance(); - int offset = local.getTimeZone().getOffset(local.getTimeInMillis()); - GregorianCalendar utc = new GregorianCalendar(gmtTimeZone); - utc.setTimeInMillis(date.getTime()); - utc.add(Calendar.MILLISECOND, -offset); - return utc.getTime(); - } - public static Date getUTCDate() { - TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT+0"); - Calendar local = Calendar.getInstance(); - int offset = local.getTimeZone().getOffset(local.getTimeInMillis()); - GregorianCalendar utc = new GregorianCalendar(gmtTimeZone); - utc.setTimeInMillis(local.getTimeInMillis()); - utc.add(Calendar.MILLISECOND, -offset); - return utc.getTime(); - } // use Holder class to defer initialization until needed private static class RandomHolder { diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/MultiDestAuditProvider.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/MultiDestAuditProvider.java index 91acde58e5..78c6d55bc0 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/MultiDestAuditProvider.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/provider/MultiDestAuditProvider.java @@ -40,13 +40,6 @@ public MultiDestAuditProvider() { setName(DEFAULT_NAME); } - public MultiDestAuditProvider(AuditHandler provider) { - LOG.info("MultiDestAuditProvider(): provider=" - + (provider == null ? 
null : provider.getName())); - setName(DEFAULT_NAME); - addAuditProvider(provider); - } - @Override public void init(Properties props) { LOG.info("MultiDestAuditProvider.init()"); diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/StandAloneAuditProviderFactory.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/StandAloneAuditProviderFactory.java deleted file mode 100644 index 2ab6d63d9d..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/StandAloneAuditProviderFactory.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.audit.provider; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class StandAloneAuditProviderFactory extends AuditProviderFactory { - private static final Log LOG = LogFactory.getLog(StandAloneAuditProviderFactory.class); - - private volatile static StandAloneAuditProviderFactory sFactory = null; - - public static StandAloneAuditProviderFactory getInstance() { - StandAloneAuditProviderFactory ret = sFactory; - if(ret == null) { - synchronized(StandAloneAuditProviderFactory.class) { - ret = sFactory; - if(ret == null) { - ret = sFactory = new StandAloneAuditProviderFactory(); - } - } - } - return ret; - } - - private StandAloneAuditProviderFactory() { - super(); - LOG.info("StandAloneAuditProviderFactory: created.."); - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsAuditProvider.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsAuditProvider.java deleted file mode 100644 index 87ce5b312e..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsAuditProvider.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.audit.provider.hdfs; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.provider.BufferedAuditProvider; -import org.apache.atlas.audit.provider.DebugTracer; -import org.apache.atlas.audit.provider.LocalFileLogBuffer; -import org.apache.atlas.audit.provider.Log4jTracer; -import org.apache.atlas.audit.provider.MiscUtil; - -import java.util.Map; -import java.util.Properties; - -public class HdfsAuditProvider extends BufferedAuditProvider { - private static final Log LOG = LogFactory.getLog(HdfsAuditProvider.class); - - public static final String AUDIT_HDFS_IS_ASYNC_PROP = "xasecure.audit.hdfs.is.async"; - public static final String AUDIT_HDFS_MAX_QUEUE_SIZE_PROP = "xasecure.audit.hdfs.async.max.queue.size"; - public static final String AUDIT_HDFS_MAX_FLUSH_INTERVAL_PROP = "xasecure.audit.hdfs.async.max.flush.interval.ms"; - - public HdfsAuditProvider() { - } - - public void init(Properties props) { - LOG.info("HdfsAuditProvider.init()"); - - super.init(props); - - Map hdfsProps = MiscUtil.getPropertiesWithPrefix(props, "xasecure.audit.hdfs.config."); - - String encoding = hdfsProps.get("encoding"); - - String hdfsDestinationDirectory = hdfsProps.get("destination.directory"); - String hdfsDestinationFile = hdfsProps.get("destination.file"); - int hdfsDestinationFlushIntervalSeconds = MiscUtil.parseInteger(hdfsProps.get("destination.flush.interval.seconds"), 15 * 60); - int hdfsDestinationRolloverIntervalSeconds = MiscUtil.parseInteger(hdfsProps.get("destination.rollover.interval.seconds"), 24 * 60 * 60); - int hdfsDestinationOpenRetryIntervalSeconds = MiscUtil.parseInteger(hdfsProps.get("destination.open.retry.interval.seconds"), 60); - - String localFileBufferDirectory = hdfsProps.get("local.buffer.directory"); - String localFileBufferFile = hdfsProps.get("local.buffer.file"); - int localFileBufferFlushIntervalSeconds = MiscUtil.parseInteger(hdfsProps.get("local.buffer.flush.interval.seconds"), 1 * 60); - int localFileBufferFileBufferSizeBytes = MiscUtil.parseInteger(hdfsProps.get("local.buffer.file.buffer.size.bytes"), 8 * 1024); - int localFileBufferRolloverIntervalSeconds = MiscUtil.parseInteger(hdfsProps.get("local.buffer.rollover.interval.seconds"), 10 * 60); - String localFileBufferArchiveDirectory = hdfsProps.get("local.archive.directory"); - int localFileBufferArchiveFileCount = MiscUtil.parseInteger(hdfsProps.get("local.archive.max.file.count"), 10); - // Added for Azure. Note that exact name of these properties is not known as it contains the variable account name in it. 
- Map configProps = MiscUtil.getPropertiesWithPrefix(props, "xasecure.audit.destination.hdfs.config."); - - DebugTracer tracer = new Log4jTracer(LOG); - - HdfsLogDestination mHdfsDestination = new HdfsLogDestination(tracer); - - mHdfsDestination.setDirectory(hdfsDestinationDirectory); - mHdfsDestination.setFile(hdfsDestinationFile); - mHdfsDestination.setFlushIntervalSeconds(hdfsDestinationFlushIntervalSeconds); - mHdfsDestination.setEncoding(encoding); - mHdfsDestination.setRolloverIntervalSeconds(hdfsDestinationRolloverIntervalSeconds); - mHdfsDestination.setOpenRetryIntervalSeconds(hdfsDestinationOpenRetryIntervalSeconds); - mHdfsDestination.setConfigProps(configProps); - - LocalFileLogBuffer mLocalFileBuffer = new LocalFileLogBuffer(tracer); - - mLocalFileBuffer.setDirectory(localFileBufferDirectory); - mLocalFileBuffer.setFile(localFileBufferFile); - mLocalFileBuffer.setFlushIntervalSeconds(localFileBufferFlushIntervalSeconds); - mLocalFileBuffer.setFileBufferSizeBytes(localFileBufferFileBufferSizeBytes); - mLocalFileBuffer.setEncoding(encoding); - mLocalFileBuffer.setRolloverIntervalSeconds(localFileBufferRolloverIntervalSeconds); - mLocalFileBuffer.setArchiveDirectory(localFileBufferArchiveDirectory); - mLocalFileBuffer.setArchiveFileCount(localFileBufferArchiveFileCount); - - setBufferAndDestination(mLocalFileBuffer, mHdfsDestination); - } -} - - - diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsLogDestination.java b/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsLogDestination.java deleted file mode 100644 index 3e4b998e48..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/provider/hdfs/HdfsLogDestination.java +++ /dev/null @@ -1,517 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.atlas.audit.provider.hdfs; - - -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.atlas.audit.model.AuditEventBase; -import org.apache.atlas.audit.provider.DebugTracer; -import org.apache.atlas.audit.provider.LogDestination; -import org.apache.atlas.audit.provider.MiscUtil; - -import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.util.Map; - -public class HdfsLogDestination implements LogDestination { - public final static String EXCP_MSG_FILESYSTEM_CLOSED = "Filesystem closed"; - - private String name = getClass().getName(); - - private String mDirectory = null; - private String mFile = null; - private int mFlushIntervalSeconds = 1 * 60; - private String mEncoding = null; - private boolean mIsAppend = false; - private int mRolloverIntervalSeconds = 24 * 60 * 60; - private int mOpenRetryIntervalSeconds = 60; - private DebugTracer mLogger = null; - - private FSDataOutputStream mFsDataOutStream = null; - private OutputStreamWriter mWriter = null; - private String mHdfsFilename = null; - private long mNextRolloverTime = 0; - private long mNextFlushTime = 0; - private long mLastOpenFailedTime = 0; - private boolean mIsStopInProgress = false; - private Map configProps = null; - - public HdfsLogDestination(DebugTracer tracer) { - mLogger = tracer; - } - - - public void setName(String name) { - this.name = name; - } - - - /* (non-Javadoc) - * @see org.apache.ranger.audit.provider.LogDestination#getName() - */ - @Override - public String getName() { - return name; - } - - public String getDirectory() { - return mDirectory; - } - - public void setDirectory(String directory) { - this.mDirectory = directory; - } - - public String getFile() { - return mFile; - } - - public void setFile(String file) { - this.mFile = file; - } - - public int getFlushIntervalSeconds() { - return mFlushIntervalSeconds; - } - - public void setFlushIntervalSeconds(int flushIntervalSeconds) { - mFlushIntervalSeconds = flushIntervalSeconds; - } - - public String getEncoding() { - return mEncoding; - } - - public void setEncoding(String encoding) { - mEncoding = encoding; - } - - public int getRolloverIntervalSeconds() { - return mRolloverIntervalSeconds; - } - - public void setRolloverIntervalSeconds(int rolloverIntervalSeconds) { - this.mRolloverIntervalSeconds = rolloverIntervalSeconds; - } - - public int getOpenRetryIntervalSeconds() { - return mOpenRetryIntervalSeconds; - } - - public void setOpenRetryIntervalSeconds(int minIntervalOpenRetrySeconds) { - this.mOpenRetryIntervalSeconds = minIntervalOpenRetrySeconds; - } - - @Override - public void start() { - mLogger.debug("==> HdfsLogDestination.start()"); - - openFile(); - - mLogger.debug("<== HdfsLogDestination.start()"); - } - - @Override - public void stop() { - mLogger.debug("==> HdfsLogDestination.stop()"); - - mIsStopInProgress = true; - - closeFile(); - - mIsStopInProgress = false; - - mLogger.debug("<== HdfsLogDestination.stop()"); - } - - @Override - public boolean isAvailable() { - return mWriter != null; - } - - @Override - public boolean send(AuditEventBase log) { - boolean ret = true; - - if(log != null) { - String msg = MiscUtil.stringify(log); - - ret = sendStringified(msg); - } - - return ret; - } - - - @Override - public boolean 
send(AuditEventBase[] logs) { - for (AuditEventBase log : logs) { - boolean ret = send(log); - if(!ret) { - return ret; - } - } - return true; - } - - @Override - public boolean sendStringified(String log) { - boolean ret = false; - - checkFileStatus(); - - OutputStreamWriter writer = mWriter; - - if(writer != null) { - try { - writer.write(log + MiscUtil.LINE_SEPARATOR); - - ret = true; - } catch (IOException excp) { - mLogger.warn("HdfsLogDestination.sendStringified(): write failed", excp); - - closeFile(); - } - } - - return ret; - } - - @Override - public boolean sendStringified(String[] logs) { - for (String log : logs) { - boolean ret = sendStringified(log); - if(!ret) { - return ret; - } - } - return true; - } - - - @Override - public boolean flush() { - mLogger.debug("==> HdfsLogDestination.flush()"); - - boolean ret = false; - - OutputStreamWriter writer = mWriter; - - if(writer != null) { - try { - writer.flush(); - - ret = true; - } catch (IOException excp) { - logException("HdfsLogDestination: flush() failed", excp); - } - } - - FSDataOutputStream ostream = mFsDataOutStream; - - if(ostream != null) { - try { - ostream.hflush(); - - ret = true; - } catch (IOException excp) { - logException("HdfsLogDestination: hflush() failed", excp); - } - } - - if(ret) { - mNextFlushTime = System.currentTimeMillis() + (mFlushIntervalSeconds * 1000L); - } - - mLogger.debug("<== HdfsLogDestination.flush()"); - - return ret; - } - - private void openFile() { - mLogger.debug("==> HdfsLogDestination.openFile()"); - - closeFile(); - - mNextRolloverTime = MiscUtil.getNextRolloverTime(mNextRolloverTime, (mRolloverIntervalSeconds * 1000L)); - - long startTime = MiscUtil.getRolloverStartTime(mNextRolloverTime, (mRolloverIntervalSeconds * 1000L)); - - mHdfsFilename = MiscUtil.replaceTokens(mDirectory + Path.SEPARATOR + mFile, startTime); - - FSDataOutputStream ostream = null; - FileSystem fileSystem = null; - Path pathLogfile = null; - Configuration conf = null; - boolean bOverwrite = false; - - try { - mLogger.debug("HdfsLogDestination.openFile(): opening file " + mHdfsFilename); - - URI uri = URI.create(mHdfsFilename); - - // TODO: mechanism to XA-HDFS plugin to disable auditing of access checks to the current HDFS file - - conf = createConfiguration(); - pathLogfile = new Path(mHdfsFilename); - fileSystem = FileSystem.get(uri, conf); - - try { - if(fileSystem.exists(pathLogfile)) { // file already exists. either append to the file or write to a new file - if(mIsAppend) { - mLogger.info("HdfsLogDestination.openFile(): opening file for append " + mHdfsFilename); - - ostream = fileSystem.append(pathLogfile); - } else { - mHdfsFilename = getNewFilename(mHdfsFilename, fileSystem); - pathLogfile = new Path(mHdfsFilename); - } - } - - // if file does not exist or if mIsAppend==false, create the file - if(ostream == null) { - mLogger.info("HdfsLogDestination.openFile(): opening file for write " + mHdfsFilename); - - createParents(pathLogfile, fileSystem); - ostream = fileSystem.create(pathLogfile, bOverwrite); - } - } catch(IOException excp) { - // append may not be supported by the filesystem; or the file might already be open by another application. Try a different filename - String failedFilename = mHdfsFilename; - - mHdfsFilename = getNewFilename(mHdfsFilename, fileSystem); - pathLogfile = new Path(mHdfsFilename); - - mLogger.info("HdfsLogDestination.openFile(): failed in opening file " + failedFilename + ". 
Will try opening " + mHdfsFilename); - } - - if(ostream == null){ - mLogger.info("HdfsLogDestination.openFile(): opening file for write " + mHdfsFilename); - - createParents(pathLogfile, fileSystem); - ostream = fileSystem.create(pathLogfile, bOverwrite); - } - } catch(Throwable ex) { - mLogger.warn("HdfsLogDestination.openFile() failed", ex); -// } finally { - // TODO: unset the property set above to exclude auditing of logfile opening - // System.setProperty(hdfsCurrentFilenameProperty, null); - } - - mWriter = createWriter(ostream); - - if(mWriter != null) { - mLogger.debug("HdfsLogDestination.openFile(): opened file " + mHdfsFilename); - - mFsDataOutStream = ostream; - mNextFlushTime = System.currentTimeMillis() + (mFlushIntervalSeconds * 1000L); - mLastOpenFailedTime = 0; - } else { - mLogger.warn("HdfsLogDestination.openFile(): failed to open file for write " + mHdfsFilename); - - mHdfsFilename = null; - mLastOpenFailedTime = System.currentTimeMillis(); - } - - mLogger.debug("<== HdfsLogDestination.openFile(" + mHdfsFilename + ")"); - } - - private void closeFile() { - mLogger.debug("==> HdfsLogDestination.closeFile()"); - - flush(); - - OutputStreamWriter writer = mWriter; - - mWriter = null; - mFsDataOutStream = null; - - if(writer != null) { - try { - mLogger.info("HdfsLogDestination.closeFile(): closing file " + mHdfsFilename); - - writer.close(); - } catch(IOException excp) { - logException("HdfsLogDestination: failed to close file " + mHdfsFilename, excp); - } - } - - mLogger.debug("<== HdfsLogDestination.closeFile()"); - } - - private void rollover() { - mLogger.debug("==> HdfsLogDestination.rollover()"); - - closeFile(); - - openFile(); - - mLogger.debug("<== HdfsLogDestination.rollover()"); - } - - private void checkFileStatus() { - long now = System.currentTimeMillis(); - - if(mWriter == null) { - if(now > (mLastOpenFailedTime + (mOpenRetryIntervalSeconds * 1000L))) { - openFile(); - } - } else if(now > mNextRolloverTime) { - rollover(); - } else if(now > mNextFlushTime) { - flush(); - } - } - - private OutputStreamWriter createWriter(OutputStream os ) { - OutputStreamWriter writer = null; - - if(os != null) { - if(mEncoding != null) { - try { - writer = new OutputStreamWriter(os, mEncoding); - } catch(UnsupportedEncodingException excp) { - mLogger.warn("HdfsLogDestination.createWriter(): failed to create output writer.", excp); - } - } - - if(writer == null) { - writer = new OutputStreamWriter(os); - } - } - - return writer; - } - - private void createParents(Path pathLogfile, FileSystem fileSystem) { - try { - Path parentPath = pathLogfile != null ? 
pathLogfile.getParent() : null; - - if(parentPath != null && fileSystem != null && !fileSystem.exists(parentPath)) { - fileSystem.mkdirs(parentPath); - } - } catch (IOException e) { - logException("HdfsLogDestination.createParents() failed", e); - } catch (Throwable e) { - mLogger.warn("HdfsLogDestination.createParents() failed", e); - } - } - - private String getNewFilename(String fileName, FileSystem fileSystem) { - if(fileName == null) { - return ""; - } - - for(int i = 1;; i++) { - String ret = fileName; - - String strToAppend = "-" + Integer.toString(i); - - int extnPos = ret.lastIndexOf("."); - - if(extnPos < 0) { - ret += strToAppend; - } else { - String extn = ret.substring(extnPos); - - ret = ret.substring(0, extnPos) + strToAppend + extn; - } - - if(fileSystem != null && fileExists(ret, fileSystem)) { - continue; - } else { - return ret; - } - } - } - - private boolean fileExists(String fileName, FileSystem fileSystem) { - boolean ret = false; - - if(fileName != null && fileSystem != null) { - Path path = new Path(fileName); - - try { - ret = fileSystem.exists(path); - } catch(IOException excp) { - // ignore - } - } - - return ret; - } - - private void logException(String msg, IOException excp) { - // during shutdown, the underlying FileSystem might already be closed; so don't print error details - - if(mIsStopInProgress) { - return; - } - - String excpMsgToExclude = EXCP_MSG_FILESYSTEM_CLOSED; - String excpMsg = excp != null ? excp.getMessage() : null; - boolean excpExcludeLogging = (excpMsg != null && excpMsg.contains(excpMsgToExclude)); - - if(! excpExcludeLogging) { - mLogger.warn(msg, excp); - } - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - - sb.append("HdfsLogDestination {"); - sb.append("Directory=").append(mDirectory).append("; "); - sb.append("File=").append(mFile).append("; "); - sb.append("RolloverIntervalSeconds=").append(mRolloverIntervalSeconds); - sb.append("}"); - - return sb.toString(); - } - - public void setConfigProps(Map configProps) { - this.configProps = configProps; - } - - Configuration createConfiguration() { - Configuration conf = new Configuration(); - if (configProps != null) { - for (Map.Entry entry : configProps.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - // for ease of install config file may contain properties with empty value, skip those - if (StringUtils.isNotEmpty(value)) { - conf.set(key, value); - } - mLogger.info("Adding property to HDFS config: " + key + " => " + value); - } - } - - mLogger.info("Returning HDFS Filesystem Config: " + conf.toString()); - return conf; - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileCacheProviderSpool.java b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileCacheProviderSpool.java index aa869eb84c..b49c0a883d 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileCacheProviderSpool.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileCacheProviderSpool.java @@ -19,11 +19,10 @@ package org.apache.atlas.audit.queue; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.atlas.audit.utils.AuthObjectUtil; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -//import org.apache.log4j.MDC; import org.apache.atlas.audit.model.AuditEventBase; import org.apache.atlas.audit.model.AuthzAuditEvent; import 
org.apache.atlas.audit.provider.AuditHandler; @@ -41,6 +40,7 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Date; @@ -115,8 +115,6 @@ public enum SPOOL_FILE_STATUS { boolean isDestDown = false; boolean isSpoolingSuccessful = true; - private Gson gson = null; - public AuditFileCacheProviderSpool(AuditHandler consumerProvider) { this.consumerProvider = consumerProvider; } @@ -140,8 +138,6 @@ public boolean init(Properties props, String basePropertyName) { } try { - gson = new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") - .create(); // Initial folder and file properties String logFolderProp = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_LOCAL_DIR); @@ -597,8 +593,7 @@ void loadIndexFile() throws IOException { String line; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, - AuditIndexRecord.class); + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); indexRecords.add(record); } } @@ -645,7 +640,7 @@ synchronized void removeIndexRecord(AuditIndexRecord indexRecord) synchronized void saveIndexFile() throws FileNotFoundException, IOException { PrintWriter out = new PrintWriter(indexFile,"UTF-8"); for (AuditIndexRecord auditIndexRecord : indexRecords) { - out.println(gson.toJson(auditIndexRecord)); + out.println(AuthObjectUtil.toJson(auditIndexRecord)); } out.close(); // printIndex(); @@ -657,7 +652,7 @@ void appendToDoneFile(AuditIndexRecord indexRecord) logger.info("Moving to done file. " + indexRecord.filePath + ", queueName=" + FILE_CACHE_PROVIDER_NAME + ", consumer=" + consumerProvider.getName()); - String line = gson.toJson(indexRecord); + String line = AuthObjectUtil.toJson(indexRecord); PrintWriter out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream( indexDoneFile, true),"UTF-8"))); out.println(line); @@ -705,8 +700,7 @@ public boolean accept(File pathname) { int filesDeletedCount = 0; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, - AuditIndexRecord.class); + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); logFile = new File(record.filePath); String fileName = logFile.getName(); archiveFile = new File(archiveFolder, fileName); diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileQueueSpool.java b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileQueueSpool.java index 2cf53542b7..89b93e4eb6 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileQueueSpool.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileQueueSpool.java @@ -19,15 +19,13 @@ package org.apache.atlas.audit.queue; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -//import org.apache.log4j.MDC; import org.apache.atlas.audit.model.AuditEventBase; import org.apache.atlas.audit.model.AuthzAuditEvent; import org.apache.atlas.audit.provider.AuditHandler; import org.apache.atlas.audit.provider.MiscUtil; +import org.apache.atlas.audit.utils.AuthObjectUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.BufferedWriter; @@ -58,7 
+56,7 @@ */ public class AuditFileQueueSpool implements Runnable { - private static final Log logger = LogFactory.getLog(AuditFileQueueSpool.class); + private static final Logger logger = LoggerFactory.getLogger(AuditFileQueueSpool.class); public enum SPOOL_FILE_STATUS { pending, write_inprogress, read_inprogress, done @@ -114,8 +112,6 @@ public enum SPOOL_FILE_STATUS { boolean isDestDown = false; boolean isSpoolingSuccessful = true; - private Gson gson = null; - public AuditFileQueueSpool(AuditHandler consumerProvider) { this.consumerProvider = consumerProvider; } @@ -139,8 +135,6 @@ public boolean init(Properties props, String basePropertyName) { } try { - gson = new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") - .create(); // Initial folder and file properties String logFolderProp = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_LOCAL_DIR); @@ -166,7 +160,7 @@ public boolean init(Properties props, String basePropertyName) { + FILE_QUEUE_PROVIDER_NAME); if (logFolderProp == null || logFolderProp.isEmpty()) { - logger.fatal("Audit spool folder is not configured. Please set " + logger.error("Audit spool folder is not configured. Please set " + propPrefix + "." + PROP_FILE_SPOOL_LOCAL_DIR @@ -177,7 +171,7 @@ public boolean init(Properties props, String basePropertyName) { if (!logFolder.isDirectory()) { boolean result = logFolder.mkdirs(); if (!logFolder.isDirectory() || !result) { - logger.fatal("File Spool folder not found and can't be created. folder=" + logger.error("File Spool folder not found and can't be created. folder=" + logFolder.getAbsolutePath() + ", queueName=" + FILE_QUEUE_PROVIDER_NAME); @@ -227,7 +221,7 @@ public boolean init(Properties props, String basePropertyName) { if (!indexFile.exists()) { boolean ret = indexFile.createNewFile(); if (!ret) { - logger.fatal("Error creating index file. fileName=" + logger.error("Error creating index file. fileName=" + indexFile.getPath()); return false; } @@ -245,7 +239,7 @@ public boolean init(Properties props, String basePropertyName) { if (!indexDoneFile.exists()) { boolean ret = indexDoneFile.createNewFile(); if (!ret) { - logger.fatal("Error creating index done file. fileName=" + logger.error("Error creating index done file. fileName=" + indexDoneFile.getPath()); return false; } @@ -291,7 +285,7 @@ public boolean init(Properties props, String basePropertyName) { } } catch (Throwable t) { - logger.fatal("Error initializing File Spooler. queue=" + logger.error("Error initializing File Spooler. queue=" + FILE_QUEUE_PROVIDER_NAME, t); return false; } @@ -600,7 +594,7 @@ void loadIndexFile() throws IOException { String line; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); indexRecords.add(record); } @@ -648,7 +642,7 @@ synchronized void removeIndexRecord(AuditIndexRecord indexRecord) synchronized void saveIndexFile() throws FileNotFoundException, IOException { PrintWriter out = new PrintWriter(indexFile,"UTF-8"); for (AuditIndexRecord auditIndexRecord : indexRecords) { - out.println(gson.toJson(auditIndexRecord)); + out.println(AuthObjectUtil.toJson(auditIndexRecord)); } out.close(); // printIndex(); @@ -660,7 +654,7 @@ void appendToDoneFile(AuditIndexRecord indexRecord) logger.info("Moving to done file. 
" + indexRecord.filePath + ", queueName=" + FILE_QUEUE_PROVIDER_NAME + ", consumer=" + consumerProvider.getName()); - String line = gson.toJson(indexRecord); + String line = AuthObjectUtil.toJson(indexRecord); PrintWriter out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream( indexDoneFile, true),"UTF-8"))); out.println(line); @@ -708,8 +702,7 @@ public boolean accept(File pathname) { int filesDeletedCount = 0; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, - AuditIndexRecord.class); + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); logFile = new File(record.filePath); String fileName = logFile.getName(); archiveFile = new File(archiveFolder, fileName); @@ -789,7 +782,7 @@ public void run() { //MDC.clear(); runLogAudit(); } catch (Throwable t) { - logger.fatal("Exited thread without abnormaly. queue=" + logger.error("Exited thread without abnormaly. queue=" + consumerProvider.getName(), t); } } diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileSpool.java b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileSpool.java index 039f7ccb24..b358744111 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileSpool.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/queue/AuditFileSpool.java @@ -19,8 +19,7 @@ package org.apache.atlas.audit.queue; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; +import org.apache.atlas.audit.utils.AuthObjectUtil; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; //import org.apache.log4j.MDC; @@ -108,8 +107,6 @@ public enum SPOOL_FILE_STATUS { boolean isDrain = false; boolean isDestDown = false; - private Gson gson = null; - public AuditFileSpool(AuditQueue queueProvider, AuditHandler consumerProvider) { this.queueProvider = queueProvider; @@ -133,9 +130,6 @@ public boolean init(Properties props, String basePropertyName) { } try { - gson = new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") - .create(); - // Initial folder and file properties String logFolderProp = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_LOCAL_DIR); @@ -575,8 +569,7 @@ void loadIndexFile() throws IOException { String line; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, - AuditIndexRecord.class); + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); indexRecords.add(record); } } @@ -619,7 +612,7 @@ synchronized void removeIndexRecord(AuditIndexRecord indexRecord) synchronized void saveIndexFile() throws FileNotFoundException, IOException { PrintWriter out = new PrintWriter(indexFile); for (AuditIndexRecord auditIndexRecord : indexRecords) { - out.println(gson.toJson(auditIndexRecord)); + out.println(AuthObjectUtil.toJson(auditIndexRecord)); } out.close(); // printIndex(); @@ -631,7 +624,7 @@ void appendToDoneFile(AuditIndexRecord indexRecord) logger.info("Moving to done file. 
" + indexRecord.filePath + ", queueName=" + queueProvider.getName() + ", consumer=" + consumerProvider.getName()); - String line = gson.toJson(indexRecord); + String line = AuthObjectUtil.toJson(indexRecord); PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter( indexDoneFile, true))); out.println(line); @@ -669,8 +662,7 @@ public boolean accept(File pathname) { int filesDeletedCount = 0; while ((line = br.readLine()) != null) { if (!line.isEmpty() && !line.startsWith("#")) { - AuditIndexRecord record = gson.fromJson(line, - AuditIndexRecord.class); + AuditIndexRecord record = AuthObjectUtil.fromJson(line, AuditIndexRecord.class); logFile = new File(record.filePath); String fileName = logFile.getName(); archiveFile = new File(archiveFolder, fileName); diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/AbstractKerberosUser.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/AbstractKerberosUser.java deleted file mode 100644 index 44528b003d..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/AbstractKerberosUser.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.Subject; -import javax.security.auth.kerberos.KerberosPrincipal; -import javax.security.auth.kerberos.KerberosTicket; -import javax.security.auth.login.LoginContext; -import javax.security.auth.login.LoginException; -import java.security.PrivilegedAction; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; - -public abstract class AbstractKerberosUser implements KerberosUser { - - private static final Logger LOG = LoggerFactory.getLogger(AbstractKerberosUser.class); - - static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'"; - - /** - * Percentage of the ticket window to use before we renew the TGT. - */ - static final float TICKET_RENEW_WINDOW = 0.80f; - - protected final AtomicBoolean loggedIn = new AtomicBoolean(false); - - protected Subject subject; - protected LoginContext loginContext; - - public AbstractKerberosUser() { - } - - /** - * Performs a login using the specified principal and keytab. 
- * - * @throws LoginException if the login fails - */ - @Override - public synchronized void login() throws LoginException { - if (isLoggedIn()) { - return; - } - - try { - // If it's the first time ever calling login then we need to initialize a new context - if (loginContext == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("Initializing new login context..."); - } - if (this.subject == null) { - // only create a new subject if a current one does not exist - // other classes may be referencing an existing subject and replacing it may break functionality of those other classes after relogin - this.subject = new Subject(); - } - this.loginContext = createLoginContext(subject); - } - - loginContext.login(); - loggedIn.set(true); - if (LOG.isDebugEnabled()) { - LOG.debug("Successful login for {}", new Object[]{getPrincipal()}); - } - } catch (LoginException le) { - LoginException loginException = new LoginException("Unable to login with " + getPrincipal() + " due to: " + le.getMessage()); - loginException.setStackTrace(le.getStackTrace()); - throw loginException; - } - } - - protected abstract LoginContext createLoginContext(final Subject subject) throws LoginException; - - /** - * Performs a logout of the current user. - * - * @throws LoginException if the logout fails - */ - @Override - public synchronized void logout() throws LoginException { - if (!isLoggedIn()) { - return; - } - - try { - loginContext.logout(); - loggedIn.set(false); - LOG.debug("Successful logout for {}", new Object[]{getPrincipal()}); - - loginContext = null; - } catch (LoginException e) { - throw new LoginException("Logout failed due to: " + e.getMessage()); - } - } - - /** - * Executes the PrivilegedAction as this user. - * - * @param action the action to execute - * @param the type of result - * @return the result of the action - * @throws IllegalStateException if this method is called while not logged in - */ - @Override - public T doAs(final PrivilegedAction action) throws IllegalStateException { - if (!isLoggedIn()) { - throw new IllegalStateException("Must login before executing actions"); - } - - return Subject.doAs(subject, action); - } - - /** - * Executes the PrivilegedAction as this user. - * - * @param action the action to execute - * @param the type of result - * @return the result of the action - * @throws IllegalStateException if this method is called while not logged in - * @throws PrivilegedActionException if an exception is thrown from the action - */ - @Override - public T doAs(final PrivilegedExceptionAction action) - throws IllegalStateException, PrivilegedActionException { - if (!isLoggedIn()) { - throw new IllegalStateException("Must login before executing actions"); - } - - return Subject.doAs(subject, action); - } - - /** - * Re-login a user from keytab if TGT is expired or is close to expiry. - * - * @throws LoginException if an error happens performing the re-login - */ - @Override - public synchronized boolean checkTGTAndRelogin() throws LoginException { - final KerberosTicket tgt = getTGT(); - if (tgt == null) { - LOG.debug("TGT was not found"); - } - - if (tgt != null && System.currentTimeMillis() < getRefreshTime(tgt)) { - LOG.debug("TGT was found, but has not reached expiration window"); - return false; - } - - LOG.debug("Performing relogin for {}", new Object[]{getPrincipal()}); - logout(); - login(); - return true; - } - - /** - * Get the Kerberos TGT. 
- * - * @return the user's TGT or null if none was found - */ - private synchronized KerberosTicket getTGT() { - final Set tickets = subject.getPrivateCredentials(KerberosTicket.class); - - for (KerberosTicket ticket : tickets) { - if (isTGSPrincipal(ticket.getServer())) { - return ticket; - } - } - - return null; - } - - /** - * TGS must have the server principal of the form "krbtgt/FOO@FOO". - * - * @param principal the principal to check - * @return true if the principal is the TGS, false otherwise - */ - private boolean isTGSPrincipal(final KerberosPrincipal principal) { - if (principal == null) { - return false; - } - - if (principal.getName().equals("krbtgt/" + principal.getRealm() + "@" + principal.getRealm())) { - if (LOG.isTraceEnabled()) { - LOG.trace("Found TGT principal: " + principal.getName()); - } - return true; - } - - return false; - } - - private long getRefreshTime(final KerberosTicket tgt) { - long start = tgt.getStartTime().getTime(); - long end = tgt.getEndTime().getTime(); - - if (LOG.isTraceEnabled()) { - final SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT); - final String startDate = dateFormat.format(new Date(start)); - final String endDate = dateFormat.format(new Date(end)); - LOG.trace("TGT valid starting at: " + startDate); - LOG.trace("TGT expires at: " + endDate); - } - - return start + (long) ((end - start) * TICKET_RENEW_WINDOW); - } - - /** - * @return true if this user is currently logged in, false otherwise - */ - @Override - public boolean isLoggedIn() { - return loggedIn.get(); - } - - @Override - public String toString() { - return "KerberosUser{" + - "principal='" + getPrincipal() + '\'' + - ", loggedIn=" + loggedIn + - '}'; - } -} - diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/AuthObjectUtil.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/AuthObjectUtil.java new file mode 100644 index 0000000000..3a61264163 --- /dev/null +++ b/auth-audits/src/main/java/org/apache/atlas/audit/utils/AuthObjectUtil.java @@ -0,0 +1,47 @@ +package org.apache.atlas.audit.utils; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.text.SimpleDateFormat; + +public class AuthObjectUtil { + private static final Logger LOG = LoggerFactory.getLogger(AuthObjectUtil.class); + + private static ObjectMapper MAPPER = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")); + + public static String toJson(Object obj) { + String ret; + try { + if (obj instanceof JsonNode && ((JsonNode) obj).isTextual()) { + ret = ((JsonNode) obj).textValue(); + } else { + ret = MAPPER.writeValueAsString(obj); + } + }catch (IOException e){ + LOG.error("AuthObjectUtil.toJson()", e); + + ret = null; + } + return ret; + } + + public static T fromJson(String jsonStr, Class type) { + T ret = null; + + if (jsonStr != null) { + try { + ret = MAPPER.readValue(jsonStr, type); + } catch (IOException e) { + LOG.error("AuthObjectUtil.fromJson()", e); + + ret = null; + } + } + + return ret; + } +} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/RangerAuditWriter.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/CredentialsProviderUtil.java similarity index 53% rename from auth-audits/src/main/java/org/apache/atlas/audit/utils/RangerAuditWriter.java rename to auth-audits/src/main/java/org/apache/atlas/audit/utils/CredentialsProviderUtil.java index 
4a34ff54de..4fbac81606 100644 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/RangerAuditWriter.java +++ b/auth-audits/src/main/java/org/apache/atlas/audit/utils/CredentialsProviderUtil.java @@ -1,5 +1,3 @@ -package org.apache.atlas.audit.utils; - /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -19,21 +17,23 @@ * under the License. */ -import java.io.File; -import java.util.Collection; -import java.util.Map; -import java.util.Properties; - -public interface RangerAuditWriter { - void init(Properties prop, String propPrefix, String auditProviderName, Map auditConfigs); - - boolean log(Collection events) throws Exception; +package org.apache.atlas.audit.utils; - boolean logFile(File file) throws Exception; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; - void start(); +public class CredentialsProviderUtil { + private static final Logger logger = LoggerFactory.getLogger(CredentialsProviderUtil.class); - void flush(); + public static CredentialsProvider getBasicCredentials(String user, String password) { + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(user, password)); + return credentialsProvider; + } - void stop(); } diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/InMemoryJAASConfiguration.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/InMemoryJAASConfiguration.java deleted file mode 100644 index 0baf32ad4d..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/InMemoryJAASConfiguration.java +++ /dev/null @@ -1,378 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
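The CredentialsProviderUtil added above only constructs a BasicCredentialsProvider for a user/password pair. A sketch of how such a provider is typically handed to an Apache HttpClient (the credential values and the client wiring are illustrative assumptions, not part of this patch):

    import org.apache.http.client.CredentialsProvider;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;

    public class CredentialsProviderSketch {
        public static void main(String[] args) throws Exception {
            // CredentialsProviderUtil comes from this patch; the rest of the wiring is hypothetical.
            CredentialsProvider provider = CredentialsProviderUtil.getBasicCredentials("atlas", "changeit");
            try (CloseableHttpClient http = HttpClients.custom()
                    .setDefaultCredentialsProvider(provider)  // answers basic-auth challenges with the credentials above
                    .build()) {
                // http is now ready to issue authenticated requests to the audit store
            }
        }
    }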
- */ - -package org.apache.atlas.audit.utils; - -import org.apache.commons.collections.MapUtils; -import org.apache.hadoop.security.SecurityUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.SortedSet; -import java.util.StringTokenizer; -import java.util.TreeSet; - -/** - * InMemoryJAASConfiguration - * - * An utility class - which has a static method init to load all JAAS configuration from Application properties file (eg: kafka.properties) and - * set it as part of the default lookup configuration for all JAAS configuration lookup. - * - * Example settings in application.properties: - * - * xasecure.audit.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule - * xasecure.audit.jaas.KafkaClient.loginModuleControlFlag = required - * xasecure.audit.jaas.KafkaClient.option.useKeyTab = true - * xasecure.audit.jaas.KafkaClient.option.storeKey = true - * xasecure.audit.jaas.KafkaClient.option.serviceName = kafka - * xasecure.audit.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/kafka_client.keytab - * xasecure.audit.jaas.KafkaClient.option.principal = kafka-client-1@EXAMPLE.COM - - * xasecure.audit.jaas.MyClient.0.loginModuleName = com.sun.security.auth.module.Krb5LoginModule - * xasecure.audit.jaas.MyClient.0.loginModuleControlFlag = required - * xasecure.audit.jaas.MyClient.0.option.useKeyTab = true - * xasecure.audit.jaas.MyClient.0.option.storeKey = true - * xasecure.audit.jaas.MyClient.0.option.serviceName = kafka - * xasecure.audit.jaas.MyClient.0.option.keyTab = /etc/security/keytabs/kafka_client.keytab - * xasecure.audit.jaas.MyClient.0.option.principal = kafka-client-1@EXAMPLE.COM - * - * xasecure.audit.jaas.MyClient.1.loginModuleName = com.sun.security.auth.module.Krb5LoginModule - * xasecure.audit.jaas.MyClient.1.loginModuleControlFlag = optional - * xasecure.audit.jaas.MyClient.1.option.useKeyTab = true - * xasecure.audit.jaas.MyClient.1.option.storeKey = true - * xasecure.audit.jaas.MyClient.1.option.serviceName = kafka - * xasecure.audit.jaas.MyClient.1.option.keyTab = /etc/security/keytabs/kafka_client.keytab - * xasecure.audit.jaas.MyClient.1.option.principal = kafka-client-1@EXAMPLE.COM - - * This will set the JAAS configuration - equivalent to the jaas.conf file entries: - * KafkaClient { - * com.sun.security.auth.module.Krb5LoginModule required - * useKeyTab=true - * storeKey=true - * serviceName=kafka - * keyTab="/etc/security/keytabs/kafka_client.keytab" - * principal="kafka-client-1@EXAMPLE.COM"; - * }; - * MyClient { - * com.sun.security.auth.module.Krb5LoginModule required - * useKeyTab=true - * storeKey=true - * serviceName=kafka keyTab="/etc/security/keytabs/kafka_client.keytab" - * principal="kafka-client-1@EXAMPLE.COM"; - * }; - * MyClient { - * com.sun.security.auth.module.Krb5LoginModule optional - * useKeyTab=true - * storeKey=true - * serviceName=kafka - * keyTab="/etc/security/keytabs/kafka_client.keytab" - * principal="kafka-client-1@EXAMPLE.COM"; - * }; - * - * Here is the syntax for atlas.properties to add JAAS configuration: - * - * The property name has to begin with 'xasecure.audit.jaas.' 
+ clientId (in case of Kafka client, - * it expects the clientId to be KafkaClient). - * The following property must be there to specify the JAAS loginModule name - * 'xasecure.audit.jaas.' +' + clientId + '.loginModuleName' - * The following optional property should be set to specify the loginModuleControlFlag - * 'xasecure.audit.jaas.' +' + clientId + '.loginModuleControlFlag' - * Default value : required , Possible values: required, optional, sufficient, requisite - * Then you can add additional optional parameters as options for the configuration using the following - * syntax: - * 'xasecure.audit.jaas.' +' + clientId + '.option.' + = - * - * The current setup will lookup JAAS configration from the atlas-application.properties first, if not available, - * it will delegate to the original configuration - * - */ - -public final class InMemoryJAASConfiguration extends Configuration { - - private static final Logger LOG = LoggerFactory.getLogger(InMemoryJAASConfiguration.class); - - public static final String JAAS_CONFIG_PREFIX_PARAM = "xasecure.audit.jaas."; - public static final String JAAS_CONFIG_LOGIN_MODULE_NAME_PARAM = "loginModuleName"; - public static final String JAAS_CONFIG_LOGIN_MODULE_CONTROL_FLAG_PARAM = "loginModuleControlFlag"; - public static final String JAAS_CONFIG_LOGIN_OPTIONS_PREFIX = "option"; - public static final String JAAS_PRINCIPAL_PROP = "principal"; - - private final Configuration parent; - private final Map> applicationConfigEntryMap = new HashMap<>(); - - public static InMemoryJAASConfiguration init(String propFile) throws Exception { - LOG.debug("==> InMemoryJAASConfiguration.init( {} ) ", propFile); - - InMemoryJAASConfiguration ret = null; - InputStream in = null; - - try { - Properties properties = new Properties(); - - in = ClassLoader.getSystemResourceAsStream(propFile); - - if (in == null) { - if (!propFile.startsWith("/")) { - in = ClassLoader.getSystemResourceAsStream("/" + propFile); - } - if (in == null) { - in = new FileInputStream(new File(propFile)); - } - } - - properties.load(in); - - ret = init(properties); - } catch (IOException e) { - throw new Exception("Failed to load JAAS application properties", e); - } finally { - if ( in != null) { - try { - in.close(); - } catch ( Exception e) { - //Ignore - } - } - } - - LOG.debug("<== InMemoryJAASConfiguration.init( {} ) ", propFile); - - return ret; - } - - public static InMemoryJAASConfiguration init(Properties properties) throws Exception { - LOG.debug("==> InMemoryJAASConfiguration.init()"); - - InMemoryJAASConfiguration ret = null; - - if (properties != null && MapUtils.isNotEmpty(properties)) { - ret = new InMemoryJAASConfiguration(properties); - } else { - throw new Exception("Failed to load JAAS application properties: properties NULL or empty!"); - } - - LOG.debug("<== InMemoryJAASConfiguration.init()"); - - return ret; - } - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String name) { - LOG.debug("==> InMemoryJAASConfiguration.getAppConfigurationEntry( {} )", name); - - AppConfigurationEntry[] ret = null; - - if (parent != null) { - ret = parent.getAppConfigurationEntry(name); - } - - if (ret == null || ret.length == 0) { - List retList = applicationConfigEntryMap.get(name); - - if (retList != null && retList.size() > 0) { - ret = retList.toArray(new AppConfigurationEntry[retList.size()]); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== InMemoryJAASConfiguration.getAppConfigurationEntry( {} ) : {}", name, toString(ret)); - } - - return ret; - } - - private 
InMemoryJAASConfiguration(Properties prop) { - parent = Configuration.getConfiguration(); - - initialize(prop); - } - - private void initialize(Properties properties) { - LOG.debug("==> InMemoryJAASConfiguration.initialize()"); - - int prefixLen = JAAS_CONFIG_PREFIX_PARAM.length(); - Map> jaasClients = new HashMap<>(); - - for(String key : properties.stringPropertyNames()) { - if (key.startsWith(JAAS_CONFIG_PREFIX_PARAM)) { - String jaasKey = key.substring(prefixLen); - StringTokenizer tokenizer = new StringTokenizer(jaasKey, "."); - int tokenCount = tokenizer.countTokens(); - - if (tokenCount > 0) { - String clientId = tokenizer.nextToken(); - SortedSet indexList = jaasClients.get(clientId); - - if (indexList == null) { - indexList = new TreeSet<>(); - - jaasClients.put(clientId, indexList); - } - - String indexStr = tokenizer.nextToken(); - int indexId = isNumeric(indexStr) ? Integer.parseInt(indexStr) : -1; - Integer clientIdIndex = Integer.valueOf(indexId); - - if (!indexList.contains(clientIdIndex)) { - indexList.add(clientIdIndex); - } - } - } - } - - for(String jaasClient : jaasClients.keySet()) { - for(Integer index : jaasClients.get(jaasClient)) { - String keyPrefix = JAAS_CONFIG_PREFIX_PARAM + jaasClient + "."; - - if (index > -1) { - keyPrefix = keyPrefix + String.valueOf(index) + "."; - } - - String keyParam = keyPrefix + JAAS_CONFIG_LOGIN_MODULE_NAME_PARAM; - String loginModuleName = properties.getProperty(keyParam); - - if (loginModuleName == null) { - LOG.error("Unable to add JAAS configuration for " - + "client [" + jaasClient + "] as it is missing param [" + keyParam + "]." - + " Skipping JAAS config for [" + jaasClient + "]"); - continue; - } else { - loginModuleName = loginModuleName.trim(); - } - - keyParam = keyPrefix + JAAS_CONFIG_LOGIN_MODULE_CONTROL_FLAG_PARAM; - - String controlFlag = properties.getProperty(keyParam); - - AppConfigurationEntry.LoginModuleControlFlag loginControlFlag = null; - - if (controlFlag != null) { - controlFlag = controlFlag.trim().toLowerCase(); - - if (controlFlag.equals("optional")) { - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL; - } else if (controlFlag.equals("requisite")) { - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUISITE; - } else if (controlFlag.equals("sufficient")) { - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT; - } else if (controlFlag.equals("required")) { - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - } else { - String validValues = "optional|requisite|sufficient|required"; - LOG.warn("Unknown JAAS configuration value for (" + keyParam - + ") = [" + controlFlag + "], valid value are [" + validValues - + "] using the default value, REQUIRED"); - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - } - } else { - LOG.warn("Unable to find JAAS configuration (" - + keyParam + "); using the default value, REQUIRED"); - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - } - - Map options = new HashMap<>(); - String optionPrefix = keyPrefix + JAAS_CONFIG_LOGIN_OPTIONS_PREFIX + "."; - int optionPrefixLen = optionPrefix.length(); - - for(String key : properties.stringPropertyNames()) { - if (key.startsWith(optionPrefix)) { - String optionKey = key.substring(optionPrefixLen); - String optionVal = properties.getProperty(key); - - if (optionVal != null) { - optionVal = optionVal.trim(); - - try { - if (optionKey.equalsIgnoreCase(JAAS_PRINCIPAL_PROP)) { - optionVal = 
SecurityUtil.getServerPrincipal(optionVal, (String) null); - } - } catch (IOException e) { - LOG.warn("Failed to build serverPrincipal. Using provided value:[" - + optionVal + "]"); - } - } - - options.put(optionKey, optionVal); - } - } - - AppConfigurationEntry entry = new AppConfigurationEntry(loginModuleName, loginControlFlag, options); - - if (LOG.isDebugEnabled()) { - StringBuilder sb = new StringBuilder(); - - sb.append("Adding client: [").append(jaasClient).append("{").append(index).append("}]\n"); - sb.append("\tloginModule: [").append(loginModuleName).append("]\n"); - sb.append("\tcontrolFlag: [").append(loginControlFlag).append("]\n"); - - for (String key : options.keySet()) { - String val = options.get(key); - - sb.append("\tOptions: [").append(key).append("] => [").append(val).append("]\n"); - } - - LOG.debug(sb.toString()); - } - - List retList = applicationConfigEntryMap.get(jaasClient); - - if (retList == null) { - retList = new ArrayList<>(); - - applicationConfigEntryMap.put(jaasClient, retList); - } - - retList.add(entry); - } - } - - LOG.debug("<== InMemoryJAASConfiguration.initialize()"); - } - - private static boolean isNumeric(String str) { - return str.matches("-?\\d+(\\.\\d+)?"); //match a number with optional '-' and decimal. - } - - private String toString(AppConfigurationEntry[] entries) { - StringBuilder sb = new StringBuilder(); - - sb.append('['); - if (entries != null) { - for (AppConfigurationEntry entry : entries) { - sb.append("{ loginModuleName=").append(entry.getLoginModuleName()) - .append(", controlFlag=").append(entry.getControlFlag()) - .append(", options=").append(entry.getOptions()) - .append("}"); - } - } - sb.append(']'); - - return sb.toString(); - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosAction.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosAction.java deleted file mode 100644 index 7af54484ed..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosAction.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.utils; - -import org.apache.commons.lang3.Validate; -import org.apache.commons.logging.Log; - -import javax.security.auth.login.LoginException; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; - -/** - * Helper class for processors to perform an action as a KerberosUser. 
- */ -public class KerberosAction { - - private final KerberosUser kerberosUser; - private final PrivilegedExceptionAction action; - private final Log logger; - - public KerberosAction(final KerberosUser kerberosUser, - final PrivilegedExceptionAction action, - final Log logger) { - this.kerberosUser = kerberosUser; - this.action = action; - this.logger = logger; - Validate.notNull(this.kerberosUser); - Validate.notNull(this.action); - Validate.notNull(this.logger); - } - - public T execute() throws Exception { - T result; - // lazily login the first time the processor executes - if (!kerberosUser.isLoggedIn()) { - try { - kerberosUser.login(); - logger.info("Successful login for " + kerberosUser.getPrincipal()); - } catch (LoginException e) { - throw new Exception("Login failed due to: " + e.getMessage(), e); - } - } - - // check if we need to re-login, will only happen if re-login window is reached (80% of TGT life) - try { - kerberosUser.checkTGTAndRelogin(); - } catch (LoginException e) { - throw new Exception("Relogin check failed due to: " + e.getMessage(), e); - } - - // attempt to execute the action, if an exception is caught attempt to logout/login and retry - try { - result = kerberosUser.doAs(action); - } catch (SecurityException se) { - logger.info("Privileged action failed, attempting relogin and retrying..."); - logger.debug("", se); - - try { - kerberosUser.logout(); - kerberosUser.login(); - result = kerberosUser.doAs(action); - } catch (Exception e) { - throw new Exception("Retrying privileged action failed due to: " + e.getMessage(), e); - } - } catch (PrivilegedActionException pae) { - final Exception cause = pae.getException(); - throw new Exception("Privileged action failed due to: " + cause.getMessage(), cause); - } - - return result; - } -} diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosJAASConfigUser.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosJAASConfigUser.java deleted file mode 100644 index 5b0ff718e1..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosJAASConfigUser.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.utils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.Subject; -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; -import javax.security.auth.login.LoginContext; -import javax.security.auth.login.LoginException; - -/** - * Used to authenticate and execute actions when Kerberos is enabled and a keytab is being used. 
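KerberosAction.execute() above logs in lazily, re-checks the TGT renew window, and retries the privileged action once after a SecurityException. A sketch of how a caller wraps work in it, assuming the pre-removal API and a commons-logging Log (the JAAS entry name and the action body are illustrative assumptions):

    import java.security.PrivilegedExceptionAction;
    import javax.security.auth.login.Configuration;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class KerberosActionSketch {
        public static void main(String[] args) throws Exception {
            Log log = LogFactory.getLog(KerberosActionSketch.class);
            KerberosUser user = new KerberosJAASConfigUser("KafkaClient", Configuration.getConfiguration());
            // Hypothetical privileged work; KerberosAction handles lazy login, relogin and one retry.
            KerberosAction action = new KerberosAction(user,
                    (PrivilegedExceptionAction<Boolean>) () -> Boolean.TRUE,
                    log);
            Boolean ok = (Boolean) action.execute();
            log.info("privileged action returned " + ok);
        }
    }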
- * - * */ -public class KerberosJAASConfigUser extends AbstractKerberosUser { - private static final Logger LOG = LoggerFactory.getLogger(KerberosJAASConfigUser.class); - - private final String configName; - private final Configuration config; - - public KerberosJAASConfigUser(final String configName, final Configuration config) { - this.configName = configName; - this.config = config; - } - - - @Override - public String getPrincipal() { - String ret = null; - AppConfigurationEntry[] entries = config.getAppConfigurationEntry(configName); - - if (entries != null) { - for (AppConfigurationEntry entry : entries) { - if (entry.getOptions().containsKey(InMemoryJAASConfiguration.JAAS_PRINCIPAL_PROP)) { - ret = (String) entry.getOptions().get(InMemoryJAASConfiguration.JAAS_PRINCIPAL_PROP); - - break; - } - } - } - - return ret; - } - - @Override - protected LoginContext createLoginContext(Subject subject) throws LoginException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> KerberosJAASConfigUser.createLoginContext()"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== KerberosJAASConfigUser.createLoginContext(), Subject: " + subject); - } - - return new LoginContext(configName, subject, null, config); - } -} - diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosUser.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosUser.java deleted file mode 100644 index 8e6a95bc8f..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/KerberosUser.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.atlas.audit.utils; - -import javax.security.auth.login.LoginException; -import java.security.PrivilegedAction; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; - -/** - * A keytab-based user that can login/logout and perform actions as the given user. - */ -public interface KerberosUser { - - /** - * Performs a login for the given user. - * - * @throws LoginException if the login fails - */ - void login() throws LoginException; - - /** - * Performs a logout for the given user. - * - * @throws LoginException if the logout fails - */ - void logout() throws LoginException; - - /** - * Executes the given action as the given user. - * - * @param action the action to execute - * @param the type of response - * @return the result of the action - * @throws IllegalStateException if attempting to execute an action before performing a login - */ - T doAs(PrivilegedAction action) throws IllegalStateException; - - /** - * Executes the given action as the given user. 
- * - * @param action the action to execute - * @param the type of response - * @return the result of the action - * @throws IllegalStateException if attempting to execute an action before performing a login - * @throws PrivilegedActionException if the action itself threw an exception - */ - T doAs(PrivilegedExceptionAction action) - throws IllegalStateException, PrivilegedActionException; - - /** - * Performs a re-login if the TGT is close to expiration. - * - * @return true if a relogin was performed, false otherwise - * @throws LoginException if the relogin fails - */ - boolean checkTGTAndRelogin() throws LoginException; - - /** - * @return true if this user is currently logged in, false otherwise - */ - boolean isLoggedIn(); - - /** - * @return the principal for this user - */ - String getPrincipal(); - -} - diff --git a/auth-audits/src/main/java/org/apache/atlas/audit/utils/RollingTimeUtil.java b/auth-audits/src/main/java/org/apache/atlas/audit/utils/RollingTimeUtil.java deleted file mode 100644 index 505591e9f9..0000000000 --- a/auth-audits/src/main/java/org/apache/atlas/audit/utils/RollingTimeUtil.java +++ /dev/null @@ -1,269 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.audit.utils; - -import org.apache.commons.lang.StringUtils; - -import java.util.Calendar; -import java.util.Date; - -public class RollingTimeUtil { - public static final String MINUTES ="m"; //minutes - public static final String HOURS ="h"; //hours - public static final String DAYS ="d"; //days - public static final String WEEKS ="w"; //weeks - public static final String MONTHS ="M"; //months - public static final String YEARS ="y"; //years - - private static volatile RollingTimeUtil me = null; - - public static RollingTimeUtil getInstance() { - RollingTimeUtil result = me; - if ( result == null) { - synchronized(RollingTimeUtil.class) { - result = me; - if ( result == null){ - me = result = new RollingTimeUtil(); - } - } - } - return result; - } - - public RollingTimeUtil() { - } - - public Date computeNextRollingTime(String rollingTimePeriod) throws Exception{ - Date ret = null; - - if (!StringUtils.isEmpty(rollingTimePeriod)) { - String computePeriod = getTimeLiteral(rollingTimePeriod); - int timeNumeral = getTimeNumeral(rollingTimePeriod,computePeriod); - switch(computePeriod) { - case MINUTES: - ret = computeTopOfMinuteDate(timeNumeral); - break; - case HOURS: - ret = computeTopOfHourDate(timeNumeral); - break; - case DAYS: - ret = computeTopOfDayDate(timeNumeral); - break; - case WEEKS: - ret = computeTopOfWeekDate(timeNumeral); - break; - case MONTHS: - ret = computeTopofMonthDate(timeNumeral); - break; - case YEARS: - ret = computeTopOfYearDate(timeNumeral); - break; - } - } else { - throw new Exception("Unable to compute Next Rolling using the given Rollover period"); - } - return ret; - } - - public String convertRolloverSecondsToRolloverPeriod(long duration) { - final int SECONDS_IN_MINUTE = 60; - final int SECONDS_IN_HOUR = 60 * SECONDS_IN_MINUTE; - final int SECONDS_IN_DAY = 24 * SECONDS_IN_HOUR; - - String ret = null; - int days = (int) (duration / SECONDS_IN_DAY); - duration %= SECONDS_IN_DAY; - int hours = (int) (duration / SECONDS_IN_HOUR); - duration %= SECONDS_IN_HOUR; - int minutes = (int) (duration / SECONDS_IN_MINUTE); - - if(days != 0) { - if(hours == 0 && minutes == 0) { - ret = (days + DAYS); - } - } else if(hours != 0) { - if(minutes == 0) { - ret = (hours + HOURS); - } - } else if(minutes != 0) { - ret = (minutes + MINUTES); - } - return ret; - } - - public long computeNextRollingTime(long durationSeconds, Date previousRolloverTime) { - long now = System.currentTimeMillis(); - long nextRolloverTime = (previousRolloverTime == null) ? now : previousRolloverTime.getTime(); - long durationMillis = (durationSeconds < 1 ? 
1 : durationSeconds) * 1000; - - while( nextRolloverTime <= now ) { - nextRolloverTime += durationMillis; - } - - return nextRolloverTime; - } - - private Date computeTopOfYearDate( int years){ - Date ret = null; - - Calendar calendarStart=Calendar.getInstance(); - calendarStart.add(Calendar.YEAR,years); - calendarStart.set(Calendar.MONTH,0); - calendarStart.set(Calendar.DAY_OF_MONTH,1); - calendarStart.set(Calendar.HOUR_OF_DAY,0); - calendarStart.clear(Calendar.MINUTE); - calendarStart.clear(Calendar.SECOND); - calendarStart.clear(Calendar.MILLISECOND); - - ret = calendarStart.getTime(); - - return ret; - } - - private Date computeTopofMonthDate(int months){ - - Date ret = null; - - Calendar calendarMonth=Calendar.getInstance(); - calendarMonth.set(Calendar.DAY_OF_MONTH,1); - calendarMonth.add(Calendar.MONTH, months); - calendarMonth.set(Calendar.HOUR_OF_DAY, 0); - calendarMonth.clear(Calendar.MINUTE); - calendarMonth.clear(Calendar.SECOND); - calendarMonth.clear(Calendar.MILLISECOND); - - ret = calendarMonth.getTime(); - - return ret; - } - - private Date computeTopOfWeekDate(int weeks) { - Date ret = null; - - Calendar calendarWeek=Calendar.getInstance(); - calendarWeek.set(Calendar.DAY_OF_WEEK,calendarWeek.getFirstDayOfWeek()); - calendarWeek.add(Calendar.WEEK_OF_YEAR,weeks); - calendarWeek.set(Calendar.HOUR_OF_DAY,0); - calendarWeek.clear(Calendar.MINUTE); - calendarWeek.clear(Calendar.SECOND); - calendarWeek.clear(Calendar.MILLISECOND); - - ret=calendarWeek.getTime(); - - return ret; - } - - private Date computeTopOfDayDate(int days){ - - Date ret = null; - - Calendar calendarDay=Calendar.getInstance(); - calendarDay.add(Calendar.DAY_OF_MONTH, days); - calendarDay.set(Calendar.HOUR_OF_DAY, 0); - calendarDay.clear(Calendar.MINUTE); - calendarDay.clear(Calendar.SECOND); - calendarDay.clear(Calendar.MILLISECOND); - - ret = calendarDay.getTime(); - - return ret; - - } - - private Date computeTopOfHourDate(int hours) { - Date ret = null; - - Calendar calendarHour=Calendar.getInstance(); - calendarHour.add(Calendar.HOUR_OF_DAY, hours); - calendarHour.clear(Calendar.MINUTE); - calendarHour.clear(Calendar.SECOND); - calendarHour.clear(Calendar.MILLISECOND); - - ret = calendarHour.getTime(); - - return ret; - } - - private Date computeTopOfMinuteDate(int mins) { - Date ret = null; - - Calendar calendarMin=Calendar.getInstance(); - calendarMin.add(Calendar.MINUTE,mins); - calendarMin.clear(Calendar.SECOND); - calendarMin.clear(Calendar.MILLISECOND); - - ret = calendarMin.getTime(); - - return ret; - } - - private int getTimeNumeral(String rollOverPeriod, String timeLiteral) throws Exception { - - int ret = Integer.valueOf(rollOverPeriod.substring(0, rollOverPeriod.length() - (rollOverPeriod.length() - rollOverPeriod.indexOf(timeLiteral)))); - - return ret; - } - - private String getTimeLiteral(String rollOverPeriod) throws Exception { - String ret = null; - if(StringUtils.isEmpty(rollOverPeriod)) { - throw new Exception("empty rollover period"); - } else if(rollOverPeriod.endsWith(MINUTES)) { - ret = MINUTES; - } else if(rollOverPeriod.endsWith(HOURS)) { - ret = HOURS; - } else if(rollOverPeriod.endsWith(DAYS)) { - ret = DAYS; - } else if(rollOverPeriod.endsWith(WEEKS)) { - ret = WEEKS; - } else if(rollOverPeriod.endsWith(MONTHS)) { - ret = MONTHS; - } else if(rollOverPeriod.endsWith(YEARS)) { - ret = YEARS; - } else { - throw new Exception(rollOverPeriod + ": invalid rollover period"); - } - return ret; - } - - public static void main(String[] args) { - // Test Method for RolloverTime 
calculation - // Set rollOverPeriod 10m,30m..,1h,2h,..1d,2d..,1w,2w..,1M,2M..1y..2y - // If nothing is set for rollOverPeriod or Duration default rollOverPeriod is 1 day - String rollOverPeriod = ""; - RollingTimeUtil rollingTimeUtil = new RollingTimeUtil(); - int duration = 86400; - Date nextRollOvertime = null; - - try { - nextRollOvertime = rollingTimeUtil.computeNextRollingTime(rollOverPeriod); - } catch (Exception e) { - rollOverPeriod = rollingTimeUtil.convertRolloverSecondsToRolloverPeriod(duration); - System.out.println(rollOverPeriod); - try { - nextRollOvertime = rollingTimeUtil.computeNextRollingTime(rollOverPeriod); - System.out.println(nextRollOvertime); - } catch (Exception e1) { - e1.printStackTrace(); - } - long rollOverTime = rollingTimeUtil.computeNextRollingTime(duration, null); - nextRollOvertime = new Date(rollOverTime); - } - } -} diff --git a/auth-plugin-atlas/pom.xml b/auth-plugin-atlas/pom.xml index 01386ce3bf..ba7f98e5aa 100644 --- a/auth-plugin-atlas/pom.xml +++ b/auth-plugin-atlas/pom.xml @@ -29,8 +29,8 @@ auth-plugin-atlas - 8 - 8 + 17 + 17 diff --git a/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizer.java b/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizer.java index dd645b473b..2019c68c6f 100644 --- a/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizer.java +++ b/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizer.java @@ -63,9 +63,7 @@ import java.util.Set; import static org.apache.atlas.authorization.atlas.authorizer.RangerAtlasAuthorizerUtil.*; -import static org.apache.atlas.authorize.AtlasAuthorizationUtils.getCurrentUserGroups; -import static org.apache.atlas.authorize.AtlasAuthorizationUtils.getCurrentUserName; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.*; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.*; public class RangerAtlasAuthorizer implements AtlasAuthorizer { diff --git a/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizerUtil.java b/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizerUtil.java index c78e986581..20d2bae20b 100644 --- a/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizerUtil.java +++ b/auth-plugin-atlas/src/main/java/org/apache/atlas/authorization/atlas/authorizer/RangerAtlasAuthorizerUtil.java @@ -33,12 +33,12 @@ import java.util.Set; import static org.apache.atlas.authorization.atlas.authorizer.RangerAtlasAuthorizer.CLASSIFICATION_PRIVILEGES; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_CLASSIFICATION; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_ENTITY_BUSINESS_METADATA; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_ENTITY_ID; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_ENTITY_LABEL; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_ENTITY_OWNER; -import static org.apache.atlas.services.atlas.RangerServiceAtlas.RESOURCE_ENTITY_TYPE; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_CLASSIFICATION; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_ENTITY_BUSINESS_METADATA; +import static 
org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_ENTITY_ID; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_ENTITY_LABEL; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_ENTITY_OWNER; +import static org.apache.atlas.authorization.utils.RangerAtlasConstants.RESOURCE_ENTITY_TYPE; public class RangerAtlasAuthorizerUtil { diff --git a/auth-plugin-atlas/src/main/java/org/apache/atlas/services/atlas/RangerServiceAtlas.java b/auth-plugin-atlas/src/main/java/org/apache/atlas/services/atlas/RangerServiceAtlas.java deleted file mode 100644 index cf12333b9a..0000000000 --- a/auth-plugin-atlas/src/main/java/org/apache/atlas/services/atlas/RangerServiceAtlas.java +++ /dev/null @@ -1,693 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.atlas.services.atlas; - -import com.google.gson.Gson; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.model.discovery.AtlasSearchResult; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.io.IOCase; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.atlas.plugin.client.BaseClient; -import org.apache.atlas.plugin.client.HadoopException; -import org.apache.atlas.plugin.model.RangerPolicy; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItem; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyItemAccess; -import org.apache.atlas.plugin.model.RangerPolicy.RangerPolicyResource; -import org.apache.atlas.plugin.model.RangerService; -import org.apache.atlas.plugin.model.RangerServiceDef; -import org.apache.atlas.plugin.policyengine.RangerPolicyEngine; -import org.apache.atlas.plugin.service.RangerBaseService; -import org.apache.atlas.plugin.service.ResourceLookupContext; -import org.apache.atlas.plugin.util.PasswordUtils; - -import javax.security.auth.Subject; -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.NewCookie; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class RangerServiceAtlas extends RangerBaseService { - private static final Log LOG = LogFactory.getLog(RangerServiceAtlas.class); - - public static final String RESOURCE_SERVICE = "atlas-service"; - public static final String RESOURCE_TYPE_CATEGORY = "type-category"; - public static final String 
RESOURCE_TYPE_NAME = "type"; - public static final String RESOURCE_ENTITY_TYPE = "entity-type"; - public static final String RESOURCE_ENTITY_CLASSIFICATION = "entity-classification"; - public static final String RESOURCE_CLASSIFICATION = "classification"; - public static final String RESOURCE_ENTITY_ID = "entity"; - public static final String RESOURCE_ENTITY_LABEL = "entity-label"; - public static final String RESOURCE_ENTITY_BUSINESS_METADATA = "entity-business-metadata"; - public static final String RESOURCE_ENTITY_OWNER = "owner"; - public static final String RESOURCE_RELATIONSHIP_TYPE = "relationship-type"; - public static final String RESOURCE_END_ONE_ENTITY_TYPE = "end-one-entity-type"; - public static final String RESOURCE_END_ONE_ENTITY_CLASSIFICATION = "end-one-entity-classification"; - public static final String RESOURCE_END_ONE_ENTITY_ID = "end-one-entity"; - public static final String RESOURCE_END_TWO_ENTITY_TYPE = "end-two-entity-type"; - public static final String RESOURCE_END_TWO_ENTITY_CLASSIFICATION = "end-two-entity-classification"; - public static final String RESOURCE_END_TWO_ENTITY_ID = "end-two-entity"; - public static final String SEARCH_FEATURE_POLICY_NAME = "Allow users to manage favorite searches"; - - public static final String ACCESS_TYPE_ENTITY_READ = "entity-read"; - public static final String ACCESS_TYPE_TYPE_READ = "type-read"; - public static final String ACCESS_TYPE_ENTITY_CREATE = "entity-create"; - public static final String ACCESS_TYPE_ENTITY_UPDATE = "entity-update"; - public static final String ACCESS_TYPE_ENTITY_DELETE = "entity-delete"; - public static final String ADMIN_USERNAME_DEFAULT = "admin"; - public static final String TAGSYNC_USERNAME_DEFAULT = "rangertagsync"; - public static final String ENTITY_TYPE_USER_PROFILE = "__AtlasUserProfile"; - public static final String ENTITY_TYPE_SAVED_SEARCH = "__AtlasUserSavedSearch"; - public static final String ENTITY_ID_USER_PROFILE = RangerPolicyEngine.USER_CURRENT; - public static final String ENTITY_ID_USER_SAVED_SEARCH= RangerPolicyEngine.USER_CURRENT + ":*"; - - - public static final String CONFIG_REST_ADDRESS = "atlas.rest.address"; - public static final String CONFIG_USERNAME = "username"; - public static final String CONFIG_PASSWORD = "password"; - public static final String ENTITY_NOT_CLASSIFIED = "_NOT_CLASSIFIED"; - - private static final String TYPE_ENTITY = "entity"; - private static final String TYPE_CLASSIFICATION = "classification"; - private static final String TYPE_STRUCT = "struct"; - private static final String TYPE_ENUM = "enum"; - private static final String TYPE_RELATIONSHIP = "relationship"; - private static final String TYPE_BUSINESS_METADATA = "business_metadata"; - - private static final String URL_LOGIN = "/j_spring_security_check"; - private static final String URL_GET_TYPESDEF_HEADERS = "/api/atlas/v2/types/typedefs/headers"; - private static final String URl_ENTITY_SEARCH = "v2/search/attribute?attrName=qualifiedName"; - - private static final String WEB_RESOURCE_CONTENT_TYPE = "application/x-www-form-urlencoded"; - private static final String CONNECTION_ERROR_MSG = " You can still save the repository and start creating" - + " policies, but you would not be able to use autocomplete for" - + " resource names. 
Check ranger_admin.log for more info."; - - public RangerServiceAtlas() { - super(); - } - - @Override - public void init(RangerServiceDef serviceDef, RangerService service) { - super.init(serviceDef, service); - } - - @Override - public Map validateConfig() throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceAtlas.validateConfig()"); - } - - AtlasServiceClient client = new AtlasServiceClient(getServiceName(), configs); - Map ret = client.validateConfig(); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceAtlas.validateConfig(): " + ret ); - } - - return ret; - } - - @Override - public List lookupResource(ResourceLookupContext context)throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceAtlas.lookupResource(" + context + ")"); - } - - AtlasServiceClient client = new AtlasServiceClient(getServiceName(), configs); - List ret = client.lookupResource(context); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceAtlas.lookupResource("+ context + "): " + ret); - } - - return ret; - } - - @Override - public List getDefaultRangerPolicies() throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceAtlas.getDefaultRangerPolicies()"); - } - - List ret = super.getDefaultRangerPolicies(); - String adminUser = getStringConfig("atlas.admin.user", ADMIN_USERNAME_DEFAULT); - String tagSyncUser = getStringConfig("atlas.rangertagsync.user", TAGSYNC_USERNAME_DEFAULT); - - boolean relationshipTypeAllowPublic = getBooleanConfig("atlas.default-policy.relationship-type.allow.public", true); - - - for (RangerPolicy defaultPolicy : ret) { - final Map policyResources = defaultPolicy.getResources(); - - // 1. add adminUser to every policyItem - for (RangerPolicyItem defaultPolicyItem : defaultPolicy.getPolicyItems()) { - defaultPolicyItem.getUsers().add(adminUser); - } - - // 2. add a policy-item for rangertagsync user with 'entity-read' permission in the policy for 'entity-type' - if (policyResources.containsKey(RESOURCE_ENTITY_TYPE) && !policyResources.containsKey(RESOURCE_CLASSIFICATION)) { - RangerPolicyItem policyItemForTagSyncUser = new RangerPolicyItem(); - - policyItemForTagSyncUser.setUsers(Collections.singletonList(tagSyncUser)); - policyItemForTagSyncUser.setGroups(Collections.singletonList(RangerPolicyEngine.GROUP_PUBLIC)); - policyItemForTagSyncUser.setAccesses(Collections.singletonList(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_READ))); - - defaultPolicy.getPolicyItems().add(policyItemForTagSyncUser); - } - - if (relationshipTypeAllowPublic) { - // 3. 
add 'public' group in the policy for 'relationship-type', - if (policyResources.containsKey(RangerServiceAtlas.RESOURCE_RELATIONSHIP_TYPE)) { - for (RangerPolicyItem defaultPolicyItem : defaultPolicy.getPolicyItems()) { - defaultPolicyItem.getGroups().add(RangerPolicyEngine.GROUP_PUBLIC); - } - } - } - - if (defaultPolicy.getName().contains("all") - && policyResources.containsKey(RangerServiceAtlas.RESOURCE_ENTITY_TYPE) - && StringUtils.isNotBlank(lookUpUser) && !policyResources.containsKey(RESOURCE_CLASSIFICATION)) { - RangerPolicyItem policyItemForLookupUser = new RangerPolicyItem(); - policyItemForLookupUser.setUsers(Collections.singletonList(lookUpUser)); - policyItemForLookupUser.setAccesses(Collections.singletonList(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_READ))); - policyItemForLookupUser.setDelegateAdmin(false); - defaultPolicy.getPolicyItems().add(policyItemForLookupUser); - } - - // add a policy-item for rangertagsync user with 'type-read' permission in the policy for 'type-category' - if (policyResources.containsKey(RangerServiceAtlas.RESOURCE_TYPE_CATEGORY)) { - RangerPolicyItem policyItemTypeReadForAll = new RangerPolicyItem(); - policyItemTypeReadForAll.setGroups(Collections.singletonList(RangerPolicyEngine.GROUP_PUBLIC)); - policyItemTypeReadForAll.setAccesses(Collections.singletonList(new RangerPolicyItemAccess(ACCESS_TYPE_TYPE_READ))); - defaultPolicy.getPolicyItems().add(policyItemTypeReadForAll); - } - } - - //4.add new policy for public group with entity-read, entity-create, entity-update, entity-delete for __AtlasUserProfile, __AtlasUserSavedSearch entity type - RangerPolicy searchFeaturePolicy = getSearchFeaturePolicy(); - ret.add(searchFeaturePolicy); - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceAtlas.getDefaultRangerPolicies()"); - } - - return ret; - } - - private RangerPolicy getSearchFeaturePolicy() { - RangerPolicy searchFeaturePolicy = new RangerPolicy(); - - searchFeaturePolicy.setName(SEARCH_FEATURE_POLICY_NAME); - searchFeaturePolicy.setService(serviceName); - searchFeaturePolicy.setResources(getSearchFeaturePolicyResource()); - searchFeaturePolicy.setPolicyItems(getSearchFeaturePolicyItem()); - - return searchFeaturePolicy; - } - - private List getSearchFeaturePolicyItem() { - List accesses = new ArrayList(); - - accesses.add(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_READ)); - accesses.add(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_CREATE)); - accesses.add(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_UPDATE)); - accesses.add(new RangerPolicyItemAccess(ACCESS_TYPE_ENTITY_DELETE)); - - RangerPolicyItem item = new RangerPolicyItem(accesses, Arrays.asList(RangerPolicyEngine.USER_CURRENT), null, null, null, false); - - return Collections.singletonList(item); - } - - private Map getSearchFeaturePolicyResource() { - Map resources = new HashMap<>(); - - resources.put(RESOURCE_ENTITY_TYPE, new RangerPolicyResource(Arrays.asList(ENTITY_TYPE_USER_PROFILE, ENTITY_TYPE_SAVED_SEARCH), false, false)); - resources.put(RESOURCE_ENTITY_CLASSIFICATION, new RangerPolicyResource("*")); - resources.put(RESOURCE_ENTITY_ID, new RangerPolicyResource(Arrays.asList(ENTITY_ID_USER_PROFILE, ENTITY_ID_USER_SAVED_SEARCH), false, false)); - - return resources; - } - - private static class AtlasServiceClient extends BaseClient { - private static final String[] TYPE_CATEGORIES = new String[] { "classification", "enum", "entity", "relationship", "struct" ,"business_metadata" }; - - Map> typesDef = new HashMap<>(); - - public AtlasServiceClient(String serviceName, 
Map serviceConfig) { - super(serviceName, serviceConfig); - } - - public Map validateConfig() { - Map ret = new HashMap<>(); - - loginToAtlas(Client.create()); - - BaseClient.generateResponseDataMap(true, "ConnectionTest Successful", "ConnectionTest Successful", null, null, ret); - - return ret; - } - - public List lookupResource(ResourceLookupContext lookupContext) { - final List ret = new ArrayList<>(); - final String userInput = lookupContext.getUserInput(); - final List currentValues = lookupContext.getResources().get(lookupContext.getResourceName()); - - switch(lookupContext.getResourceName()) { - case RESOURCE_TYPE_CATEGORY: { - for (String typeCategory : TYPE_CATEGORIES) { - addIfStartsWithAndNotExcluded(ret, typeCategory, userInput, currentValues); - } - } - break; - - case RESOURCE_TYPE_NAME: { - refreshTypesDefs(); - - final List typeCategories = lookupContext.getResources().get(RESOURCE_TYPE_CATEGORY); - - if (emptyOrContainsMatch(typeCategories, TYPE_CLASSIFICATION)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_CLASSIFICATION), userInput, currentValues); - } - - if (emptyOrContainsMatch(typeCategories, TYPE_ENTITY)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_ENTITY), userInput, currentValues); - } - - if (emptyOrContainsMatch(typeCategories, TYPE_ENUM)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_ENUM), userInput, currentValues); - } - - if (emptyOrContainsMatch(typeCategories, TYPE_STRUCT)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_STRUCT), userInput, currentValues); - } - - if (emptyOrContainsMatch(typeCategories, TYPE_RELATIONSHIP)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_RELATIONSHIP), userInput, currentValues); - } - - if (emptyOrContainsMatch(typeCategories, TYPE_BUSINESS_METADATA)) { - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_BUSINESS_METADATA), userInput, currentValues); - } - } - break; - - case RESOURCE_END_ONE_ENTITY_TYPE: - case RESOURCE_END_TWO_ENTITY_TYPE: - case RESOURCE_ENTITY_TYPE: { - refreshTypesDefs(); - - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_ENTITY), userInput, currentValues); - } - break; - - case RESOURCE_END_ONE_ENTITY_CLASSIFICATION: - case RESOURCE_END_TWO_ENTITY_CLASSIFICATION: - case RESOURCE_ENTITY_CLASSIFICATION: { - refreshTypesDefs(); - - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_CLASSIFICATION), userInput, currentValues); - } - break; - - case RESOURCE_ENTITY_ID: { - List searchTypes = lookupContext.getResources().get("entity-type"); - - if (searchTypes != null && searchTypes.size() == 1) { - List values = searchEntities(userInput, searchTypes.get(0)); - - addIfStartsWithAndNotExcluded(ret, values, userInput, currentValues); - } - } - break; - - case RESOURCE_RELATIONSHIP_TYPE: { - refreshTypesDefs(); - addIfStartsWithAndNotExcluded(ret, typesDef.get(TYPE_RELATIONSHIP), userInput, currentValues); - - } - break; - - case RESOURCE_END_ONE_ENTITY_ID: { - - List searchTypes = lookupContext.getResources().get(RESOURCE_END_ONE_ENTITY_TYPE); - - if (searchTypes != null && searchTypes.size() == 1) { - List values = searchEntities(userInput, searchTypes.get(0)); - - addIfStartsWithAndNotExcluded(ret, values, userInput, currentValues); - } - - } - break; - - case RESOURCE_END_TWO_ENTITY_ID: { - List searchTypes = lookupContext.getResources().get(RESOURCE_END_TWO_ENTITY_TYPE); - - if (searchTypes != null && searchTypes.size() == 1) { - List values = searchEntities(userInput, searchTypes.get(0)); - - addIfStartsWithAndNotExcluded(ret, values, 
userInput, currentValues); - } - } - break; - - default: { - ret.add(lookupContext.getResourceName()); - } - } - - return ret; - } - - private ClientResponse loginToAtlas(Client client) { - ClientResponse ret = null; - HadoopException excp = null; - String loginUrl = null; - - for (String atlasUrl : getAtlasUrls()) { - try { - loginUrl = atlasUrl + URL_LOGIN; - - WebResource webResource = client.resource(loginUrl); - MultivaluedMap formData = new MultivaluedMapImpl(); - String password = null; - - try { - password = PasswordUtils.decryptPassword(getPassword()); - } catch (Exception ex) { - LOG.info("Password decryption failed; trying Atlas connection with received password string"); - } - - if (password == null) { - password = getPassword(); - } - - formData.add("j_username", getUserName()); - formData.add("j_password", password); - - try { - ret = webResource.type(WEB_RESOURCE_CONTENT_TYPE).post(ClientResponse.class, formData); - } catch (Exception e) { - LOG.error("failed to login to Atlas at " + loginUrl, e); - } - - if (ret != null) { - break; - } - } catch (Throwable t) { - String msgDesc = "Exception while login to Atlas at : " + loginUrl; - - LOG.error(msgDesc, t); - - excp = new HadoopException(msgDesc, t); - - excp.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + CONNECTION_ERROR_MSG, null, null); - } - } - - if (ret == null) { - if (excp == null) { - String msgDesc = "Exception while login to Atlas at : " + loginUrl; - - excp = new HadoopException(msgDesc); - - excp.generateResponseDataMap(false, "", msgDesc + CONNECTION_ERROR_MSG, null, null); - } - - throw excp; - } - - return ret; - } - - private boolean refreshTypesDefs() { - boolean ret = false; - Subject subj = getLoginSubject(); - - if (subj == null) { - return ret; - } - - Map> typesDef = Subject.doAs(subj, new PrivilegedAction>>() { - @Override - public Map> run() { - Map> ret = null; - - for (String atlasUrl : getAtlasUrls()) { - Client client = null; - - try { - client = Client.create(); - - ClientResponse loginResponse = loginToAtlas(client); - WebResource webResource = client.resource(atlasUrl + URL_GET_TYPESDEF_HEADERS); - WebResource.Builder builder = webResource.getRequestBuilder(); - - for (NewCookie cook : loginResponse.getCookies()) { - builder = builder.cookie(cook); - } - - ClientResponse response = builder.get(ClientResponse.class); - - if (response != null) { - String jsonString = response.getEntity(String.class); - Gson gson = new Gson(); - List types = gson.fromJson(jsonString, List.class); - - ret = new HashMap<>(); - - for (Object type : types) { - if (type instanceof Map) { - Map typeDef = (Map) type; - Object name = typeDef.get("name"); - Object category = typeDef.get("category"); - - if (name != null && category != null) { - String strCategory = category.toString().toLowerCase(); - List categoryList = ret.get(strCategory); - - if (categoryList == null) { - categoryList = new ArrayList<>(); - - ret.put(strCategory, categoryList); - } - - categoryList.add(name.toString()); - } - } - } - - break; - } - } catch (Throwable t) { - String msgDesc = "Exception while getting Atlas Resource List."; - - LOG.error(msgDesc, t); - } finally { - if (client != null) { - client.destroy(); - } - } - } - - return ret; - } - }); - - if (typesDef != null) { - this.typesDef = typesDef; - - ret = true; - } - - return ret; - } - - private List searchEntities(String userInput, String entityType) { - if( LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceAtlas.searchEntities(userInput=" + userInput + ", 
entityType=" + entityType + ")"); - } - - Subject subj = getLoginSubject(); - - if (subj == null) { - return null; - } - - List list = Subject.doAs(subj, new PrivilegedAction>() { - @Override - public List run() { - List ret = null; - - for (String atlasUrl : getAtlasUrls()) { - Client client = null; - - try { - client = Client.create(); - - ClientResponse loginResponse = loginToAtlas(client); - String entitySearcApiUrl = atlasUrl + "/api/atlas/" + URl_ENTITY_SEARCH; - StringBuilder searchUrl = new StringBuilder(); - - searchUrl.append(entitySearcApiUrl) - .append("&typeName=") - .append(entityType) - .append("&attrValuePrefix=" + userInput + "&limit=25"); - - - WebResource webResource = client.resource(searchUrl.toString()); - WebResource.Builder builder = webResource.getRequestBuilder(); - - for (NewCookie cook : loginResponse.getCookies()) { - builder = builder.cookie(cook); - } - - ClientResponse response = builder.get(ClientResponse.class); - - if (response != null) { - String jsonString = response.getEntity(String.class); - Gson gson = new Gson(); - AtlasSearchResult searchResult = gson.fromJson(jsonString, AtlasSearchResult.class); - - ret = new ArrayList<>(); - - if (searchResult != null) { - List entityHeaderList = searchResult.getEntities(); - - for (AtlasEntityHeader entity : entityHeaderList) { - ret.add((String) entity.getAttribute("qualifiedName")); - } - } - } - } catch (Throwable t) { - String msgDesc = "Exception while getting Atlas Entity Resource List."; - - LOG.error(msgDesc, t); - } finally { - if (client != null) { - client.destroy(); - } - } - } - - return ret; - } - }); - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceAtlas.searchEntities(userInput=" + userInput + ", entityType=" + entityType + "): " + list); - } - - return list; - } - - String[] getAtlasUrls() { - String urlString = connectionProperties.get(CONFIG_REST_ADDRESS); - String[] ret = urlString == null ? 
new String[0] : urlString.split(","); - - // remove separator at the end - for (int i = 0; i < ret.length; i++) { - String url = ret[i]; - - while (url.length() > 0 && url.charAt(url.length() - 1) == '/') { - url = url.substring(0, url.length() - 1); - } - - ret[i] = url; - } - - return ret; - } - - String getUserName() { - return connectionProperties.get(CONFIG_USERNAME); - } - - String getPassword() { - return connectionProperties.get(CONFIG_PASSWORD); - } - - boolean emptyOrContainsMatch(List list, String value) { - if (list == null || list.isEmpty()) { - return true; - } - - for (String item : list) { - if (StringUtils.equalsIgnoreCase(item, value) || FilenameUtils.wildcardMatch(value, item, IOCase.INSENSITIVE)) { - return true; - } - } - - return false; - } - - void addIfStartsWithAndNotExcluded(List list, List values, String prefix, List excludeList) { - if (list == null) { - return; - } - - if (values == null) { - addIfStartsWithAndNotExcluded(list, ENTITY_NOT_CLASSIFIED, prefix, excludeList); - } else { - for (String value : values) { - addIfStartsWithAndNotExcluded(list, value, prefix, excludeList); - } - } - } - - void addIfStartsWithAndNotExcluded(List list, String value, String prefix, List excludeList) { - if (value == null || list == null) { - return; - } - - if (prefix != null && !value.startsWith(prefix)) { - return; - } - - if (excludeList != null && excludeList.contains(value)) { - return; - } - - list.add(value); - } - } - - String getStringConfig(String configName, String defaultValue) { - String val = service.getConfigs().get(configName); - - return StringUtils.isBlank(val) ? defaultValue : val; - } - - boolean getBooleanConfig(String configName, boolean defaultValue) { - String val = service.getConfigs().get(configName); - - return StringUtils.isBlank(val) ? defaultValue : Boolean.parseBoolean(val); - } -} diff --git a/build.sh b/build.sh index d60c540769..b72534a720 100755 --- a/build.sh +++ b/build.sh @@ -16,6 +16,9 @@ # limitations under the License. 
# +java -version + + mkdir -p ~/.m2/repository/org/keycloak wget https://atlan-public.s3.eu-west-1.amazonaws.com/artifact/keycloak-15.0.2.1.zip @@ -24,9 +27,9 @@ unzip -o keycloak-15.0.2.1.zip -d ~/.m2/repository/org echo "Maven Building" if [ "$1" == "build_without_dashboard" ]; then - mvn -pl '!test-tools,!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge,!dashboardv2,!dashboardv3' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipOverlay -DskipEnunciate=true package -Pdist + mvn -pl '!test-tools,!dashboardv2,!dashboardv3' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipOverlay -DskipEnunciate=true package -Pdist else - mvn -pl '!test-tools,!addons/hdfs-model,!addons/hive-bridge,!addons/hive-bridge-shim,!addons/falcon-bridge-shim,!addons/falcon-bridge,!addons/sqoop-bridge,!addons/sqoop-bridge-shim,!addons/hbase-bridge,!addons/hbase-bridge-shim,!addons/hbase-testing-util,!addons/kafka-bridge,!addons/impala-hook-api,!addons/impala-bridge-shim,!addons/impala-bridge' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipEnunciate=true package -Pdist + mvn -pl '!test-tools' -Dmaven.test.skip -DskipTests -Drat.skip=true -DskipEnunciate=true package -Pdist fi echo "[DEBUG listing distro/target" diff --git a/client-auth/pom.xml b/client-auth/pom.xml index 9087a98bed..2016aa577c 100644 --- a/client-auth/pom.xml +++ b/client-auth/pom.xml @@ -30,8 +30,8 @@ client-auth - 8 - 8 + 17 + 17 15.1.0 diff --git a/client-heracles/pom.xml b/client-heracles/pom.xml index b812aefb65..63276fcfd4 100644 --- a/client-heracles/pom.xml +++ b/client-heracles/pom.xml @@ -30,8 +30,8 @@ client-heracles - 8 - 8 + 17 + 17 diff --git a/client/client-v1/pom.xml b/client/client-v1/pom.xml deleted file mode 100644 index 7d017cfa68..0000000000 --- a/client/client-v1/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - atlas-client - org.apache.atlas - 3.0.0-SNAPSHOT - - 4.0.0 - - atlas-client-v1 - - - - org.apache.atlas - atlas-client-common - ${project.version} - - - org.apache.atlas - atlas-common - ${project.version} - - - diff --git a/client/client-v1/src/main/java/org/apache/atlas/AtlasAdminClient.java b/client/client-v1/src/main/java/org/apache/atlas/AtlasAdminClient.java deleted file mode 100644 index d22963d470..0000000000 --- a/client/client-v1/src/main/java/org/apache/atlas/AtlasAdminClient.java +++ /dev/null @@ -1,174 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas; - -import org.apache.atlas.model.metrics.AtlasMetrics; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.configuration.Configuration; - -import java.util.Arrays; - - -/** - * An application that allows users to run admin commands against an Atlas server. - * - * The application uses {@link AtlasClient} to send REST requests to the Atlas server. The details of connections - * and other configuration is specified in the Atlas properties file. - * Exit status of the application will be as follows: - *
  • 0: successful execution
  • 1: error in options used for the application
  • -1/255: application error
  • - */ -public class AtlasAdminClient { - - private static final Option STATUS = new Option("status", false, "Get the status of an atlas instance"); - private static final Option STATS = new Option("stats", false, "Get the metrics of an atlas instance"); - private static final Option CREDENTIALS = new Option("u", true, "Authorized atlas user credentials (:)"); - - private static final Options OPTIONS = new Options(); - - private static final int INVALID_OPTIONS_STATUS = 1; - private static final int PROGRAM_ERROR_STATUS = -1; - - static { - OPTIONS.addOption(STATUS); - OPTIONS.addOption(STATS); - OPTIONS.addOption(CREDENTIALS); - } - - public static void main(String[] args) throws AtlasException, ParseException { - AtlasAdminClient atlasAdminClient = new AtlasAdminClient(); - int result = atlasAdminClient.run(args); - System.exit(result); - } - - private int run(String[] args) throws AtlasException { - CommandLine commandLine = parseCommandLineOptions(args); - Configuration configuration = ApplicationProperties.get(); - String[] atlasServerUri = configuration.getStringArray(AtlasConstants.ATLAS_REST_ADDRESS_KEY); - - if (atlasServerUri == null || atlasServerUri.length == 0) { - atlasServerUri = new String[] { AtlasConstants.DEFAULT_ATLAS_REST_ADDRESS }; - } - - return handleCommand(commandLine, atlasServerUri); - } - - private int handleCommand(CommandLine commandLine, String[] atlasServerUri) throws AtlasException { - AtlasClient atlasClient; - - String[] providedUserPassword = getUserPassword(commandLine); - - int cmdStatus = PROGRAM_ERROR_STATUS; - if (commandLine.hasOption(STATUS.getOpt())) { - atlasClient = initAtlasClient(atlasServerUri, providedUserPassword); // Status is open API, no auth needed - try { - System.out.println(atlasClient.getAdminStatus()); - cmdStatus = 0; - } catch (AtlasServiceException e) { - System.err.println("Could not retrieve status of the server at " + Arrays.toString(atlasServerUri)); - printStandardHttpErrorDetails(e); - } - } else if (commandLine.hasOption(STATS.getOpt())) { - atlasClient = initAtlasClient(atlasServerUri, providedUserPassword); // Stats/metrics is open API, no auth needed - try { - AtlasMetrics atlasMetrics = atlasClient.getAtlasMetrics(); - String json = AtlasType.toJson(atlasMetrics); - System.out.println(json); - cmdStatus = 0; - } catch (AtlasServiceException e) { - System.err.println("Could not retrieve metrics of the server at " + Arrays.toString(atlasServerUri)); - printStandardHttpErrorDetails(e); - } - } else { - System.err.println("Unsupported option. Refer to usage for valid options."); - printUsage(); - } - - return cmdStatus; - } - - private String[] getUserPassword(CommandLine commandLine) { - String[] basicAuthUsernamePassword = null; - - // Parse the provided username password - if (commandLine.hasOption(CREDENTIALS.getOpt())) { - String value = commandLine.getOptionValue(CREDENTIALS.getOpt()); - if (value != null) { - basicAuthUsernamePassword = value.split(":"); - } - } - if (basicAuthUsernamePassword == null || basicAuthUsernamePassword.length != 2) { - System.err.println("Invalid credentials. 
Format: :"); - } - return basicAuthUsernamePassword; - } - - private AtlasClient initAtlasClient(final String[] atlasServerUri, final String[] providedUserNamePassword) throws AtlasException { - AtlasClient atlasClient; - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - if (providedUserNamePassword == null || providedUserNamePassword.length < 2) { - atlasClient = new AtlasClient(atlasServerUri, AuthenticationUtil.getBasicAuthenticationInput()); - } else { - atlasClient = new AtlasClient(atlasServerUri, providedUserNamePassword); - } - } else { - atlasClient = new AtlasClient(atlasServerUri); - } - return atlasClient; - } - - private void printStandardHttpErrorDetails(AtlasServiceException e) { - System.err.println("Error details: "); - System.err.println("HTTP Status: " + e.getStatus().getStatusCode() + "," - + e.getStatus().getReasonPhrase()); - System.err.println("Exception message: " + e.getMessage()); - } - - private CommandLine parseCommandLineOptions(String[] args) { - if (args.length == 0) { - printUsage(); - } - CommandLineParser parser = new GnuParser(); - CommandLine commandLine = null; - try { - commandLine = parser.parse(OPTIONS, args); - } catch (ParseException e) { - System.err.println("Could not parse command line options. " + e.getMessage()); - printUsage(); - } - return commandLine; - } - - private void printUsage() { - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp("atlas_admin.py", OPTIONS); - System.exit(AtlasAdminClient.INVALID_OPTIONS_STATUS); - } - -} diff --git a/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java b/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java deleted file mode 100644 index 11a8bf8ba9..0000000000 --- a/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java +++ /dev/null @@ -1,938 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.annotations.VisibleForTesting; -import com.sun.jersey.api.client.WebResource; -import org.apache.atlas.model.legacy.EntityResult; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.instance.Struct; -import org.apache.atlas.v1.model.typedef.AttributeDefinition; -import org.apache.atlas.v1.model.typedef.TraitTypeDefinition; -import org.apache.atlas.v1.model.typedef.TypesDef; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.atlas.utils.AtlasJson; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.typesystem.types.DataTypes; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.ws.rs.HttpMethod; -import javax.ws.rs.core.Cookie; -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import java.io.IOException; -import java.util.*; - -/** - * Client for metadata. - */ -@Deprecated -public class AtlasClient extends AtlasBaseClient { - private static final Logger LOG = LoggerFactory.getLogger(AtlasClient.class); - - public static final String TYPE = "type"; - public static final String TYPENAME = "typeName"; - public static final String GUID = "GUID"; - public static final String ENTITIES = "entities"; - public static final String GUID_ASSIGNMENTS = "guidAssignments"; - - public static final String DEFINITION = "definition"; - public static final String ERROR = "error"; - public static final String STACKTRACE = "stackTrace"; - public static final String REQUEST_ID = "requestId"; - public static final String RESULTS = "results"; - public static final String COUNT = "count"; - public static final String ROWS = "rows"; - public static final String DATATYPE = "dataType"; - public static final String STATUS = "Status"; - - public static final String EVENTS = "events"; - public static final String START_KEY = "startKey"; - public static final String NUM_RESULTS = "count"; - - public static final String URI_ENTITY = "entities"; - public static final String URI_ENTITY_AUDIT = "audit"; - public static final String URI_SEARCH = "discovery/search"; - public static final String URI_NAME_LINEAGE = "lineage/hive/table"; - public static final String URI_LINEAGE = "lineage/"; - public static final String URI_TRAITS = "traits"; - public static final String TRAITS = "traits"; - public static final String TRAIT_DEFINITIONS = "traitDefinitions"; - - - public static final String QUERY_TYPE = "queryType"; - public static final String ATTRIBUTE_NAME = "property"; - public static final String ATTRIBUTE_VALUE = "value"; - - public static final String SUPERTYPE = "supertype"; - public static final String NOT_SUPERTYPE = "notsupertype"; - - public static final String ASSET_TYPE = "Asset"; - public static final String NAME = "name"; - public static final String DESCRIPTION = "description"; - public 
static final String OWNER = "owner"; - public static final String CREATE_TIME = "createTime"; - - public static final String INFRASTRUCTURE_SUPER_TYPE = "Infrastructure"; - public static final String DATA_SET_SUPER_TYPE = "Catalog"; - public static final String PROCESS_SUPER_TYPE = "Process"; - public static final String PROCESS_ATTRIBUTE_INPUTS = "inputs"; - public static final String PROCESS_ATTRIBUTE_OUTPUTS = "outputs"; - - public static final String REFERENCEABLE_SUPER_TYPE = "Referenceable"; - public static final String QUALIFIED_NAME = "qualifiedName"; - public static final String REFERENCEABLE_ATTRIBUTE_NAME = QUALIFIED_NAME; - - public static final String UNKNOWN_STATUS = "Unknown status"; - - /** - * Constructor for AtlasClient with cookie params as header - * @param baseUrl - * @param cookieName - * @param value - * @param path - * @param domain - */ - - public AtlasClient(String[] baseUrl, String cookieName, String value, String path, String domain) { - super(baseUrl, new Cookie(cookieName, value, path, domain)); - } - - /** - * Constructor for AtlasClient with cookie as header - * @param baseUrl - * @param cookie - */ - - public AtlasClient(String[] baseUrl, Cookie cookie) { - super(baseUrl, cookie); - } - - - // New constructor for Basic auth - public AtlasClient(String[] baseUrl, String[] basicAuthUserNamePassword) { - super(baseUrl, basicAuthUserNamePassword); - } - - /** - * Create a new Atlas client. - * @param baseUrls A list of URLs that point to an ensemble of Atlas servers working in - * High Availability mode. The client will automatically determine the - * active instance on startup and also when there is a scenario of - * failover. - */ - public AtlasClient(String... baseUrls) throws AtlasException { - this(getCurrentUGI(), baseUrls); - } - - /** - * Create a new Atlas client. - * @param ugi UserGroupInformation - * @param doAsUser - * @param baseUrls A list of URLs that point to an ensemble of Atlas servers working in - * High Availability mode. The client will automatically determine the - * active instance on startup and also when there is a scenario of - * failover. - */ - public AtlasClient(UserGroupInformation ugi, String doAsUser, String... baseUrls) { - initializeState(baseUrls, ugi, doAsUser); - } - - private AtlasClient(UserGroupInformation ugi, String[] baseUrls) { - this(ugi, ugi.getShortUserName(), baseUrls); - } - - //Used by LocalAtlasClient - protected AtlasClient() { - //Do nothing - } - - @VisibleForTesting - public AtlasClient(Configuration configuration, String[] baseUrl, String[] basicAuthUserNamePassword) { - super(configuration, baseUrl, basicAuthUserNamePassword); - } - - @Override - protected API formatPathParameters(final API api, final String... params) { - return new API(String.format(api.getPath(), params), api.getMethod(), api.getExpectedStatus()); - } - - @VisibleForTesting - public AtlasClient(Configuration configuration, String... 
baseUrls) throws AtlasException { - initializeState(configuration, baseUrls, getCurrentUGI(), getCurrentUGI().getShortUserName()); - } - - @VisibleForTesting - AtlasClient(WebResource service, Configuration configuration) { - super(service, configuration); - } - - public WebResource getResource() { - return service; - } - - public static class API_V1 extends API { - //Admin operations - public static final API_V1 VERSION = new API_V1(BASE_URI + ADMIN_VERSION, HttpMethod.GET, Response.Status.OK); - public static final API_V1 STATUS = new API_V1(BASE_URI + ADMIN_STATUS, HttpMethod.GET, Response.Status.OK); - - //Type operations - public static final API_V1 CREATE_TYPE = new API_V1(BASE_URI + TYPES, HttpMethod.POST, Response.Status.CREATED); - public static final API_V1 UPDATE_TYPE = new API_V1(BASE_URI + TYPES, HttpMethod.PUT, Response.Status.OK); - public static final API_V1 GET_TYPE = new API_V1(BASE_URI + TYPES, HttpMethod.GET, Response.Status.OK); - public static final API_V1 LIST_TYPES = new API_V1(BASE_URI + TYPES, HttpMethod.GET, Response.Status.OK); - public static final API_V1 LIST_TRAIT_TYPES = new API_V1(BASE_URI + TYPES + "?type=trait", HttpMethod.GET, Response.Status.OK); - - //Entity operations - public static final API_V1 CREATE_ENTITY = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.POST, Response.Status.CREATED); - public static final API_V1 GET_ENTITY = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - public static final API_V1 UPDATE_ENTITY = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.PUT, Response.Status.OK); - public static final API_V1 UPDATE_ENTITY_PARTIAL = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.POST, Response.Status.OK); - public static final API_V1 LIST_ENTITIES = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - public static final API_V1 DELETE_ENTITIES = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.DELETE, Response.Status.OK); - public static final API_V1 DELETE_ENTITY = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.DELETE, Response.Status.OK); - - //audit operation - public static final API_V1 LIST_ENTITY_AUDIT = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - - //Trait operations - public static final API_V1 ADD_TRAITS = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.POST, Response.Status.CREATED); - public static final API_V1 DELETE_TRAITS = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.DELETE, Response.Status.OK); - public static final API_V1 LIST_TRAITS = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - public static final API_V1 GET_ALL_TRAIT_DEFINITIONS = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - public static final API_V1 GET_TRAIT_DEFINITION = new API_V1(BASE_URI + URI_ENTITY, HttpMethod.GET, Response.Status.OK); - - //Search operations - public static final API_V1 SEARCH = new API_V1(BASE_URI + URI_SEARCH, HttpMethod.GET, Response.Status.OK); - public static final API_V1 SEARCH_DSL = new API_V1(BASE_URI + URI_SEARCH + "/dsl", HttpMethod.GET, Response.Status.OK); - public static final API_V1 SEARCH_FULL_TEXT = new API_V1(BASE_URI + URI_SEARCH + "/fulltext", HttpMethod.GET, Response.Status.OK); - public static final API_V1 GREMLIN_SEARCH = new API_V1(BASE_URI + URI_SEARCH + "/gremlin", HttpMethod.GET, Response.Status.OK); - - //Lineage operations based on dataset name - public static final API_V1 NAME_LINEAGE_INPUTS_GRAPH = new API_V1(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK); - public static final API_V1 
NAME_LINEAGE_OUTPUTS_GRAPH = new API_V1(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK); - public static final API_V1 NAME_LINEAGE_SCHEMA = new API_V1(BASE_URI + URI_NAME_LINEAGE, HttpMethod.GET, Response.Status.OK); - - //Lineage operations based on entity id of the dataset - public static final API_V1 LINEAGE_INPUTS_GRAPH = new API_V1(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK); - public static final API_V1 LINEAGE_OUTPUTS_GRAPH = new API_V1(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK); - public static final API_V1 LINEAGE_SCHEMA = new API_V1(BASE_URI + URI_LINEAGE, HttpMethod.GET, Response.Status.OK); - - private API_V1(String path, String method, Response.Status status) { - super(path, method, status); - } - } - - /** - * Register the given type(meta model) - * @param typeAsJson type definition a jaon - * @return result json object - * @throws AtlasServiceException - */ - public List createType(String typeAsJson) throws AtlasServiceException { - LOG.debug("Creating type definition: {}", typeAsJson); - ObjectNode response = callAPIWithBody(API_V1.CREATE_TYPE, typeAsJson); - List results = extractResults(response, AtlasClient.TYPES, new ExtractOperation() { - @Override - String extractElement(ObjectNode element) { - return element.get(AtlasClient.NAME).asText(); - } - }); - LOG.debug("Create type definition returned results: {}", results); - return results; - } - - /** - * Register the given type(meta model) - * @param typeDef type definition - * @return result json object - * @throws AtlasServiceException - */ - public List createType(TypesDef typeDef) throws AtlasServiceException { - return createType(AtlasType.toV1Json(typeDef)); - } - - /** - * Creates trait type with specifiedName, superTraits and attributes - * @param traitName the name of the trait type - * @param superTraits the list of super traits from which this trait type inherits attributes - * @param attributeDefinitions the list of attributes of the trait type - * @return the list of types created - * @throws AtlasServiceException - */ - public List createTraitType(String traitName, Set superTraits, AttributeDefinition... 
attributeDefinitions) throws AtlasServiceException { - TraitTypeDefinition piiTrait = TypesUtil.createTraitTypeDef(traitName, null, superTraits, Arrays.asList(attributeDefinitions)); - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), - Collections.emptyList()); - - LOG.debug("Creating trait type {} {}", traitName, AtlasType.toV1Json(piiTrait)); - - return createType(AtlasType.toV1Json(typesDef)); - } - - /** - * Creates simple trait type with specifiedName with no superTraits or attributes - * @param traitName the name of the trait type - * @return the list of types created - * @throws AtlasServiceException - */ - public List createTraitType(String traitName) throws AtlasServiceException { - return createTraitType(traitName, null); - } - - /** - * Register the given type(meta model) - * @param typeAsJson type definition a jaon - * @return result json object - * @throws AtlasServiceException - */ - public List updateType(String typeAsJson) throws AtlasServiceException { - LOG.debug("Updating type definition: {}", typeAsJson); - ObjectNode response = callAPIWithBody(API_V1.UPDATE_TYPE, typeAsJson); - List results = extractResults(response, AtlasClient.TYPES, new ExtractOperation() { - @Override - String extractElement(ObjectNode element) { - return element.get(AtlasClient.NAME).asText(); - } - }); - LOG.debug("Update type definition returned results: {}", results); - return results; - } - - /** - * Register the given type(meta model) - * @param typeDef type definition - * @return result json object - * @throws AtlasServiceException - */ - public List updateType(TypesDef typeDef) throws AtlasServiceException { - return updateType(AtlasType.toV1Json(typeDef)); - } - - /** - * Returns all type names in the system - * @return list of type names - * @throws AtlasServiceException - */ - public List listTypes() throws AtlasServiceException { - final ObjectNode jsonResponse = callAPIWithQueryParams(API_V1.LIST_TYPES, null); - return extractStringList(jsonResponse); - } - - /** - * Returns all type names with the given category - * @param category - * @return list of type names - * @throws AtlasServiceException - */ - public List listTypes(final DataTypes.TypeCategory category) throws AtlasServiceException { - final API api = API_V1.LIST_TYPES; - ObjectNode response = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api.getNormalizedPath()); - resource = resource.queryParam(TYPE, category.name()); - return resource; - } - }); - return extractResults(response, AtlasClient.RESULTS, new ExtractOperation()); - } - - /** - * Return the list of type names in the type system which match the specified filter. 
- * - * @param category returns types whose category is the given typeCategory - * @param superType returns types which contain the given supertype - * @param notSupertype returns types which do not contain the given supertype - * - * Its possible to specify combination of these filters in one request and the conditions are combined with AND - * For example, typeCategory = TRAIT && supertype contains 'X' && supertype !contains 'Y' - * If there is no filter, all the types are returned - * @return list of type names - */ - public List listTypes(final DataTypes.TypeCategory category, final String superType, - final String notSupertype) throws AtlasServiceException { - final API api = API_V1.LIST_TYPES; - ObjectNode response = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - resource = resource.queryParam(TYPE, category.name()); - resource = resource.queryParam(SUPERTYPE, superType); - resource = resource.queryParam(NOT_SUPERTYPE, notSupertype); - return resource; - } - }); - return extractStringList(response); - } - - public TypesDef getType(String typeName) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.GET_TYPE, null, typeName); - String typeJson = AtlasType.toJson(response.get(DEFINITION)); - return AtlasType.fromV1Json(typeJson, TypesDef.class); - } - - /** - * Create the given entity - * @param entities entity(type instance) as json - * @return json array of guids - * @throws AtlasServiceException - */ - protected List createEntity(ArrayNode entities) throws AtlasServiceException { - LOG.debug("Creating entities: {}", entities); - ObjectNode response = callAPIWithBody(API_V1.CREATE_ENTITY, entities.toString()); - List results = extractEntityResult(response).getCreatedEntities(); - LOG.debug("Create entities returned results: {}", results); - return results; - } - - protected EntityResult extractEntityResult(ObjectNode response) throws AtlasServiceException { - return EntityResult.fromString(response.toString()); - } - - /** - * Create the given entity - * @param entitiesAsJson entity(type instance) as json - * @return json array of guids - * @throws AtlasServiceException - */ - public List createEntity(String... entitiesAsJson) throws AtlasServiceException { - try { - return createEntity(AtlasJson.parseToV1ArrayNode(Arrays.asList(entitiesAsJson))); - } catch (IOException excp) { - throw new AtlasServiceException(excp); - } - } - - public List createEntity(Referenceable... entities) throws AtlasServiceException { - return createEntity(Arrays.asList(entities)); - } - - public List createEntity(Collection entities) throws AtlasServiceException { - ArrayNode entityArray = getEntitiesArray(entities); - return createEntity(entityArray); - } - - private ArrayNode getEntitiesArray(Collection entities) { - ArrayNode entityArray = AtlasJson.createV1ArrayNode(); - for (Referenceable entity : entities) { - entityArray.add(AtlasType.toV1Json(entity)); - } - return entityArray; - } - - /** - * Replaces entity definitions identified by their guid or unique attribute - * Updates properties set in the definition for the entity corresponding to guid - * @param entities entities to be updated - * @return json array of guids which were updated/created - * @throws AtlasServiceException - */ - public EntityResult updateEntities(Referenceable... 
entities) throws AtlasServiceException { - return updateEntities(Arrays.asList(entities)); - } - - protected EntityResult updateEntities(ArrayNode entities) throws AtlasServiceException { - LOG.debug("Updating entities: {}", entities); - ObjectNode response = callAPIWithBody(API_V1.UPDATE_ENTITY, entities.toString()); - EntityResult results = extractEntityResult(response); - LOG.debug("Update entities returned results: {}", results); - return results; - } - - public EntityResult updateEntities(Collection entities) throws AtlasServiceException { - ArrayNode entitiesArray = getEntitiesArray(entities); - return updateEntities(entitiesArray); - } - - /** - * Supports Partial updates - * Updates property for the entity corresponding to guid - * @param guid guid - * @param attribute property key - * @param value property value - */ - public EntityResult updateEntityAttribute(final String guid, final String attribute, String value) - throws AtlasServiceException { - LOG.debug("Updating entity id: {}, attribute name: {}, attribute value: {}", guid, attribute, value); - final API api = API_V1.UPDATE_ENTITY_PARTIAL; - ObjectNode response = callAPIWithRetries(api, value, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api, guid); - resource = resource.queryParam(ATTRIBUTE_NAME, attribute); - return resource; - } - }); - return extractEntityResult(response); - } - - /** - * Supports Partial updates - * Updates properties set in the definition for the entity corresponding to guid - * @param guid guid - * @param entity entity definition - */ - public EntityResult updateEntity(String guid, Referenceable entity) throws AtlasServiceException { - String entityJson = AtlasType.toV1Json(entity); - LOG.debug("Updating entity id {} with {}", guid, entityJson); - ObjectNode response = callAPIWithBodyAndParams(API_V1.UPDATE_ENTITY_PARTIAL, entityJson, guid); - return extractEntityResult(response); - } - - /** - * Associate trait to an entity - * - * @param guid guid - * @param traitDefinition trait definition - */ - public void addTrait(String guid, Struct traitDefinition) throws AtlasServiceException { - String traitJson = AtlasType.toV1Json(traitDefinition); - LOG.debug("Adding trait to entity with id {} {}", guid, traitJson); - callAPIWithBodyAndParams(API_V1.ADD_TRAITS, traitJson, guid, URI_TRAITS); - } - - /** - * Delete a trait from the given entity - * @param guid guid of the entity - * @param traitName trait to be deleted - * @throws AtlasServiceException - */ - public void deleteTrait(String guid, String traitName) throws AtlasServiceException { - callAPIWithBodyAndParams(API_V1.DELETE_TRAITS, null, guid, TRAITS, traitName); - } - - /** - * Supports Partial updates - * Updates properties set in the definition for the entity corresponding to guid - * @param entityType Type of the entity being updated - * @param uniqueAttributeName Attribute Name that uniquely identifies the entity - * @param uniqueAttributeValue Attribute Value that uniquely identifies the entity - * @param entity entity definition - */ - public EntityResult updateEntity(final String entityType, final String uniqueAttributeName, - final String uniqueAttributeValue, - Referenceable entity) throws AtlasServiceException { - final API api = API_V1.UPDATE_ENTITY_PARTIAL; - String entityJson = AtlasType.toV1Json(entity); - LOG.debug("Updating entity type: {}, attributeName: {}, attributeValue: {}, entity: {}", entityType, - uniqueAttributeName, uniqueAttributeValue, entityJson); - 
ObjectNode response = callAPIWithRetries(api, entityJson, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api, QUALIFIED_NAME); - resource = resource.queryParam(TYPE, entityType); - resource = resource.queryParam(ATTRIBUTE_NAME, uniqueAttributeName); - resource = resource.queryParam(ATTRIBUTE_VALUE, uniqueAttributeValue); - return resource; - } - }); - EntityResult result = extractEntityResult(response); - LOG.debug("Update entity returned result: {}", result); - return result; - } - - protected String getString(ObjectNode jsonObject, String parameter) throws AtlasServiceException { - return jsonObject.get(parameter).asText(); - } - - /** - * Delete the specified entities from the repository - * - * @param guids guids of entities to delete - * @return List of entity ids updated/deleted - * @throws AtlasServiceException - */ - public EntityResult deleteEntities(final String... guids) throws AtlasServiceException { - LOG.debug("Deleting entities: {}", guids); - final API api = API_V1.DELETE_ENTITIES; - ObjectNode jsonResponse = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - for (String guid : guids) { - resource = resource.queryParam(GUID.toLowerCase(), guid); - } - return resource; - } - }); - EntityResult results = extractEntityResult(jsonResponse); - LOG.debug("Delete entities returned results: {}", results); - return results; - } - - /** - * Supports Deletion of an entity identified by its unique attribute value - * @param entityType Type of the entity being deleted - * @param uniqueAttributeName Attribute Name that uniquely identifies the entity - * @param uniqueAttributeValue Attribute Value that uniquely identifies the entity - * @return List of entity ids updated/deleted(including composite references from that entity) - */ - public EntityResult deleteEntity(String entityType, String uniqueAttributeName, String uniqueAttributeValue) - throws AtlasServiceException { - LOG.debug("Deleting entity type: {}, attributeName: {}, attributeValue: {}", entityType, uniqueAttributeName, - uniqueAttributeValue); - API api = API_V1.DELETE_ENTITIES; - WebResource resource = getResource(api); - resource = resource.queryParam(TYPE, entityType); - resource = resource.queryParam(ATTRIBUTE_NAME, uniqueAttributeName); - resource = resource.queryParam(ATTRIBUTE_VALUE, uniqueAttributeValue); - ObjectNode jsonResponse = callAPIWithResource(api, resource); - EntityResult results = extractEntityResult(jsonResponse); - LOG.debug("Delete entities returned results: {}", results); - return results; - } - - /** - * Get an entity given the entity id - * @param guid entity id - * @return result object - * @throws AtlasServiceException - */ - public Referenceable getEntity(String guid) throws AtlasServiceException { - ObjectNode jsonResponse = callAPIWithBodyAndParams(API_V1.GET_ENTITY, null, guid); - String entityInstanceDefinition = AtlasType.toJson(jsonResponse.get(AtlasClient.DEFINITION)); - return AtlasType.fromV1Json(entityInstanceDefinition, Referenceable.class); - } - - public static String toString(ArrayNode jsonArray) { - ArrayList resultsList = new ArrayList<>(); - for (int index = 0; index < jsonArray.size(); index++) { - resultsList.add(jsonArray.get(index).asText()); - } - return StringUtils.join(resultsList, ","); - } - - /** - * Get an entity given the entity id - * @param entityType entity type name - * @param attribute qualified name 
of the entity - * @param value - * @return result object - * @throws AtlasServiceException - */ - public Referenceable getEntity(final String entityType, final String attribute, final String value) - throws AtlasServiceException { - final API api = API_V1.GET_ENTITY; - ObjectNode jsonResponse = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - resource = resource.queryParam(TYPE, entityType); - resource = resource.queryParam(ATTRIBUTE_NAME, attribute); - resource = resource.queryParam(ATTRIBUTE_VALUE, value); - return resource; - } - }); - String entityInstanceDefinition = AtlasType.toJson(jsonResponse.get(AtlasClient.DEFINITION)); - return AtlasType.fromV1Json(entityInstanceDefinition, Referenceable.class); - } - - /** - * List entities for a given entity type - * @param entityType - * @return - * @throws AtlasServiceException - */ - public List listEntities(final String entityType) throws AtlasServiceException { - ObjectNode jsonResponse = callAPIWithRetries(API_V1.LIST_ENTITIES, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(API_V1.LIST_ENTITIES); - resource = resource.queryParam(TYPE, entityType); - return resource; - } - }); - return extractStringList(jsonResponse); - } - - /** - * List traits for a given entity identified by its GUID - * @param guid GUID of the entity - * @return List - traitnames associated with entity - * @throws AtlasServiceException - */ - public List listTraits(final String guid) throws AtlasServiceException { - ObjectNode jsonResponse = callAPIWithBodyAndParams(API_V1.LIST_TRAITS, null, guid, URI_TRAITS); - return extractStringList(jsonResponse); - } - - /** - * Get all trait definitions for an entity - * @param guid GUID of the entity - * @return List trait definitions of the traits associated to the entity - * @throws AtlasServiceException - */ - public List listTraitDefinitions(final String guid) throws AtlasServiceException { - ObjectNode jsonResponse = callAPIWithBodyAndParams(API_V1.GET_ALL_TRAIT_DEFINITIONS, null, guid, TRAIT_DEFINITIONS); - List traitDefList = extractResults(jsonResponse, AtlasClient.RESULTS, new ExtractOperation()); - ArrayList traitStructList = new ArrayList<>(); - for (ObjectNode traitDef : traitDefList) { - Struct traitStruct = AtlasType.fromV1Json(traitDef.toString(), Struct.class); - traitStructList.add(traitStruct); - } - return traitStructList; - } - - /** - * Get trait definition for a given entity and traitname - * @param guid GUID of the entity - * @param traitName - * @return trait definition - * @throws AtlasServiceException - */ - public Struct getTraitDefinition(final String guid, final String traitName) throws AtlasServiceException { - ObjectNode jsonResponse = callAPIWithBodyAndParams(API_V1.GET_TRAIT_DEFINITION, null, guid, TRAIT_DEFINITIONS, traitName); - - return AtlasType.fromV1Json(AtlasType.toJson(jsonResponse.get(AtlasClient.RESULTS)), Struct.class); - } - - protected class ExtractOperation { - T extractElement(U element) { - return (T) element; - } - } - - protected List extractResults(ObjectNode jsonResponse, String key, ExtractOperation extractInterafce) - throws AtlasServiceException { - ArrayNode results = (ArrayNode)jsonResponse.get(key); - ArrayList resultsList = new ArrayList<>(); - for (int index = 0; index < results.size(); index++) { - Object element = results.get(index); - resultsList.add(extractInterafce.extractElement((U) 
element)); - } - return resultsList; - } - - /** - * Get the latest numResults entity audit events in decreasing order of timestamp for the given entity id - * @param entityId entity id - * @param numResults number of results to be returned - * @return list of audit events for the entity id - * @throws AtlasServiceException - */ - public List getEntityAuditEvents(String entityId, short numResults) - throws AtlasServiceException { - return getEntityAuditEvents(entityId, null, numResults); - } - - /** - * Get the entity audit events in decreasing order of timestamp for the given entity id - * @param entityId entity id - * @param startKey key for the first event to be returned, used for pagination - * @param numResults number of results to be returned - * @return list of audit events for the entity id - * @throws AtlasServiceException - */ - public List getEntityAuditEvents(String entityId, String startKey, short numResults) - throws AtlasServiceException { - WebResource resource = getResource(API_V1.LIST_ENTITY_AUDIT, entityId, URI_ENTITY_AUDIT); - if (StringUtils.isNotEmpty(startKey)) { - resource = resource.queryParam(START_KEY, startKey); - } - resource = resource.queryParam(NUM_RESULTS, String.valueOf(numResults)); - - ObjectNode jsonResponse = callAPIWithResource(API_V1.LIST_ENTITY_AUDIT, resource); - return extractResults(jsonResponse, AtlasClient.EVENTS, new ExtractOperation() { - @Override - EntityAuditEvent extractElement(ObjectNode element) { - return AtlasType.fromV1Json(element.toString(), EntityAuditEvent.class); - } - }); - - } - - /** - * Search using dsl/full text - * @param searchQuery - * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value - * @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0 - * @return Query results - * @throws AtlasServiceException - */ - public JsonNode search(final String searchQuery, final int limit, final int offset) throws AtlasServiceException { - final API api = API_V1.SEARCH; - ObjectNode result = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - resource = resource.queryParam(QUERY, searchQuery); - resource = resource.queryParam(LIMIT, String.valueOf(limit)); - resource = resource.queryParam(OFFSET, String.valueOf(offset)); - return resource; - } - }); - return result.get(RESULTS); - } - - /** - * Search given query DSL - * @param query DSL query - * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value - * @param offset offset to the results returned, used for pagination. offset >= 0. 
-1 maps to offset 0 - * @return result json object - * @throws AtlasServiceException - */ - public ArrayNode searchByDSL(final String query, final int limit, final int offset) throws AtlasServiceException { - LOG.debug("DSL query: {}", query); - final API api = API_V1.SEARCH_DSL; - ObjectNode response = callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - resource = resource.queryParam(QUERY, query); - resource = resource.queryParam(LIMIT, String.valueOf(limit)); - resource = resource.queryParam(OFFSET, String.valueOf(offset)); - return resource; - } - }); - - JsonNode results = response.get(RESULTS); - - return (results.isNull()) ? AtlasJson.createV1ArrayNode(): (ArrayNode) response.get(RESULTS); - } - - /** - * Search given full text search - * @param query Query - * @param limit number of rows to be returned in the result, used for pagination. maxlimit > limit > 0. -1 maps to atlas.search.defaultlimit property value - * @param offset offset to the results returned, used for pagination. offset >= 0. -1 maps to offset 0 - * @return result json object - * @throws AtlasServiceException - */ - public ObjectNode searchByFullText(final String query, final int limit, final int offset) throws AtlasServiceException { - final API api = API_V1.SEARCH_FULL_TEXT; - return callAPIWithRetries(api, null, new ResourceCreator() { - @Override - public WebResource createResource() { - WebResource resource = getResource(api); - resource = resource.queryParam(QUERY, query); - resource = resource.queryParam(LIMIT, String.valueOf(limit)); - resource = resource.queryParam(OFFSET, String.valueOf(offset)); - return resource; - } - }); - } - - public ObjectNode getInputGraph(String datasetName) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.NAME_LINEAGE_INPUTS_GRAPH, null, datasetName, "/inputs/graph"); - return (ObjectNode)response.get(AtlasClient.RESULTS); - } - - public ObjectNode getOutputGraph(String datasetName) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.NAME_LINEAGE_OUTPUTS_GRAPH, null, datasetName, "/outputs/graph"); - return (ObjectNode)response.get(AtlasClient.RESULTS); - } - - public ObjectNode getInputGraphForEntity(String entityId) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.LINEAGE_INPUTS_GRAPH, null, entityId, "/inputs/graph"); - return (ObjectNode)response.get(AtlasClient.RESULTS); - } - - public ObjectNode getOutputGraphForEntity(String datasetId) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.LINEAGE_OUTPUTS_GRAPH, null, datasetId, "/outputs/graph"); - return (ObjectNode) response.get(AtlasClient.RESULTS); - } - - public ObjectNode getSchemaForEntity(String datasetId) throws AtlasServiceException { - ObjectNode response = callAPIWithBodyAndParams(API_V1.LINEAGE_OUTPUTS_GRAPH, null, datasetId, "/schema"); - return (ObjectNode) response.get(AtlasClient.RESULTS); - } - - private List extractStringList(ObjectNode response) { - List ret = new ArrayList<>(); - JsonNode results = (response != null) ? 
response.get(AtlasClient.RESULTS) : null; - - if (results != null && results instanceof ArrayNode) { - for (JsonNode node : results) { - ret.add(node.asText()); - } - } - - return ret; - } - - // Wrapper methods for compatibility - @VisibleForTesting - public ObjectNode callAPIWithResource(API api, WebResource resource) throws AtlasServiceException { - return callAPIWithResource(api, resource, null, ObjectNode.class); - } - - @VisibleForTesting - public ObjectNode callAPIWithResource(API_V1 apiV1, WebResource resource) throws AtlasServiceException { - return callAPIWithResource(apiV1, resource, null, ObjectNode.class); - } - - @VisibleForTesting - public WebResource getResource(API api, String... params) { - return getResource(api.getNormalizedPath(), params); - } - - @VisibleForTesting - public WebResource getResource(API_V1 apiV1, String... params) { - return getResource(apiV1.getNormalizedPath(), params); - } - - @VisibleForTesting - public ObjectNode callAPIWithBody(API api, Object requestObject) throws AtlasServiceException { - return callAPI(api, ObjectNode.class, requestObject, (String[]) null); - } - - @VisibleForTesting - public ObjectNode callAPIWithBody(API_V1 apiV1, Object requestObject) throws AtlasServiceException { - return callAPI(apiV1, ObjectNode.class, requestObject, (String[]) null); - } - - @VisibleForTesting - public ObjectNode callAPIWithBodyAndParams(API api, Object requestObject, String... params) throws AtlasServiceException { - return callAPI(api, ObjectNode.class, requestObject, params); - } - - @VisibleForTesting - public ObjectNode callAPIWithBodyAndParams(API_V1 apiV1, Object requestObject, String... params) throws AtlasServiceException { - return callAPI(apiV1, ObjectNode.class, requestObject, params); - } - - @VisibleForTesting - public ObjectNode callAPIWithQueryParams(API api, MultivaluedMap queryParams) throws AtlasServiceException { - return callAPI(api, ObjectNode.class, queryParams); - } - - @VisibleForTesting - public ObjectNode callAPIWithQueryParams(API_V1 apiV1, MultivaluedMap queryParams) throws AtlasServiceException { - return callAPI(apiV1, ObjectNode.class, queryParams); - } - - @VisibleForTesting - ObjectNode callAPIWithRetries(API api, Object requestObject, ResourceCreator resourceCreator) throws AtlasServiceException { - return super.callAPIWithRetries(api, requestObject, resourceCreator); - } - - @VisibleForTesting - ObjectNode callAPIWithRetries(API_V1 apiV1, Object requestObject, ResourceCreator resourceCreator) throws AtlasServiceException { - return super.callAPIWithRetries(apiV1, requestObject, resourceCreator); - } -} diff --git a/client/client-v1/src/main/java/org/apache/atlas/CreateUpdateEntitiesResult.java b/client/client-v1/src/main/java/org/apache/atlas/CreateUpdateEntitiesResult.java deleted file mode 100644 index 5e6d6db3ae..0000000000 --- a/client/client-v1/src/main/java/org/apache/atlas/CreateUpdateEntitiesResult.java +++ /dev/null @@ -1,124 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas; - -import org.apache.atlas.model.instance.GuidMapping; -import org.apache.atlas.model.legacy.EntityResult; -import org.apache.atlas.type.AtlasType; - -import java.util.Collections; -import java.util.List; - -/** - * Result from creating or updating entities. - */ -@Deprecated -public class CreateUpdateEntitiesResult { - - /** - * Guid mapping for the entities that were created/updated - */ - private GuidMapping guidMapping; - - /** - * Entity result - */ - private EntityResult entityResult; - - /** - * Gets the guid mapping - */ - public GuidMapping getGuidMapping() { - return guidMapping; - } - - /** - * Sets the guid mapping - */ - public void setGuidMapping(GuidMapping guidMapping) { - this.guidMapping = guidMapping; - } - - /** - * Gets the entity result - */ - public EntityResult getEntityResult() { - return entityResult; - } - - /** - * Sets the entity result - */ - public void setEntityResult(EntityResult entityResult) { - this.entityResult = entityResult; - } - - /** - * Deserializes the given json into an instance of - * CreateUpdateEntitiesResult. - * - * @param json - * the (unmodified) json that comes back from Atlas. - * @return - * @throws AtlasServiceException - */ - public static CreateUpdateEntitiesResult fromJson(String json) throws AtlasServiceException { - - GuidMapping guidMapping = AtlasType.fromJson(json, GuidMapping.class); - EntityResult entityResult = EntityResult.fromString(json); - CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult(); - result.setEntityResult(entityResult); - result.setGuidMapping(guidMapping); - return result; - } - - /** - * Convenience method to get the guids of the created entities from - * the EntityResult. - */ - public List getCreatedEntities() { - if(entityResult == null) { - return Collections.emptyList(); - } - return getEntityResult().getCreatedEntities(); - } - - /** - * Convenience method to get the guids of the updated entities from - * the EntityResult. - */ - public List getUpdatedEntities() { - if(entityResult == null) { - return Collections.emptyList(); - } - return getEntityResult().getUpdateEntities(); - } - - - /** - * Convenience method to get the guids of the deleted entities - * from the EntityResult. - */ - public List getDeletedEntities() { - if (entityResult == null) { - return Collections.emptyList(); - } - return getEntityResult().getDeletedEntities(); - } - -} diff --git a/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java b/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java deleted file mode 100644 index 067bfdf287..0000000000 --- a/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java +++ /dev/null @@ -1,478 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas; - -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientHandlerException; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.WebResource; -import org.apache.atlas.model.legacy.EntityResult; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.commons.configuration.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.mockito.Matchers; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriBuilder; -import java.net.ConnectException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.List; - -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -public class AtlasClientTest { - - @Mock - private WebResource service; - @Mock - private WebResource.Builder resourceBuilderMock; - - @Mock - private Configuration configuration; - - @Mock - private Client client; - - @BeforeMethod - public void setup() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void shouldVerifyServerIsReady() throws AtlasServiceException { - setupRetryParams(); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.VERSION, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - when(response.getEntity(String.class)).thenReturn("{\"Version\":\"version-rrelease\",\"Name\":\"apache-atlas\"," + - "\"Description\":\"Metadata Management and Data Governance Platform over Hadoop\"}"); - when(builder.method(AtlasClient.API_V1.VERSION.getMethod(), ClientResponse.class, null)).thenReturn(response); - - assertTrue(atlasClient.isServerReady()); - } - - @Test - public void testCreateEntity() throws Exception { - setupRetryParams(); - AtlasClient atlasClient = new AtlasClient(service, configuration); - - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.CREATE_ENTITY, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.CREATED.getStatusCode()); - - String jsonResponse = AtlasType.toV1Json(new EntityResult(Arrays.asList("id"), null, null)); - when(response.getEntity(String.class)).thenReturn(jsonResponse.toString()); - when(response.getLength()).thenReturn(jsonResponse.length()); - - String entityJson = AtlasType.toV1Json(new Referenceable("type")); - 
when(builder.method(anyString(), Matchers.any(), anyString())).thenReturn(response); - - List ids = atlasClient.createEntity(entityJson); - assertEquals(ids.size(), 1); - assertEquals(ids.get(0), "id"); - } - - private WebResource.Builder setupBuilder(AtlasClient.API_V1 api, WebResource webResource) { - when(webResource.path(api.getPath())).thenReturn(service); - when(webResource.path(api.getNormalizedPath())).thenReturn(service); - return getBuilder(service); - } - - @Test - public void shouldReturnFalseIfServerIsNotReady() throws AtlasServiceException { - setupRetryParams(); - AtlasClient atlasClient = new AtlasClient(service, configuration); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.VERSION, service); - when(builder.method(AtlasClient.API_V1.VERSION.getMethod(), ClientResponse.class, null)).thenThrow( - new ClientHandlerException()); - assertFalse(atlasClient.isServerReady()); - } - - @Test - public void shouldReturnFalseIfServiceIsUnavailable() throws AtlasServiceException { - setupRetryParams(); - AtlasClient atlasClient = new AtlasClient(service, configuration); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.VERSION, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.SERVICE_UNAVAILABLE.getStatusCode()); - when(response.getClientResponseStatus()).thenReturn(ClientResponse.Status.SERVICE_UNAVAILABLE); - - when(builder.method(AtlasClient.API_V1.VERSION.getMethod(), ClientResponse.class, null)).thenReturn(response); - - assertFalse(atlasClient.isServerReady()); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void shouldThrowErrorIfAnyResponseOtherThanServiceUnavailable() throws AtlasServiceException { - setupRetryParams(); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.VERSION, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); - when(response.getClientResponseStatus()).thenReturn(ClientResponse.Status.INTERNAL_SERVER_ERROR); - - when(builder.method(AtlasClient.API_V1.VERSION.getMethod(), ClientResponse.class, null)).thenReturn(response); - - atlasClient.isServerReady(); - fail("Should throw exception"); - } - - @Test - public void shouldGetAdminStatus() throws AtlasServiceException { - setupRetryParams(); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"Active\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)).thenReturn(response); - -// Fix after AtlasBaseClient -// atlasClient.setService(); - - - String status = atlasClient.getAdminStatus(); - assertEquals(status, "Active"); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void shouldReturnStatusAsUnknownOnException() throws AtlasServiceException { - setupRetryParams(); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse 
response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); - when(response.getClientResponseStatus()).thenReturn(ClientResponse.Status.INTERNAL_SERVER_ERROR); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)).thenReturn(response); - - String status = atlasClient.getAdminStatus(); - fail("Should fail with AtlasServiceException"); - } - - @Test - public void shouldReturnStatusAsUnknownIfJSONIsInvalid() throws AtlasServiceException { - setupRetryParams(); - AtlasClient atlasClient = new AtlasClient(service, configuration); - - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - when(response.getEntity(String.class)).thenReturn("{\"status\":\"Active\"}"); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)).thenReturn(response); - - String status = atlasClient.getAdminStatus(); - assertEquals(status, AtlasClient.UNKNOWN_STATUS); - } - - @Test - public void shouldReturnBaseURLAsPassedInURL() { - AtlasClient atlasClient = new AtlasClient(service, configuration); - - String serviceURL = atlasClient.determineActiveServiceURL(new String[]{"http://localhost:21000"}, client); - assertEquals(serviceURL, "http://localhost:21000"); - } - - @Test - public void shouldSelectActiveAmongMultipleServersIfHAIsEnabled() { - setupRetryParams(); - - when(client.resource(UriBuilder.fromUri("http://localhost:31000").build())).thenReturn(service); - when(client.resource(UriBuilder.fromUri("http://localhost:41000").build())).thenReturn(service); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse firstResponse = mock(ClientResponse.class); - when(firstResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String passiveStatus = "{\"Status\":\"PASSIVE\"}"; - when(firstResponse.getEntity(String.class)).thenReturn(passiveStatus); - when(firstResponse.getLength()).thenReturn(passiveStatus.length()); - ClientResponse secondResponse = mock(ClientResponse.class); - when(secondResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(secondResponse.getEntity(String.class)).thenReturn(activeStatus); - when(secondResponse.getLength()).thenReturn(activeStatus.length()); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)). - thenReturn(firstResponse).thenReturn(firstResponse).thenReturn(firstResponse). 
- thenReturn(secondResponse); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - String serviceURL = atlasClient.determineActiveServiceURL( - new String[]{"http://localhost:31000", "http://localhost:41000"}, - client); - assertEquals(serviceURL, "http://localhost:41000"); - } - - @Test - public void shouldRetryUntilServiceBecomesActive() { - setupRetryParams(); - - when(client.resource(UriBuilder.fromUri("http://localhost:31000").build())).thenReturn(service); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - when(response.getEntity(String.class)).thenReturn("{\"Status\":\"BECOMING_ACTIVE\"}"); - ClientResponse nextResponse = mock(ClientResponse.class); - when(nextResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)). - thenReturn(response).thenReturn(response).thenReturn(nextResponse); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - String serviceURL = atlasClient.determineActiveServiceURL( - new String[] {"http://localhost:31000","http://localhost:41000"}, - client); - assertEquals(serviceURL, "http://localhost:31000"); - } - - @Test - public void shouldRetryIfCannotConnectToServiceInitially() { - setupRetryParams(); - - when(client.resource(UriBuilder.fromUri("http://localhost:31000").build())).thenReturn(service); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - when(response.getEntity(String.class)).thenReturn("{\"Status\":\"BECOMING_ACTIVE\"}"); - ClientResponse nextResponse = mock(ClientResponse.class); - when(nextResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)). - thenThrow(new ClientHandlerException("Simulating connection exception")). - thenReturn(response). 
- thenReturn(nextResponse); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - atlasClient.setService(service); - atlasClient.setConfiguration(configuration); - - String serviceURL = atlasClient.determineActiveServiceURL( - new String[] {"http://localhost:31000","http://localhost:41000"}, - client); - assertEquals(serviceURL, "http://localhost:31000"); - } - - @Test(expectedExceptions = IllegalArgumentException.class) - public void shouldThrowExceptionIfActiveServerIsNotFound() { - setupRetryParams(); - - when(client.resource(UriBuilder.fromUri("http://localhost:31000").build())).thenReturn(service); - WebResource.Builder builder = setupBuilder(AtlasClient.API_V1.STATUS, service); - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - when(response.getEntity(String.class)).thenReturn("{\"Status\":\"BECOMING_ACTIVE\"}"); - when(builder.method(AtlasClient.API_V1.STATUS.getMethod(), ClientResponse.class, null)). - thenThrow(new ClientHandlerException("Simulating connection exception")). - thenReturn(response). - thenReturn(response); - - AtlasClient atlasClient = new AtlasClient(service, configuration); - - String serviceURL = atlasClient.determineActiveServiceURL( - new String[] {"http://localhost:31000","http://localhost:41000"}, - client); - assertNull(serviceURL); - } - - @Test - public void shouldRetryAPICallsOnClientHandlerException() throws AtlasServiceException, URISyntaxException { - setupRetryParams(); - - ResourceCreator resourceCreator = mock(ResourceCreator.class); - WebResource resourceObject = mock(WebResource.class); - when(resourceObject.getURI()). - thenReturn(new URI("http://localhost:31000/api/atlas/types")). - thenReturn(new URI("http://localhost:41000/api/atlas/types")). - thenReturn(new URI("http://localhost:41000/api/atlas/types")); - - WebResource.Builder builder = getBuilder(resourceObject); - - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - - when(builder.method(AtlasClient.API_V1.LIST_TYPES.getMethod(), ClientResponse.class, null)). - thenThrow(new ClientHandlerException("simulating exception in calling API", new ConnectException())). - thenReturn(response); - - when(resourceCreator.createResource()).thenReturn(resourceObject); - - AtlasClient atlasClient = getClientForTest("http://localhost:31000","http://localhost:41000"); - - atlasClient.setService(service); - atlasClient.setConfiguration(configuration); - - atlasClient.callAPIWithRetries(AtlasClient.API_V1.LIST_TYPES, null, resourceCreator); - - verify(client).destroy(); - verify(client).resource(UriBuilder.fromUri("http://localhost:31000").build()); - verify(client).resource(UriBuilder.fromUri("http://localhost:41000").build()); - } - - @Test - public void shouldRetryWithSameClientIfSingleAddressIsUsed() throws URISyntaxException, AtlasServiceException { - setupRetryParams(); - - ResourceCreator resourceCreator = mock(ResourceCreator.class); - WebResource resourceObject = mock(WebResource.class); - when(resourceObject.getURI()). 
- thenReturn(new URI("http://localhost:31000/api/atlas/types")); - - WebResource.Builder builder = getBuilder(resourceObject); - - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - - when(builder.method(AtlasClient.API_V1.LIST_TYPES.getMethod(), ClientResponse.class, null)). - thenThrow(new ClientHandlerException("simulating exception in calling API", new ConnectException())). - thenReturn(response); - - when(resourceCreator.createResource()).thenReturn(resourceObject); - when(configuration.getString("atlas.http.authentication.type", "simple")).thenReturn("simple"); - - AtlasClient atlasClient = getClientForTest("http://localhost:31000"); - - atlasClient.setService(resourceObject); - atlasClient.setConfiguration(configuration); - - atlasClient.callAPIWithRetries(AtlasClient.API_V1.LIST_TYPES, null, resourceCreator); - - verify(client).destroy(); - verify(client, times(2)).resource(UriBuilder.fromUri("http://localhost:31000").build()); - } - - @Test - public void shouldRetryAPICallsOnServiceUnavailable() throws AtlasServiceException, URISyntaxException { - setupRetryParams(); - - ResourceCreator resourceCreator = mock(ResourceCreator.class); - WebResource resourceObject = mock(WebResource.class); - when(resourceObject.getURI()). - thenReturn(new URI("http://localhost:31000/api/atlas/types")). - thenReturn(new URI("http://localhost:41000/api/atlas/types")). - thenReturn(new URI("http://localhost:41000/api/atlas/types")); - - WebResource.Builder builder = getBuilder(resourceObject); - - ClientResponse firstResponse = mock(ClientResponse.class); - when(firstResponse.getStatus()).thenReturn(Response.Status.SERVICE_UNAVAILABLE.getStatusCode()); - when(firstResponse.getClientResponseStatus()).thenReturn(ClientResponse.Status.SERVICE_UNAVAILABLE); - - ClientResponse response = mock(ClientResponse.class); - when(response.getStatus()).thenReturn(Response.Status.OK.getStatusCode()); - String activeStatus = "{\"Status\":\"ACTIVE\"}"; - when(response.getEntity(String.class)).thenReturn(activeStatus); - when(response.getLength()).thenReturn(activeStatus.length()); - - when(builder.method(AtlasClient.API_V1.LIST_TYPES.getMethod(), ClientResponse.class, null)). - thenThrow(new ClientHandlerException("simulating exception in calling API", new ConnectException())). - thenReturn(firstResponse). 
- thenReturn(response); - - when(resourceCreator.createResource()).thenReturn(resourceObject); - - AtlasClient atlasClient = getClientForTest("http://localhost:31000","http://localhost:41000"); - atlasClient.setService(resourceObject); - atlasClient.setConfiguration(configuration); - - atlasClient.callAPIWithRetries(AtlasClient.API_V1.LIST_TYPES, null, resourceCreator); - - - verify(client).destroy(); - verify(client).resource(UriBuilder.fromUri("http://localhost:31000").build()); - verify(client).resource(UriBuilder.fromUri("http://localhost:41000").build()); - } - - private WebResource.Builder getBuilder(WebResource resourceObject) { - when(resourceObject.getRequestBuilder()).thenReturn(resourceBuilderMock); - when(resourceObject.path(anyString())).thenReturn(resourceObject); - when(resourceBuilderMock.accept(AtlasBaseClient.JSON_MEDIA_TYPE)).thenReturn(resourceBuilderMock); - when(resourceBuilderMock.accept(MediaType.APPLICATION_JSON)).thenReturn(resourceBuilderMock); - when(resourceBuilderMock.type(AtlasBaseClient.JSON_MEDIA_TYPE)).thenReturn(resourceBuilderMock); - when(resourceBuilderMock.type(MediaType.MULTIPART_FORM_DATA)).thenReturn(resourceBuilderMock); - return resourceBuilderMock; - } - - private void setupRetryParams() { - when(configuration.getInt(AtlasClient.ATLAS_CLIENT_HA_RETRIES_KEY, AtlasClient.DEFAULT_NUM_RETRIES)). - thenReturn(3); - when(configuration.getInt(AtlasClient.ATLAS_CLIENT_HA_SLEEP_INTERVAL_MS_KEY, - AtlasClient.DEFAULT_SLEEP_BETWEEN_RETRIES_MS)). - thenReturn(1); - } - - private AtlasClient getClientForTest(final String... baseUrls) { - return new AtlasClient((UserGroupInformation)null, (String)null, baseUrls) { - boolean firstCall = true; - @Override - protected String determineActiveServiceURL(String[] baseUrls, Client client) { - String returnUrl = baseUrls[0]; - if (baseUrls.length > 1 && !firstCall) { - returnUrl = baseUrls[1]; - } - firstCall = false; - return returnUrl; - } - - @Override - protected Configuration getClientProperties() { - return configuration; - } - - @Override - protected Client getClient(Configuration configuration, UserGroupInformation ugi, String doAsUser) { - return client; - } - }; - } -} diff --git a/client/common/src/main/java/org/apache/atlas/AtlasBaseClient.java b/client/common/src/main/java/org/apache/atlas/AtlasBaseClient.java index cb35c94469..017763ac38 100644 --- a/client/common/src/main/java/org/apache/atlas/AtlasBaseClient.java +++ b/client/common/src/main/java/org/apache/atlas/AtlasBaseClient.java @@ -558,26 +558,6 @@ void handleClientHandlerException(ClientHandlerException che) { throw che; } - @VisibleForTesting - ObjectNode callAPIWithRetries(API api, Object requestObject, ResourceCreator resourceCreator) - throws AtlasServiceException { - for (int i = 0; i < getNumberOfRetries(); i++) { - WebResource resource = resourceCreator.createResource(); - try { - LOG.debug("Using resource {} for {} times", resource.getURI(), i + 1); - return callAPIWithResource(api, resource, requestObject, ObjectNode.class); - } catch (ClientHandlerException che) { - if (i == (getNumberOfRetries() - 1)) { - throw che; - } - LOG.warn("Handled exception in calling api {}", api.getNormalizedPath(), che); - LOG.warn("Exception's cause: {}", che.getCause().getClass()); - handleClientHandlerException(che); - } - } - throw new AtlasServiceException(api, new RuntimeException("Could not get response after retries.")); - } - @VisibleForTesting void setConfiguration(Configuration configuration) { this.configuration = configuration; diff --git 
a/client/common/src/main/java/org/apache/atlas/ResourceCreator.java b/client/common/src/main/java/org/apache/atlas/ResourceCreator.java deleted file mode 100644 index 2017065b39..0000000000 --- a/client/common/src/main/java/org/apache/atlas/ResourceCreator.java +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas; - -import com.sun.jersey.api.client.WebResource; - -/** - * An interface to capture the closure of how a WebResource is created. - */ -@Deprecated -public interface ResourceCreator { - WebResource createResource(); -} diff --git a/client/pom.xml b/client/pom.xml index 73eb73a6fd..2ef6c47ad8 100755 --- a/client/pom.xml +++ b/client/pom.xml @@ -28,7 +28,6 @@ common - client-v1 client-v2 diff --git a/common/src/main/java/org/apache/atlas/repository/Constants.java b/common/src/main/java/org/apache/atlas/repository/Constants.java index 4fc62e0e6e..2ba0d938e3 100644 --- a/common/src/main/java/org/apache/atlas/repository/Constants.java +++ b/common/src/main/java/org/apache/atlas/repository/Constants.java @@ -192,6 +192,11 @@ public final class Constants { public static final String ASSET_README_EDGE_LABEL = "__Asset.readme"; public static final String ASSET_LINK_EDGE_LABEL = "__Asset.links"; + public static final String DATA_SET_SUPER_TYPE = "Catalog"; + public static final String PROCESS_SUPER_TYPE = "Process"; + public static final String ERROR = "error"; + public static final String STATUS = "Status"; + /** * Contract */ diff --git a/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java index adfca599d1..28a0a78a8c 100644 --- a/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java +++ b/common/src/main/java/org/apache/atlas/service/FeatureFlagStore.java @@ -5,10 +5,14 @@ import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; +import javax.inject.Inject; + @Component public class FeatureFlagStore { private static RedisService redisService = null; - public FeatureFlagStore(@Qualifier("redisServiceImpl") RedisService redisService) { + + @Inject + public FeatureFlagStore(RedisService redisService) { FeatureFlagStore.redisService = redisService; } diff --git a/common/src/main/java/org/apache/atlas/utils/OnAtlasEnableCondition.java b/common/src/main/java/org/apache/atlas/utils/OnAtlasEnableCondition.java index b91510c5ad..04c66e8b3b 100644 --- a/common/src/main/java/org/apache/atlas/utils/OnAtlasEnableCondition.java +++ b/common/src/main/java/org/apache/atlas/utils/OnAtlasEnableCondition.java @@ -30,18 +30,21 @@ import org.springframework.core.type.AnnotatedTypeMetadata; import org.springframework.core.type.AnnotationMetadata; 
+import java.util.Map; + public class OnAtlasEnableCondition implements Condition { private final Logger LOG = LoggerFactory.getLogger(OnAtlasEnableCondition.class); @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { boolean matches = false; - String propertyName = (String) metadata.getAnnotationAttributes(EnableConditional.class.getName()).get("property"); + Map attributes = metadata.getAnnotationAttributes(EnableConditional.class.getName()); + String propertyName = (String) attributes.get("property"); if (metadata instanceof AnnotatedTypeMetadata) { try { Configuration configuration = ApplicationProperties.get(); - boolean enabled = configuration.getBoolean(propertyName, true); + boolean enabled = configuration.getBoolean(propertyName, (boolean) attributes.get("isDefault")); return enabled; } catch (AtlasException e) { LOG.error("Unable to load atlas properties. Dependent bean configuration may fail"); diff --git a/distro/pom.xml b/distro/pom.xml index 6f30eecabd..5bd183f69b 100644 --- a/distro/pom.xml +++ b/distro/pom.xml @@ -130,9 +130,9 @@ atlas.graph.storage.hbase.regions-per-server=1 src/main/assemblies/atlas-server-package.xml - src/main/assemblies/standalone-package.xml - src/main/assemblies/src-package.xml - src/main/assemblies/atlas-repair-index-package.xml + + + src/main/assemblies/classification-updater.xml @@ -307,95 +307,6 @@ atlas.graph.storage.lock.wait-time=300 - - - embedded-hbase-solr - - false - - - false - #Solr -#Solr cloud mode properties -atlas.graph.index.search.solr.mode=cloud -atlas.graph.index.search.solr.zookeeper-url=localhost:2181 -atlas.graph.index.search.solr.zookeeper-connect-timeout=60000 -atlas.graph.index.search.solr.zookeeper-session-timeout=60000 -atlas.graph.index.search.solr.wait-searcher=false - -#Solr http mode properties -#atlas.graph.index.search.solr.mode=http -#atlas.graph.index.search.solr.http-urls=http://localhost:8983/solr - - #Hbase -#For standalone mode , specify localhost -#for distributed mode, specify zookeeper quorum here -atlas.graph.storage.hostname=localhost -atlas.graph.storage.hbase.regions-per-server=1 - - ${project.build.directory}/hbase - true - hbase-${hbase.version} - https://archive.apache.org/dist/hbase/${hbase.version}/hbase-${hbase.version}-bin.tar.gz - ${project.build.directory}/solr - true - solr-${solr.version} - https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - - hbase - generate-resources - - run - - - - - - - - - - - - - - - - - - solr - generate-resources - - run - - - - - - - - - - - - - - - - - - - - embedded-cassandra-solr diff --git a/distro/src/main/assemblies/atlas-falcon-hook-package.xml b/distro/src/main/assemblies/atlas-falcon-hook-package.xml deleted file mode 100644 index 9ddc86d34a..0000000000 --- a/distro/src/main/assemblies/atlas-falcon-hook-package.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - tar.gz - - falcon-hook - apache-atlas-falcon-hook-${project.version} - - - - ../addons/falcon-bridge/target/dependency/bridge - bridge - - - - ../addons/falcon-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/atlas-hbase-hook-package.xml b/distro/src/main/assemblies/atlas-hbase-hook-package.xml deleted file mode 100644 index a51c20ac86..0000000000 --- a/distro/src/main/assemblies/atlas-hbase-hook-package.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - tar.gz - - hbase-hook - apache-atlas-hbase-hook-${project.version} - - - target/bin 
- hook-bin - - import-hbase.sh - - 0755 - 0755 - - - - - ../addons/hbase-bridge/src/bin - hook-bin - 0755 - 0755 - - - - - ../addons/hbase-bridge/target/dependency/bridge - bridge - - - - ../addons/hbase-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/atlas-hive-hook-package.xml b/distro/src/main/assemblies/atlas-hive-hook-package.xml deleted file mode 100644 index eaeb1b855b..0000000000 --- a/distro/src/main/assemblies/atlas-hive-hook-package.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - tar.gz - - hive-hook - apache-atlas-hive-hook-${project.version} - - - target/bin - hook-bin - - import-hive.sh - - 0755 - 0755 - - - - - ../addons/hive-bridge/src/bin - hook-bin - 0755 - 0755 - - - - - ../addons/hive-bridge/target/dependency/bridge - bridge - - - - ../addons/hive-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/atlas-impala-hook-package.xml b/distro/src/main/assemblies/atlas-impala-hook-package.xml deleted file mode 100644 index cb1bb0d9fa..0000000000 --- a/distro/src/main/assemblies/atlas-impala-hook-package.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - tar.gz - - impala-hook - apache-atlas-impala-hook-${project.version} - - - ../addons/impala-bridge/target/dependency/bridge - bridge - - - - ../addons/impala-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/atlas-kafka-hook-package.xml b/distro/src/main/assemblies/atlas-kafka-hook-package.xml deleted file mode 100644 index 38bf4c5295..0000000000 --- a/distro/src/main/assemblies/atlas-kafka-hook-package.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - tar.gz - - kafka-hook - apache-atlas-kafka-hook-${project.version} - - - target/bin - hook-bin - - import-kafka.sh - - 0755 - 0755 - - - - - ../addons/kafka-bridge/src/bin - hook-bin - 0755 - 0755 - - - - - ../addons/kafka-bridge/target/dependency/bridge - bridge - - - ../addons/kafka-bridge/target/dependency/hook - hook - - - diff --git a/distro/src/main/assemblies/atlas-sqoop-hook-package.xml b/distro/src/main/assemblies/atlas-sqoop-hook-package.xml deleted file mode 100644 index 5fc23ebf61..0000000000 --- a/distro/src/main/assemblies/atlas-sqoop-hook-package.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - tar.gz - - sqoop-hook - apache-atlas-sqoop-hook-${project.version} - - - - ../addons/sqoop-bridge/target/dependency/bridge - bridge - - - - ../addons/sqoop-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/atlas-storm-hook-package.xml b/distro/src/main/assemblies/atlas-storm-hook-package.xml deleted file mode 100644 index e5d4a93cf4..0000000000 --- a/distro/src/main/assemblies/atlas-storm-hook-package.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - tar.gz - - storm-hook - apache-atlas-storm-hook-${project.version} - - - - ../addons/storm-bridge/target/dependency/bridge - bridge - - - - ../addons/storm-bridge/target/dependency/hook - hook - - - - diff --git a/distro/src/main/assemblies/migration-exporter.xml b/distro/src/main/assemblies/migration-exporter.xml deleted file mode 100644 index 4907a683c6..0000000000 --- a/distro/src/main/assemblies/migration-exporter.xml +++ /dev/null @@ -1,59 +0,0 @@ - - - migration-exporter - - zip - - - atlas-migration-exporter - - - - - README* - - - - ../tools/atlas-migration-exporter/src/main/resources - . - - *.py - migrationContext.xml - atlas-log4j.xml - README - - 0755 - 0755 - - - ../tools/atlas-migration-exporter/target - . 
- - atlas-migration-*.jar - - - *-test-sources.jar - *-sources.jar - - - - diff --git a/distro/src/main/assemblies/standalone-package.xml b/distro/src/main/assemblies/standalone-package.xml index 3ef91c047e..b3c81f9a60 100755 --- a/distro/src/main/assemblies/standalone-package.xml +++ b/distro/src/main/assemblies/standalone-package.xml @@ -141,6 +141,11 @@ policies + + ../addons/override-policies + override-policies + + ../addons/hive-bridge/src/bin diff --git a/graphdb/janus-hbase2/pom.xml b/graphdb/janus-hbase2/pom.xml deleted file mode 100644 index 883c3444e9..0000000000 --- a/graphdb/janus-hbase2/pom.xml +++ /dev/null @@ -1,103 +0,0 @@ - - - - - 4.0.0 - - atlas-graphdb - org.apache.atlas - 3.0.0-SNAPSHOT - - atlas-janusgraph-hbase2 - Apache Atlas JanusGraph-HBase2 Module - Apache Atlas JanusGraph-HBase2 Module - jar - - - - org.janusgraph - janusgraph-core - ${janusgraph.version} - - - com.codahale.metrics - * - - - org.noggit - noggit - - - org.apache.tinkerpop - gremlin-shaded - - - org.apache.tinkerpop - gremlin-server - - - org.apache.tinkerpop - gremlin-groovy - - - org.apache.tinkerpop - gremlin-core - - - org.apache.tinkerpop - gremlin-driver - - - org.apache.tinkerpop - tinkergraph-gremlin - - - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - - org.apache.hbase - hbase-shaded-client - ${hbase.version} - true - - - avro - org.apache.avro - - - jruby-complete - org.jruby - - - asm - asm - - - - - - diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/AdminMask.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/AdminMask.java deleted file mode 100644 index 548860bcc3..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/AdminMask.java +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/** - * Copyright DataStax, Inc. - *
<p>
    - * Please see the included license file for details. - */ -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.ClusterStatus; -import org.apache.hadoop.hbase.TableNotFoundException; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.HBaseAdmin; -import org.apache.hadoop.hbase.client.TableDescriptor; - -import java.io.Closeable; -import java.io.IOException; - -/** - * This interface hides ABI/API breaking changes that HBase has made to its Admin/HBaseAdmin over the course - * of development from 0.94 to 1.0 and beyond. - */ -public interface AdminMask extends Closeable -{ - - void clearTable(String tableName, long timestamp) throws IOException; - - /** - * Drop given table. Table can be either enabled or disabled. - * @param tableName Name of the table to delete - * @throws IOException - */ - void dropTable(String tableName) throws IOException; - - TableDescriptor getTableDescriptor(String tableName) throws TableNotFoundException, IOException; - - boolean tableExists(String tableName) throws IOException; - - void createTable(TableDescriptor desc) throws IOException; - - void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions) throws IOException; - - /** - * Estimate the number of regionservers in the HBase cluster. - * - * This is usually implemented by calling - * {@link HBaseAdmin#getClusterStatus()} and then - * {@link ClusterStatus#getServers()} and finally {@code size()} on the - * returned server list. - * - * @return the number of servers in the cluster or -1 if it could not be determined - */ - int getEstimatedRegionServerCount(); - - void disableTable(String tableName) throws IOException; - - void enableTable(String tableName) throws IOException; - - boolean isTableDisabled(String tableName) throws IOException; - - void addColumn(String tableName, ColumnFamilyDescriptor columnDescriptor) throws IOException; -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/ConnectionMask.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/ConnectionMask.java deleted file mode 100644 index 05ecd532fa..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/ConnectionMask.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/** - * Copyright DataStax, Inc. - *
<p>
    - * Please see the included license file for details. - */ -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.HRegionLocation; - -import java.io.Closeable; -import java.io.IOException; -import java.util.List; - -/** - * This interface hides ABI/API breaking changes that HBase has made to its (H)Connection class over the course - * of development from 0.94 to 1.0 and beyond. - */ -public interface ConnectionMask extends Closeable -{ - - /** - * Retrieve the TableMask compatibility layer object for the supplied table name. - * @return The TableMask for the specified table. - * @throws IOException in the case of backend exceptions. - */ - TableMask getTable(String name) throws IOException; - - /** - * Retrieve the AdminMask compatibility layer object for this Connection. - * @return The AdminMask for this Connection - * @throws IOException in the case of backend exceptions. - */ - AdminMask getAdmin() throws IOException; - - /** - * Retrieve the RegionLocations for the supplied table name. - * @return A map of HRegionInfo to ServerName that describes the storage regions for the named table. - * @throws IOException in the case of backend exceptions. - */ - List getRegionLocations(String tablename) throws IOException; -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseAdmin2_0.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseAdmin2_0.java deleted file mode 100644 index f93481e92c..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseAdmin2_0.java +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.TableNotFoundException; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -public class HBaseAdmin2_0 implements AdminMask -{ - - private static final Logger log = LoggerFactory.getLogger(HBaseAdmin2_0.class); - - private final Admin adm; - - public HBaseAdmin2_0(Admin adm) - { - this.adm = adm; - } - - /** - * Delete all rows from the given table. This method is intended only for development and testing use. 
- * @param tableString - * @param timestamp - * @throws IOException - */ - @Override - public void clearTable(String tableString, long timestamp) throws IOException - { - TableName tableName = TableName.valueOf(tableString); - - if (!adm.tableExists(tableName)) { - log.debug("Attempted to clear table {} before it exists (noop)", tableString); - return; - } - - // Unfortunately, linear scanning and deleting rows is faster in HBase when running integration tests than - // disabling and deleting/truncating tables. - final Scan scan = new Scan(); - scan.setCacheBlocks(false); - scan.setCaching(2000); - scan.setTimeRange(0, Long.MAX_VALUE); - scan.readVersions(1); - - try (final Table table = adm.getConnection().getTable(tableName); - final ResultScanner scanner = table.getScanner(scan)) { - final Iterator iterator = scanner.iterator(); - final int batchSize = 1000; - final List deleteList = new ArrayList<>(); - while (iterator.hasNext()) { - deleteList.add(new Delete(iterator.next().getRow(), timestamp)); - if (!iterator.hasNext() || deleteList.size() == batchSize) { - table.delete(deleteList); - deleteList.clear(); - } - } - } - } - - @Override - public void dropTable(String tableString) throws IOException { - final TableName tableName = TableName.valueOf(tableString); - - if (!adm.tableExists(tableName)) { - log.debug("Attempted to drop table {} before it exists (noop)", tableString); - return; - } - - if (adm.isTableEnabled(tableName)) { - adm.disableTable(tableName); - } - adm.deleteTable(tableName); - } - - @Override - public TableDescriptor getTableDescriptor(String tableString) throws TableNotFoundException, IOException - { - return adm.getDescriptor(TableName.valueOf(tableString)); - } - - @Override - public boolean tableExists(String tableString) throws IOException - { - return adm.tableExists(TableName.valueOf(tableString)); - } - - @Override - public void createTable(TableDescriptor desc) throws IOException - { - adm.createTable(desc); - } - - @Override - public void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions) throws IOException - { - adm.createTable(desc, startKey, endKey, numRegions); - } - - @Override - public int getEstimatedRegionServerCount() - { - int serverCount = -1; - try { - serverCount = adm.getClusterStatus().getServers().size(); - log.debug("Read {} servers from HBase ClusterStatus", serverCount); - } catch (IOException e) { - log.debug("Unable to retrieve HBase cluster status", e); - } - return serverCount; - } - - @Override - public void disableTable(String tableString) throws IOException - { - adm.disableTable(TableName.valueOf(tableString)); - } - - @Override - public void enableTable(String tableString) throws IOException - { - adm.enableTable(TableName.valueOf(tableString)); - } - - @Override - public boolean isTableDisabled(String tableString) throws IOException - { - return adm.isTableDisabled(TableName.valueOf(tableString)); - } - - @Override - public void addColumn(String tableString, ColumnFamilyDescriptor columnDescriptor) throws IOException - { - adm.addColumnFamily(TableName.valueOf(tableString), columnDescriptor); - } - - @Override - public void close() throws IOException - { - adm.close(); - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat.java deleted file mode 100644 index 553ad4606f..0000000000 --- 
a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.TableDescriptor; - -import java.io.IOException; - -public interface HBaseCompat { - - /** - * Configure the compression scheme {@code algo} on a column family - * descriptor {@code cd}. The {@code algo} parameter is a string value - * corresponding to one of the values of HBase's Compression enum. The - * Compression enum has moved between packages as HBase has evolved, which - * is why this method has a String argument in the signature instead of the - * enum itself. - * @param cd - * column family to configure - * @param algo - */ - public ColumnFamilyDescriptor setCompression(ColumnFamilyDescriptor cd, String algo); - - /** - * Create and return a HTableDescriptor instance with the given name. The - * constructors on this method have remained stable over HBase development - * so far, but the old HTableDescriptor(String) constructor & byte[] friends - * are now marked deprecated and may eventually be removed in favor of the - * HTableDescriptor(TableName) constructor. That constructor (and the - * TableName type) only exists in newer HBase versions. Hence this method. - * - * @param tableName - * HBase table name - * @return a new table descriptor instance - */ - public TableDescriptor newTableDescriptor(String tableName); - - ConnectionMask createConnection(Configuration conf) throws IOException; - - TableDescriptor addColumnFamilyToTableDescriptor(TableDescriptor tdesc, ColumnFamilyDescriptor cdesc); - - void setTimestamp(Delete d, long timestamp); -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat2_0.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat2_0.java deleted file mode 100644 index fdba24a3b6..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompat2_0.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -import org.apache.hadoop.hbase.io.compress.Compression; - -import java.io.IOException; - -public class HBaseCompat2_0 implements HBaseCompat { - - @Override - public ColumnFamilyDescriptor setCompression(ColumnFamilyDescriptor cd, String algo) { - return ColumnFamilyDescriptorBuilder.newBuilder(cd).setCompressionType(Compression.Algorithm.valueOf(algo)).build(); - } - - @Override - public TableDescriptor newTableDescriptor(String tableName) { - TableName tn = TableName.valueOf(tableName); - - return TableDescriptorBuilder.newBuilder(tn).build(); - } - - @Override - public ConnectionMask createConnection(Configuration conf) throws IOException - { - return new HConnection2_0(ConnectionFactory.createConnection(conf)); - } - - @Override - public TableDescriptor addColumnFamilyToTableDescriptor(TableDescriptor tdesc, ColumnFamilyDescriptor cdesc) - { - return TableDescriptorBuilder.newBuilder(tdesc).addColumnFamily(cdesc).build(); - } - - @Override - public void setTimestamp(Delete d, long timestamp) - { - d.setTimestamp(timestamp); - } - -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompatLoader.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompatLoader.java deleted file mode 100644 index d746b3db0c..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseCompatLoader.java +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.util.VersionInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class HBaseCompatLoader { - - private static final Logger log = LoggerFactory.getLogger(HBaseCompatLoader.class); - - private static final String DEFAULT_HBASE_COMPAT_VERSION = "1.2"; - - private static final String HBASE_VERSION_2_STRING = "2."; - - private static final String DEFAULT_HBASE_COMPAT_CLASS_NAME = - "org.janusgraph.diskstorage.hbase2.HBaseCompat2_0"; - - private static final String[] HBASE_SUPPORTED_VERSIONS = - new String[] { "0.98", "1.0", "1.1", "1.2", "1.3", "2.0" }; - - private static HBaseCompat cachedCompat; - - public synchronized static HBaseCompat getCompat(String classOverride) { - - if (null != cachedCompat) { - log.debug("Returning cached HBase compatibility layer: {}", cachedCompat); - return cachedCompat; - } - - HBaseCompat compat; - String className = null; - String classNameSource = null; - - if (null != classOverride) { - className = classOverride; - classNameSource = "from explicit configuration"; - } else { - String hbaseVersion = VersionInfo.getVersion(); - for (String supportedVersion : HBASE_SUPPORTED_VERSIONS) { - if (hbaseVersion.startsWith(supportedVersion + ".")) { - if (hbaseVersion.startsWith(HBASE_VERSION_2_STRING)) { - // All HBase 2.x maps to HBaseCompat2_0. - className = DEFAULT_HBASE_COMPAT_CLASS_NAME; - } - else { - className = "org.janusgraph.diskstorage.hbase2.HBaseCompat" + supportedVersion.replaceAll("\\.", "_"); - } - classNameSource = "supporting runtime HBase version " + hbaseVersion; - break; - } - } - if (null == className) { - log.info("The HBase version {} is not explicitly supported by JanusGraph. " + - "Loading JanusGraph's compatibility layer for its most recent supported HBase version ({})", - hbaseVersion, DEFAULT_HBASE_COMPAT_VERSION); - className = DEFAULT_HBASE_COMPAT_CLASS_NAME; - classNameSource = " by default"; - } - } - - final String errTemplate = " when instantiating HBase compatibility class " + className; - - try { - compat = (HBaseCompat)Class.forName(className).newInstance(); - log.info("Instantiated HBase compatibility layer {}: {}", classNameSource, compat.getClass().getCanonicalName()); - } catch (IllegalAccessException e) { - throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e); - } catch (InstantiationException e) { - throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e.getClass().getSimpleName() + errTemplate, e); - } - - return cachedCompat = compat; - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseKeyColumnValueStore.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseKeyColumnValueStore.java deleted file mode 100644 index ffafc8c4dd..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseKeyColumnValueStore.java +++ /dev/null @@ -1,391 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.janusgraph.diskstorage.hbase2; - -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Iterators; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; -import org.apache.hadoop.hbase.filter.ColumnRangeFilter; -import org.apache.hadoop.hbase.filter.Filter; -import org.apache.hadoop.hbase.filter.FilterList; -import org.apache.hadoop.hbase.util.Bytes; -import org.janusgraph.diskstorage.BackendException; -import org.janusgraph.diskstorage.Entry; -import org.janusgraph.diskstorage.EntryList; -import org.janusgraph.diskstorage.EntryMetaData; -import org.janusgraph.diskstorage.PermanentBackendException; -import org.janusgraph.diskstorage.StaticBuffer; -import org.janusgraph.diskstorage.TemporaryBackendException; -import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation; -import org.janusgraph.diskstorage.keycolumnvalue.KCVSUtil; -import org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore; -import org.janusgraph.diskstorage.keycolumnvalue.KeyIterator; -import org.janusgraph.diskstorage.keycolumnvalue.KeyRangeQuery; -import org.janusgraph.diskstorage.keycolumnvalue.KeySliceQuery; -import org.janusgraph.diskstorage.keycolumnvalue.KeySlicesIterator; -import org.janusgraph.diskstorage.keycolumnvalue.SliceQuery; -import org.janusgraph.diskstorage.keycolumnvalue.MultiSlicesQuery; -import org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction; -import org.janusgraph.diskstorage.util.RecordIterator; -import org.janusgraph.diskstorage.util.StaticArrayBuffer; -import org.janusgraph.diskstorage.util.StaticArrayEntry; -import org.janusgraph.diskstorage.util.StaticArrayEntryList; -import org.janusgraph.util.system.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.Nullable; -import java.io.Closeable; -import java.io.IOException; -import java.io.InterruptedIOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NavigableMap; - -/** - * Here are some areas that might need work: - *
    - * - batching? (consider HTable#batch, HTable#setAutoFlush(false) - * - tuning HTable#setWriteBufferSize (?) - * - writing a server-side filter to replace ColumnCountGetFilter, which drops - * all columns on the row where it reaches its limit. This requires getSlice, - * currently, to impose its limit on the client side. That obviously won't - * scale. - * - RowMutations for combining Puts+Deletes (need a newer HBase than 0.92 for this) - * - (maybe) fiddle with HTable#setRegionCachePrefetch and/or #prewarmRegionCache - *
    - * There may be other problem areas. These are just the ones of which I'm aware. - */ -public class HBaseKeyColumnValueStore implements KeyColumnValueStore { - - private static final Logger logger = LoggerFactory.getLogger(HBaseKeyColumnValueStore.class); - - private final String tableName; - private final HBaseStoreManager storeManager; - - // When using shortened CF names, columnFamily is the shortname and storeName is the longname - // When not using shortened CF names, they are the same - //private final String columnFamily; - private final String storeName; - // This is columnFamily.getBytes() - private final byte[] columnFamilyBytes; - private final HBaseGetter entryGetter; - - private final ConnectionMask cnx; - - HBaseKeyColumnValueStore(HBaseStoreManager storeManager, ConnectionMask cnx, String tableName, String columnFamily, String storeName) { - this.storeManager = storeManager; - this.cnx = cnx; - this.tableName = tableName; - //this.columnFamily = columnFamily; - this.storeName = storeName; - this.columnFamilyBytes = Bytes.toBytes(columnFamily); - this.entryGetter = new HBaseGetter(storeManager.getMetaDataSchema(storeName)); - } - - @Override - public void close() throws BackendException { - } - - @Override - public EntryList getSlice(KeySliceQuery query, StoreTransaction txh) throws BackendException { - Map result = getHelper(Arrays.asList(query.getKey()), getFilter(query)); - return Iterables.getOnlyElement(result.values(), EntryList.EMPTY_LIST); - } - - @Override - public Map getSlice(List keys, SliceQuery query, StoreTransaction txh) throws BackendException { - return getHelper(keys, getFilter(query)); - } - - @Override - public void mutate(StaticBuffer key, List additions, List deletions, StoreTransaction txh) throws BackendException { - Map mutations = ImmutableMap.of(key, new KCVMutation(additions, deletions)); - mutateMany(mutations, txh); - } - - @Override - public void acquireLock(StaticBuffer key, - StaticBuffer column, - StaticBuffer expectedValue, - StoreTransaction txh) throws BackendException { - throw new UnsupportedOperationException(); - } - - @Override - public KeyIterator getKeys(KeyRangeQuery query, StoreTransaction txh) throws BackendException { - return executeKeySliceQuery(query.getKeyStart().as(StaticBuffer.ARRAY_FACTORY), - query.getKeyEnd().as(StaticBuffer.ARRAY_FACTORY), - new FilterList(FilterList.Operator.MUST_PASS_ALL), - query); - } - - @Override - public String getName() { - return storeName; - } - - @Override - public KeyIterator getKeys(SliceQuery query, StoreTransaction txh) throws BackendException { - return executeKeySliceQuery(new FilterList(FilterList.Operator.MUST_PASS_ALL), query); - } - - @Override - public KeySlicesIterator getKeys(MultiSlicesQuery queries, StoreTransaction txh) throws BackendException { - throw new UnsupportedOperationException(); - } - - public static Filter getFilter(SliceQuery query) { - byte[] colStartBytes = query.getSliceStart().length() > 0 ? query.getSliceStart().as(StaticBuffer.ARRAY_FACTORY) : null; - byte[] colEndBytes = query.getSliceEnd().length() > 0 ? 
query.getSliceEnd().as(StaticBuffer.ARRAY_FACTORY) : null; - - Filter filter = new ColumnRangeFilter(colStartBytes, true, colEndBytes, false); - - if (query.hasLimit()) { - filter = new FilterList(FilterList.Operator.MUST_PASS_ALL, - filter, - new ColumnPaginationFilter(query.getLimit(), 0)); - } - - logger.debug("Generated HBase Filter {}", filter); - - return filter; - } - - private Map getHelper(List keys, Filter getFilter) throws BackendException { - List requests = new ArrayList(keys.size()); - { - for (StaticBuffer key : keys) { - Get g = new Get(key.as(StaticBuffer.ARRAY_FACTORY)).addFamily(columnFamilyBytes).setFilter(getFilter); - try { - g.setTimeRange(0, Long.MAX_VALUE); - } catch (IOException e) { - throw new PermanentBackendException(e); - } - requests.add(g); - } - } - - Map resultMap = new HashMap(keys.size()); - - try { - TableMask table = null; - Result[] results = null; - - try { - table = cnx.getTable(tableName); - results = table.get(requests); - } finally { - IOUtils.closeQuietly(table); - } - - if (results == null) - return KCVSUtil.emptyResults(keys); - - assert results.length==keys.size(); - - for (int i = 0; i < results.length; i++) { - Result result = results[i]; - NavigableMap>> f = result.getMap(); - - if (f == null) { // no result for this key - resultMap.put(keys.get(i), EntryList.EMPTY_LIST); - continue; - } - - // actual key with - NavigableMap> r = f.get(columnFamilyBytes); - resultMap.put(keys.get(i), (r == null) - ? EntryList.EMPTY_LIST - : StaticArrayEntryList.ofBytes(r.entrySet(), entryGetter)); - } - - return resultMap; - } catch (InterruptedIOException e) { - // added to support traversal interruption - Thread.currentThread().interrupt(); - throw new PermanentBackendException(e); - } catch (IOException e) { - throw new TemporaryBackendException(e); - } - } - - private void mutateMany(Map mutations, StoreTransaction txh) throws BackendException { - storeManager.mutateMany(ImmutableMap.of(storeName, mutations), txh); - } - - private KeyIterator executeKeySliceQuery(FilterList filters, @Nullable SliceQuery columnSlice) throws BackendException { - return executeKeySliceQuery(null, null, filters, columnSlice); - } - - private KeyIterator executeKeySliceQuery(@Nullable byte[] startKey, - @Nullable byte[] endKey, - FilterList filters, - @Nullable SliceQuery columnSlice) throws BackendException { - Scan scan = new Scan().addFamily(columnFamilyBytes); - - try { - scan.setTimeRange(0, Long.MAX_VALUE); - } catch (IOException e) { - throw new PermanentBackendException(e); - } - - if (startKey != null) - scan.withStartRow(startKey); - - if (endKey != null) - scan.withStopRow(endKey); - - if (columnSlice != null) { - filters.addFilter(getFilter(columnSlice)); - } - - TableMask table = null; - - try { - table = cnx.getTable(tableName); - return new RowIterator(table, table.getScanner(scan.setFilter(filters)), columnFamilyBytes); - } catch (IOException e) { - IOUtils.closeQuietly(table); - throw new PermanentBackendException(e); - } - } - - private class RowIterator implements KeyIterator { - private final Closeable table; - private final Iterator rows; - private final byte[] columnFamilyBytes; - - private Result currentRow; - private boolean isClosed; - - public RowIterator(Closeable table, ResultScanner rows, byte[] columnFamilyBytes) { - this.table = table; - this.columnFamilyBytes = Arrays.copyOf(columnFamilyBytes, columnFamilyBytes.length); - this.rows = Iterators.filter(rows.iterator(), result -> null != result && null != result.getRow()); - } - - @Override - 
public RecordIterator getEntries() { - ensureOpen(); - - return new RecordIterator() { - private final Iterator>> kv; - { - final Map>> map = currentRow.getMap(); - Preconditions.checkNotNull(map); - kv = map.get(columnFamilyBytes).entrySet().iterator(); - } - - @Override - public boolean hasNext() { - ensureOpen(); - return kv.hasNext(); - } - - @Override - public Entry next() { - ensureOpen(); - return StaticArrayEntry.ofBytes(kv.next(), entryGetter); - } - - @Override - public void close() { - isClosed = true; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - @Override - public boolean hasNext() { - ensureOpen(); - return rows.hasNext(); - } - - @Override - public StaticBuffer next() { - ensureOpen(); - - currentRow = rows.next(); - return StaticArrayBuffer.of(currentRow.getRow()); - } - - @Override - public void close() { - IOUtils.closeQuietly(table); - isClosed = true; - logger.debug("RowIterator closed table {}", table); - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - private void ensureOpen() { - if (isClosed) - throw new IllegalStateException("Iterator has been closed."); - } - } - - private static class HBaseGetter implements StaticArrayEntry.GetColVal>, byte[]> { - - private final EntryMetaData[] schema; - - private HBaseGetter(EntryMetaData[] schema) { - this.schema = schema; - } - - @Override - public byte[] getColumn(Map.Entry> element) { - return element.getKey(); - } - - @Override - public byte[] getValue(Map.Entry> element) { - return element.getValue().lastEntry().getValue(); - } - - @Override - public EntryMetaData[] getMetaSchema(Map.Entry> element) { - return schema; - } - - @Override - public Object getMetaData(Map.Entry> element, EntryMetaData meta) { - switch(meta) { - case TIMESTAMP: - return element.getValue().lastEntry().getKey(); - default: - throw new UnsupportedOperationException("Unsupported meta data: " + meta); - } - } - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseStoreManager.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseStoreManager.java deleted file mode 100644 index f98fa7b766..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseStoreManager.java +++ /dev/null @@ -1,986 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
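A minimal sketch of the slice-read pattern used by the HBaseKeyColumnValueStore deleted above, assuming a plain HBase 2.x client API: a single-row slice becomes a Get restricted to one column family, with a ColumnRangeFilter (start inclusive, end exclusive) and, when the query carries a limit, a ColumnPaginationFilter. The class name, method name, and table name below are illustrative placeholders, not code from this change.

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
    import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;

    public final class SliceGetSketch {
        // Fetch the columns of one row in [colStart, colEnd), capped at `limit` columns.
        static Result sliceGet(Connection cnx, byte[] rowKey, byte[] cf,
                               byte[] colStart, byte[] colEnd, int limit) throws IOException {
            Filter range  = new ColumnRangeFilter(colStart, true, colEnd, false);
            Filter filter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
                                           range, new ColumnPaginationFilter(limit, 0));
            Get get = new Get(rowKey).addFamily(cf).setFilter(filter);
            try (Table table = cnx.getTable(TableName.valueOf("janusgraph"))) { // illustrative table name
                return table.get(get);
            }
        }
    }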
- -package org.janusgraph.diskstorage.hbase2; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.collect.BiMap; -import com.google.common.collect.ImmutableBiMap; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.HRegionLocation; -import org.apache.hadoop.hbase.MasterNotRunningException; -import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.TableNotEnabledException; -import org.apache.hadoop.hbase.TableNotFoundException; -import org.apache.hadoop.hbase.ZooKeeperConnectionException; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Mutation; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.Pair; -import org.apache.hadoop.hbase.util.VersionInfo; -import org.janusgraph.core.JanusGraphException; -import org.janusgraph.diskstorage.BackendException; -import org.janusgraph.diskstorage.BaseTransactionConfig; -import org.janusgraph.diskstorage.Entry; -import org.janusgraph.diskstorage.EntryMetaData; -import org.janusgraph.diskstorage.PermanentBackendException; -import org.janusgraph.diskstorage.StaticBuffer; -import org.janusgraph.diskstorage.StoreMetaData; -import org.janusgraph.diskstorage.TemporaryBackendException; -import org.janusgraph.diskstorage.common.DistributedStoreManager; -import org.janusgraph.diskstorage.configuration.ConfigElement; -import org.janusgraph.diskstorage.configuration.ConfigNamespace; -import org.janusgraph.diskstorage.configuration.ConfigOption; -import org.janusgraph.diskstorage.configuration.Configuration; -import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation; -import org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore; -import org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStoreManager; -import org.janusgraph.diskstorage.keycolumnvalue.KeyRange; -import org.janusgraph.diskstorage.keycolumnvalue.StandardStoreFeatures; -import org.janusgraph.diskstorage.keycolumnvalue.StoreFeatures; -import org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction; -import org.janusgraph.diskstorage.util.BufferUtil; -import org.janusgraph.diskstorage.util.StaticArrayBuffer; -import org.janusgraph.diskstorage.util.time.TimestampProviders; -import org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration; -import org.janusgraph.graphdb.configuration.PreInitializeConfigOptions; -import org.janusgraph.util.system.IOUtils; -import org.janusgraph.util.system.NetworkUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import static org.janusgraph.diskstorage.Backend.EDGESTORE_NAME; -import static org.janusgraph.diskstorage.Backend.INDEXSTORE_NAME; -import static 
org.janusgraph.diskstorage.Backend.LOCK_STORE_SUFFIX; -import static org.janusgraph.diskstorage.Backend.SYSTEM_MGMT_LOG_NAME; -import static org.janusgraph.diskstorage.Backend.SYSTEM_TX_LOG_NAME; -import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.DROP_ON_CLEAR; -import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.GRAPH_NAME; -import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.IDS_STORE_NAME; -import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.SYSTEM_PROPERTIES_STORE_NAME; - -/** - * Storage Manager for HBase - */ -@PreInitializeConfigOptions -public class HBaseStoreManager extends DistributedStoreManager implements KeyColumnValueStoreManager { - - private static final Logger logger = LoggerFactory.getLogger(HBaseStoreManager.class); - - public static final ConfigNamespace HBASE_NS = - new ConfigNamespace(GraphDatabaseConfiguration.STORAGE_NS, "hbase", "HBase storage options"); - - public static final ConfigOption SHORT_CF_NAMES = - new ConfigOption<>(HBASE_NS, "short-cf-names", - "Whether to shorten the names of JanusGraph's column families to one-character mnemonics " + - "to conserve storage space", ConfigOption.Type.FIXED, true); - - public static final String COMPRESSION_DEFAULT = "-DEFAULT-"; - - public static final ConfigOption COMPRESSION = - new ConfigOption<>(HBASE_NS, "compression-algorithm", - "An HBase Compression.Algorithm enum string which will be applied to newly created column families. " + - "The compression algorithm must be installed and available on the HBase cluster. JanusGraph cannot install " + - "and configure new compression algorithms on the HBase cluster by itself.", - ConfigOption.Type.MASKABLE, "GZ"); - - public static final ConfigOption SKIP_SCHEMA_CHECK = - new ConfigOption<>(HBASE_NS, "skip-schema-check", - "Assume that JanusGraph's HBase table and column families already exist. " + - "When this is true, JanusGraph will not check for the existence of its table/CFs, " + - "nor will it attempt to create them under any circumstances. This is useful " + - "when running JanusGraph without HBase admin privileges.", - ConfigOption.Type.MASKABLE, false); - - public static final ConfigOption HBASE_TABLE = - new ConfigOption<>(HBASE_NS, "table", - "The name of the table JanusGraph will use. When " + ConfigElement.getPath(SKIP_SCHEMA_CHECK) + - " is false, JanusGraph will automatically create this table if it does not already exist." + - " If this configuration option is not provided but graph.graphname is, the table will be set" + - " to that value.", - ConfigOption.Type.LOCAL, "janusgraph"); - - /** - * Related bug fixed in 0.98.0, 0.94.7, 0.95.0: - * - * https://issues.apache.org/jira/browse/HBASE-8170 - */ - public static final int MIN_REGION_COUNT = 3; - - /** - * The total number of HBase regions to create with JanusGraph's table. This - * setting only effects table creation; this normally happens just once when - * JanusGraph connects to an HBase backend for the first time. - */ - public static final ConfigOption REGION_COUNT = - new ConfigOption(HBASE_NS, "region-count", - "The number of initial regions set when creating JanusGraph's HBase table", - ConfigOption.Type.MASKABLE, Integer.class, input -> null != input && MIN_REGION_COUNT <= input); - - /** - * This setting is used only when {@link #REGION_COUNT} is unset. - *

    - * If JanusGraph's HBase table does not exist, then it will be created with total
    - * region count = (number of servers reported by ClusterStatus) * (this
    - * value).
    - *
    - * The Apache HBase manual suggests an order-of-magnitude range of potential
    - * values for this setting:
    - *
    - * • 2.5.2.7. Managed Splitting:
    - *   What's the optimal number of pre-split regions to create? Mileage will
    - *   vary depending upon your application. You could start low with 10
    - *   pre-split regions / server and watch as data grows over time. It's
    - *   better to err on the side of too little regions and rolling split later.
    - *
    - * • 9.7 Regions:
    - *   In general, HBase is designed to run with a small (20-200) number of
    - *   relatively large (5-20Gb) regions per server... Typically you want to
    - *   keep your region count low on HBase for numerous reasons. Usually
    - *   right around 100 regions per RegionServer has yielded the best results.
    - * - * These considerations may differ for other HBase implementations (e.g. MapR). - */ - public static final ConfigOption REGIONS_PER_SERVER = - new ConfigOption<>(HBASE_NS, "regions-per-server", - "The number of regions per regionserver to set when creating JanusGraph's HBase table", - ConfigOption.Type.MASKABLE, Integer.class); - - /** - * If this key is present in either the JVM system properties or the process - * environment (checked in the listed order, first hit wins), then its value - * must be the full package and class name of an implementation of - * {@link HBaseCompat} that has a no-arg public constructor. - *

    - * When this is not set, JanusGraph attempts to automatically detect the
    - * HBase runtime version by calling {@link VersionInfo#getVersion()}. JanusGraph
    - * then checks the returned version string against a hard-coded list of
    - * supported version prefixes and instantiates the associated compat layer
    - * if a match is found.
    - *
    - * When this is set, JanusGraph will not call
    - * {@code VersionInfo.getVersion()} or read its hard-coded list of supported
    - * version prefixes. JanusGraph will instead attempt to instantiate the class
    - * specified (via the no-arg constructor which must exist) and then attempt
    - * to cast it to HBaseCompat and use it as such. JanusGraph will assume the
    - * supplied implementation is compatible with the runtime HBase version and
    - * make no attempt to verify that assumption.
    - *
    - * Setting this key incorrectly could cause runtime exceptions at best or - * silent data corruption at worst. This setting is intended for users - * running exotic HBase implementations that don't support VersionInfo or - * implementations which return values from {@code VersionInfo.getVersion()} - * that are inconsistent with Apache's versioning convention. It may also be - * useful to users who want to run against a new release of HBase that JanusGraph - * doesn't yet officially support. - * - */ - public static final ConfigOption COMPAT_CLASS = - new ConfigOption<>(HBASE_NS, "compat-class", - "The package and class name of the HBaseCompat implementation. HBaseCompat masks version-specific HBase API differences. " + - "When this option is unset, JanusGraph calls HBase's VersionInfo.getVersion() and loads the matching compat class " + - "at runtime. Setting this option forces JanusGraph to instead reflectively load and instantiate the specified class.", - ConfigOption.Type.MASKABLE, String.class); - - public static final int PORT_DEFAULT = 9160; - - public static final TimestampProviders PREFERRED_TIMESTAMPS = TimestampProviders.MICRO; - - public static final ConfigNamespace HBASE_CONFIGURATION_NAMESPACE = - new ConfigNamespace(HBASE_NS, "ext", "Overrides for hbase-{site,default}.xml options", true); - - private static final StaticBuffer FOUR_ZERO_BYTES = BufferUtil.zeroBuffer(4); - - // Immutable instance fields - private final BiMap shortCfNameMap; - private final String tableName; - private final String compression; - private final int regionCount; - private final int regionsPerServer; - private final ConnectionMask cnx; - private final org.apache.hadoop.conf.Configuration hconf; - private final boolean shortCfNames; - private final boolean skipSchemaCheck; - private final String compatClass; - private final HBaseCompat compat; - // Cached return value of getDeployment() as requesting it can be expensive. - private Deployment deployment = null; - - private static final ConcurrentHashMap openManagers = new ConcurrentHashMap<>(); - - // Mutable instance state - private final ConcurrentMap openStores; - - public HBaseStoreManager(org.janusgraph.diskstorage.configuration.Configuration config) throws BackendException { - super(config, PORT_DEFAULT); - - shortCfNameMap = createShortCfMap(config); - - Preconditions.checkArgument(null != shortCfNameMap); - Collection shorts = shortCfNameMap.values(); - Preconditions.checkArgument(Sets.newHashSet(shorts).size() == shorts.size()); - - checkConfigDeprecation(config); - - this.tableName = determineTableName(config); - this.compression = config.get(COMPRESSION); - this.regionCount = config.has(REGION_COUNT) ? config.get(REGION_COUNT) : -1; - this.regionsPerServer = config.has(REGIONS_PER_SERVER) ? config.get(REGIONS_PER_SERVER) : -1; - this.skipSchemaCheck = config.get(SKIP_SCHEMA_CHECK); - this.compatClass = config.has(COMPAT_CLASS) ? config.get(COMPAT_CLASS) : null; - this.compat = HBaseCompatLoader.getCompat(compatClass); - - /* - * Specifying both region count options is permitted but may be - * indicative of a misunderstanding, so issue a warning. 
- */ - if (config.has(REGIONS_PER_SERVER) && config.has(REGION_COUNT)) { - logger.warn("Both {} and {} are set in JanusGraph's configuration, but " - + "the former takes precedence and the latter will be ignored.", - REGION_COUNT, REGIONS_PER_SERVER); - } - - /* This static factory calls HBaseConfiguration.addHbaseResources(), - * which in turn applies the contents of hbase-default.xml and then - * applies the contents of hbase-site.xml. - */ - this.hconf = HBaseConfiguration.create(); - - // Copy a subset of our commons config into a Hadoop config - int keysLoaded=0; - Map configSub = config.getSubset(HBASE_CONFIGURATION_NAMESPACE); - for (Map.Entry entry : configSub.entrySet()) { - logger.info("HBase configuration: setting {}={}", entry.getKey(), entry.getValue()); - if (entry.getValue()==null) continue; - hconf.set(entry.getKey(), entry.getValue().toString()); - keysLoaded++; - } - - // Special case for STORAGE_HOSTS - if (config.has(GraphDatabaseConfiguration.STORAGE_HOSTS)) { - String zkQuorumKey = "hbase.zookeeper.quorum"; - String csHostList = Joiner.on(",").join(config.get(GraphDatabaseConfiguration.STORAGE_HOSTS)); - hconf.set(zkQuorumKey, csHostList); - logger.info("Copied host list from {} to {}: {}", GraphDatabaseConfiguration.STORAGE_HOSTS, zkQuorumKey, csHostList); - } - - logger.debug("HBase configuration: set a total of {} configuration values", keysLoaded); - - this.shortCfNames = config.get(SHORT_CF_NAMES); - - try { - //this.cnx = HConnectionManager.createConnection(hconf); - this.cnx = compat.createConnection(hconf); - } catch (IOException e) { - throw new PermanentBackendException(e); - } - - if (logger.isTraceEnabled()) { - openManagers.put(this, new Throwable("Manager Opened")); - dumpOpenManagers(); - } - - logger.debug("Dumping HBase config key=value pairs"); - for (Map.Entry entry : hconf) { - logger.debug("[HBaseConfig] " + entry.getKey() + "=" + entry.getValue()); - } - logger.debug("End of HBase config key=value pairs"); - - openStores = new ConcurrentHashMap<>(); - } - - public static BiMap createShortCfMap(Configuration config) { - return ImmutableBiMap.builder() - .put(INDEXSTORE_NAME, "g") - .put(INDEXSTORE_NAME + LOCK_STORE_SUFFIX, "h") - .put(config.get(IDS_STORE_NAME), "i") - .put(EDGESTORE_NAME, "e") - .put(EDGESTORE_NAME + LOCK_STORE_SUFFIX, "f") - .put(SYSTEM_PROPERTIES_STORE_NAME, "s") - .put(SYSTEM_PROPERTIES_STORE_NAME + LOCK_STORE_SUFFIX, "t") - .put(SYSTEM_MGMT_LOG_NAME, "m") - .put(SYSTEM_TX_LOG_NAME, "l") - .build(); - } - - @Override - public Deployment getDeployment() { - if (null != deployment) { - return deployment; - } - - List local; - try { - local = getLocalKeyPartition(); - deployment = null != local && !local.isEmpty() ? 
Deployment.LOCAL : Deployment.REMOTE; - } catch (BackendException e) { - throw new RuntimeException(e); - } - return deployment; - } - - @Override - public String toString() { - return "hbase[" + tableName + "@" + super.toString() + "]"; - } - - public void dumpOpenManagers() { - int estimatedSize = openManagers.size(); - logger.trace("---- Begin open HBase store manager list ({} managers) ----", estimatedSize); - for (HBaseStoreManager m : openManagers.keySet()) { - logger.trace("Manager {} opened at:", m, openManagers.get(m)); - } - logger.trace("---- End open HBase store manager list ({} managers) ----", estimatedSize); - } - - @Override - public void close() { - openStores.clear(); - if (logger.isTraceEnabled()) - openManagers.remove(this); - IOUtils.closeQuietly(cnx); - } - - @Override - public StoreFeatures getFeatures() { - - Configuration c = GraphDatabaseConfiguration.buildGraphConfiguration(); - - StandardStoreFeatures.Builder fb = new StandardStoreFeatures.Builder() - .orderedScan(true).unorderedScan(true).batchMutation(true) - .multiQuery(true).distributed(true).keyOrdered(true).storeTTL(true) - .cellTTL(true).timestamps(true).preferredTimestamps(PREFERRED_TIMESTAMPS) - .optimisticLocking(true).keyConsistent(c); - - try { - fb.localKeyPartition(getDeployment() == Deployment.LOCAL); - } catch (Exception e) { - logger.warn("Unexpected exception during getDeployment()", e); - } - - return fb.build(); - } - - @Override - public void mutateMany(Map> mutations, StoreTransaction txh) throws BackendException { - final MaskedTimestamp commitTime = new MaskedTimestamp(txh); - // In case of an addition and deletion with identical timestamps, the - // deletion tombstone wins. - // http://hbase.apache.org/book/versions.html#d244e4250 - final Map, Delete>> commandsPerKey = - convertToCommands( - mutations, - commitTime.getAdditionTime(times), - commitTime.getDeletionTime(times)); - - final List batch = new ArrayList<>(commandsPerKey.size()); // actual batch operation - - // convert sorted commands into representation required for 'batch' operation - for (Pair, Delete> commands : commandsPerKey.values()) { - if (commands.getFirst() != null && !commands.getFirst().isEmpty()) - batch.addAll(commands.getFirst()); - - if (commands.getSecond() != null) - batch.add(commands.getSecond()); - } - - try { - TableMask table = null; - - try { - table = cnx.getTable(tableName); - table.batch(batch, new Object[batch.size()]); - } finally { - IOUtils.closeQuietly(table); - } - } catch (IOException e) { - throw new TemporaryBackendException(e); - } catch (InterruptedException e) { - throw new TemporaryBackendException(e); - } - - this.sleepAfterWrite(commitTime); - } - - @Override - public KeyColumnValueStore openDatabase(String longName, StoreMetaData.Container metaData) throws BackendException { - // HBase does not support retrieving cell-level TTL by the client. 
- Preconditions.checkArgument(!storageConfig.has(GraphDatabaseConfiguration.STORE_META_TTL, longName) - || !storageConfig.get(GraphDatabaseConfiguration.STORE_META_TTL, longName)); - - HBaseKeyColumnValueStore store = openStores.get(longName); - - if (store == null) { - final String cfName = getCfNameForStoreName(longName); - - HBaseKeyColumnValueStore newStore = new HBaseKeyColumnValueStore(this, cnx, tableName, cfName, longName); - - store = openStores.putIfAbsent(longName, newStore); // nothing bad happens if we loose to other thread - - if (store == null) { - if (!skipSchemaCheck) { - int cfTTLInSeconds = -1; - if (metaData.contains(StoreMetaData.TTL)) { - cfTTLInSeconds = metaData.get(StoreMetaData.TTL); - } - ensureColumnFamilyExists(tableName, cfName, cfTTLInSeconds); - } - - store = newStore; - } - } - - return store; - } - - @Override - public StoreTransaction beginTransaction(final BaseTransactionConfig config) throws BackendException { - return new HBaseTransaction(config); - } - - @Override - public String getName() { - return tableName; - } - - /** - * Deletes the specified table with all its columns. - * ATTENTION: Invoking this method will delete the table if it exists and therefore causes data loss. - */ - @Override - public void clearStorage() throws BackendException { - try (AdminMask adm = getAdminInterface()) { - if (this.storageConfig.get(DROP_ON_CLEAR)) { - adm.dropTable(tableName); - } else { - adm.clearTable(tableName, times.getTime(times.getTime())); - } - } catch (IOException e) - { - throw new TemporaryBackendException(e); - } - } - - @Override - public boolean exists() throws BackendException { - try (final AdminMask adm = getAdminInterface()) { - return adm.tableExists(tableName); - } catch (IOException e) { - throw new TemporaryBackendException(e); - } - } - - @Override - public List getLocalKeyPartition() throws BackendException { - List result = new LinkedList<>(); - try { - ensureTableExists( - tableName, getCfNameForStoreName(GraphDatabaseConfiguration.SYSTEM_PROPERTIES_STORE_NAME), 0); - Map normed = normalizeKeyBounds(cnx.getRegionLocations(tableName)); - - for (Map.Entry e : normed.entrySet()) { - if (NetworkUtil.isLocalConnection(e.getValue().getHostname())) { - result.add(e.getKey()); - logger.debug("Found local key/row partition {} on host {}", e.getKey(), e.getValue()); - } else { - logger.debug("Discarding remote {}", e.getValue()); - } - } - } catch (MasterNotRunningException e) { - logger.warn("Unexpected MasterNotRunningException", e); - } catch (ZooKeeperConnectionException e) { - logger.warn("Unexpected ZooKeeperConnectionException", e); - } catch (IOException e) { - logger.warn("Unexpected IOException", e); - } - return result; - } - - /** - * each key from an {@link HRegionInfo} to a {@link KeyRange} expressing the - * region's start and end key bounds using JanusGraph-partitioning-friendly - * conventions (start inclusive, end exclusive, zero bytes appended where - * necessary to make all keys at least 4 bytes long). - *

    - * This method iterates over the entries in its map parameter and performs
    - * the following conditional conversions on its keys. "Require" below means
    - * either a {@link Preconditions} invocation or an assertion. HRegionInfo
    - * sometimes returns start and end keys of zero length; this method replaces
    - * zero length keys with null before doing any of the checks described
    - * below. The parameter map and the values it contains are only read and
    - * never modified.
    - *
    - * • If an entry's HRegionInfo has null start and end keys, then first
    - *   require that the parameter map is a singleton, and then return a
    - *   single-entry map whose {@code KeyRange} has start and end buffers that
    - *   are both four bytes of zeros.
    - * • If the entry has a null end key (but non-null start key), put an
    - *   equivalent entry in the result map with a start key identical to the
    - *   input, except that zeros are appended to values less than 4 bytes long,
    - *   and an end key that is four bytes of zeros.
    - * • If the entry has a null start key (but non-null end key), put an
    - *   equivalent entry in the result map where the start key is four bytes of
    - *   zeros, and the end key has zeros appended, if necessary, to make it at
    - *   least 4 bytes long, after which one is added to the padded value in
    - *   unsigned 32-bit arithmetic with overflow allowed.
    - * • Any entry which matches none of the above criteria results in an
    - *   equivalent entry in the returned map, except that zeros are appended to
    - *   both keys to make each at least 4 bytes long, and the end key is then
    - *   incremented as described in the last bullet point.
    - * - * After iterating over the parameter map, this method checks that it either - * saw no entries with null keys, one entry with a null start key and a - * different entry with a null end key, or one entry with both start and end - * keys null. If any null keys are observed besides these three cases, the - * method will die with a precondition failure. - * - * @param locations A list of HRegionInfo - * @return JanusGraph-friendly expression of each region's rowkey boundaries - */ - private Map normalizeKeyBounds(List locations) { - - HRegionLocation nullStart = null; - HRegionLocation nullEnd = null; - - ImmutableMap.Builder b = ImmutableMap.builder(); - - for (HRegionLocation location : locations) { - HRegionInfo regionInfo = location.getRegionInfo(); - ServerName serverName = location.getServerName(); - byte startKey[] = regionInfo.getStartKey(); - byte endKey[] = regionInfo.getEndKey(); - - if (0 == startKey.length) { - startKey = null; - logger.trace("Converted zero-length HBase startKey byte array to null"); - } - - if (0 == endKey.length) { - endKey = null; - logger.trace("Converted zero-length HBase endKey byte array to null"); - } - - if (null == startKey && null == endKey) { - Preconditions.checkState(1 == locations.size()); - logger.debug("HBase table {} has a single region {}", tableName, regionInfo); - // Choose arbitrary shared value = startKey = endKey - return b.put(new KeyRange(FOUR_ZERO_BYTES, FOUR_ZERO_BYTES), serverName).build(); - } else if (null == startKey) { - logger.debug("Found HRegionInfo with null startKey on server {}: {}", serverName, regionInfo); - Preconditions.checkState(null == nullStart); - nullStart = location; - // I thought endBuf would be inclusive from the HBase javadoc, but in practice it is exclusive - StaticBuffer endBuf = StaticArrayBuffer.of(zeroExtend(endKey)); - // Replace null start key with zeroes - b.put(new KeyRange(FOUR_ZERO_BYTES, endBuf), serverName); - } else if (null == endKey) { - logger.debug("Found HRegionInfo with null endKey on server {}: {}", serverName, regionInfo); - Preconditions.checkState(null == nullEnd); - nullEnd = location; - // Replace null end key with zeroes - b.put(new KeyRange(StaticArrayBuffer.of(zeroExtend(startKey)), FOUR_ZERO_BYTES), serverName); - } else { - Preconditions.checkState(null != startKey); - Preconditions.checkState(null != endKey); - - // Convert HBase's inclusive end keys into exclusive JanusGraph end keys - StaticBuffer startBuf = StaticArrayBuffer.of(zeroExtend(startKey)); - StaticBuffer endBuf = StaticArrayBuffer.of(zeroExtend(endKey)); - - KeyRange kr = new KeyRange(startBuf, endBuf); - b.put(kr, serverName); - logger.debug("Found HRegionInfo with non-null end and start keys on server {}: {}", serverName, regionInfo); - } - } - - // Require either no null key bounds or a pair of them - Preconditions.checkState(!(null == nullStart ^ null == nullEnd)); - - // Check that every key in the result is at least 4 bytes long - Map result = b.build(); - for (KeyRange kr : result.keySet()) { - Preconditions.checkState(4 <= kr.getStart().length()); - Preconditions.checkState(4 <= kr.getEnd().length()); - } - - return result; - } - - /** - * If the parameter is shorter than 4 bytes, then create and return a new 4 - * byte array with the input array's bytes followed by zero bytes. Otherwise - * return the parameter. 
- * - * @param dataToPad non-null but possibly zero-length byte array - * @return either the parameter or a new array - */ - private final byte[] zeroExtend(byte[] dataToPad) { - assert null != dataToPad; - - final int targetLength = 4; - - if (targetLength <= dataToPad.length) - return dataToPad; - - byte padded[] = new byte[targetLength]; - - for (int i = 0; i < dataToPad.length; i++) - padded[i] = dataToPad[i]; - - for (int i = dataToPad.length; i < padded.length; i++) - padded[i] = (byte)0; - - return padded; - } - - public static String shortenCfName(BiMap shortCfNameMap, String longName) throws PermanentBackendException { - final String s; - if (shortCfNameMap.containsKey(longName)) { - s = shortCfNameMap.get(longName); - Preconditions.checkNotNull(s); - logger.debug("Substituted default CF name \"{}\" with short form \"{}\" to reduce HBase KeyValue size", longName, s); - } else { - if (shortCfNameMap.containsValue(longName)) { - String fmt = "Must use CF long-form name \"%s\" instead of the short-form name \"%s\" when configured with %s=true"; - String msg = String.format(fmt, shortCfNameMap.inverse().get(longName), longName, SHORT_CF_NAMES.getName()); - throw new PermanentBackendException(msg); - } - s = longName; - logger.debug("Kept default CF name \"{}\" because it has no associated short form", s); - } - return s; - } - - private TableDescriptor ensureTableExists(String tableName, String initialCFName, int ttlInSeconds) throws BackendException { - AdminMask adm = null; - - TableDescriptor desc; - - try { // Create our table, if necessary - adm = getAdminInterface(); - /* - * Some HBase versions/impls respond badly to attempts to create a - * table without at least one CF. See #661. Creating a CF along with - * the table avoids HBase carping. - */ - if (adm.tableExists(tableName)) { - desc = adm.getTableDescriptor(tableName); - // Check and warn if long and short cf names are mixedly used for the same table. 
- if (shortCfNames && initialCFName.equals(shortCfNameMap.get(SYSTEM_PROPERTIES_STORE_NAME))) { - String longCFName = shortCfNameMap.inverse().get(initialCFName); - if (desc.getColumnFamily(Bytes.toBytes(longCFName)) != null) { - logger.warn("Configuration {}=true, but the table \"{}\" already has column family with long name \"{}\".", - SHORT_CF_NAMES.getName(), tableName, longCFName); - logger.warn("Check {} configuration.", SHORT_CF_NAMES.getName()); - } - } - else if (!shortCfNames && initialCFName.equals(SYSTEM_PROPERTIES_STORE_NAME)) { - String shortCFName = shortCfNameMap.get(initialCFName); - if (desc.getColumnFamily(Bytes.toBytes(shortCFName)) != null) { - logger.warn("Configuration {}=false, but the table \"{}\" already has column family with short name \"{}\".", - SHORT_CF_NAMES.getName(), tableName, shortCFName); - logger.warn("Check {} configuration.", SHORT_CF_NAMES.getName()); - } - } - } else { - desc = createTable(tableName, initialCFName, ttlInSeconds, adm); - } - } catch (IOException e) { - throw new TemporaryBackendException(e); - } finally { - IOUtils.closeQuietly(adm); - } - - return desc; - } - - private TableDescriptor createTable(String tableName, String cfName, int ttlInSeconds, AdminMask adm) throws IOException { - TableDescriptor desc = compat.newTableDescriptor(tableName); - - ColumnFamilyDescriptor cdesc = ColumnFamilyDescriptorBuilder.of(cfName); - cdesc = setCFOptions(cdesc, ttlInSeconds); - - desc = compat.addColumnFamilyToTableDescriptor(desc, cdesc); - - int count; // total regions to create - String src; - - if (MIN_REGION_COUNT <= (count = regionCount)) { - src = "region count configuration"; - } else if (0 < regionsPerServer && - MIN_REGION_COUNT <= (count = regionsPerServer * adm.getEstimatedRegionServerCount())) { - src = "ClusterStatus server count"; - } else { - count = -1; - src = "default"; - } - - if (MIN_REGION_COUNT < count) { - adm.createTable(desc, getStartKey(count), getEndKey(count), count); - logger.debug("Created table {} with region count {} from {}", tableName, count, src); - } else { - adm.createTable(desc); - logger.debug("Created table {} with default start key, end key, and region count", tableName); - } - - return desc; - } - - /** - *

    - * From the {@code createTable} javadoc:
    - * "The start key specified will become the end key of the first region of
    - * the table, and the end key specified will become the start key of the
    - * last region of the table (the first region has a null start key and
    - * the last region has a null end key)"
    - *
    - * To summarize, the {@code createTable} argument called "startKey" is - * actually the end key of the first region. - */ - private byte[] getStartKey(int regionCount) { - ByteBuffer regionWidth = ByteBuffer.allocate(4); - regionWidth.putInt((int)(((1L << 32) - 1L) / regionCount)).flip(); - return StaticArrayBuffer.of(regionWidth).getBytes(0, 4); - } - - /** - * Companion to {@link #getStartKey(int)}. See its javadoc for details. - */ - private byte[] getEndKey(int regionCount) { - ByteBuffer regionWidth = ByteBuffer.allocate(4); - regionWidth.putInt((int)(((1L << 32) - 1L) / regionCount * (regionCount - 1))).flip(); - return StaticArrayBuffer.of(regionWidth).getBytes(0, 4); - } - - private void ensureColumnFamilyExists(String tableName, String columnFamily, int ttlInSeconds) throws BackendException { - AdminMask adm = null; - try { - adm = getAdminInterface(); - TableDescriptor desc = ensureTableExists(tableName, columnFamily, ttlInSeconds); - - Preconditions.checkNotNull(desc); - - ColumnFamilyDescriptor cf = desc.getColumnFamily(Bytes.toBytes(columnFamily)); - - // Create our column family, if necessary - if (cf == null) { - try { - if (!adm.isTableDisabled(tableName)) { - adm.disableTable(tableName); - } - } catch (TableNotEnabledException e) { - logger.debug("Table {} already disabled", tableName); - } catch (IOException e) { - throw new TemporaryBackendException(e); - } - - try { - ColumnFamilyDescriptor cdesc = ColumnFamilyDescriptorBuilder.of(columnFamily); - - setCFOptions(cdesc, ttlInSeconds); - - adm.addColumn(tableName, cdesc); - - try { - logger.debug("Added HBase ColumnFamily {}, waiting for 1 sec. to propogate.", columnFamily); - Thread.sleep(1000L); - } catch (InterruptedException ie) { - throw new TemporaryBackendException(ie); - } - - adm.enableTable(tableName); - } catch (TableNotFoundException ee) { - logger.error("TableNotFoundException", ee); - throw new PermanentBackendException(ee); - } catch (org.apache.hadoop.hbase.TableExistsException ee) { - logger.debug("Swallowing exception {}", ee); - } catch (IOException ee) { - throw new TemporaryBackendException(ee); - } - } - } finally { - IOUtils.closeQuietly(adm); - } - } - - private ColumnFamilyDescriptor setCFOptions(ColumnFamilyDescriptor cdesc, int ttlInSeconds) { - ColumnFamilyDescriptor ret = null; - - if (null != compression && !compression.equals(COMPRESSION_DEFAULT)) { - ret = compat.setCompression(cdesc, compression); - } - - if (ttlInSeconds > 0) { - ret = ColumnFamilyDescriptorBuilder.newBuilder(cdesc).setTimeToLive(ttlInSeconds).build(); - } - - return ret; - } - - /** - * Convert JanusGraph internal Mutation representation into HBase native commands. - * - * @param mutations Mutations to convert into HBase commands. - * @param putTimestamp The timestamp to use for Put commands. - * @param delTimestamp The timestamp to use for Delete commands. - * @return Commands sorted by key converted from JanusGraph internal representation. 
- * @throws org.janusgraph.diskstorage.PermanentBackendException - */ - @VisibleForTesting - Map, Delete>> convertToCommands(Map> mutations, - final long putTimestamp, - final long delTimestamp) throws PermanentBackendException { - // A map of rowkey to commands (list of Puts, Delete) - final Map, Delete>> commandsPerKey = new HashMap<>(); - - for (Map.Entry> entry : mutations.entrySet()) { - - String cfString = getCfNameForStoreName(entry.getKey()); - byte[] cfName = Bytes.toBytes(cfString); - - for (Map.Entry m : entry.getValue().entrySet()) { - final byte[] key = m.getKey().as(StaticBuffer.ARRAY_FACTORY); - KCVMutation mutation = m.getValue(); - - Pair, Delete> commands = commandsPerKey.get(m.getKey()); - - // The firt time we go through the list of input , - // create the holder for a particular rowkey - if (commands == null) { - commands = new Pair<>(); - // List of all the Puts for this rowkey, including the ones without TTL and with TTL. - final List putList = new ArrayList<>(); - commands.setFirst(putList); - commandsPerKey.put(m.getKey(), commands); - } - - if (mutation.hasDeletions()) { - if (commands.getSecond() == null) { - Delete d = new Delete(key); - compat.setTimestamp(d, delTimestamp); - commands.setSecond(d); - } - - for (StaticBuffer b : mutation.getDeletions()) { - // commands.getSecond() is a Delete for this rowkey. - commands.getSecond().addColumns(cfName, b.as(StaticBuffer.ARRAY_FACTORY), delTimestamp); - } - } - - if (mutation.hasAdditions()) { - // All the entries (column cells) with the rowkey use this one Put, except the ones with TTL. - final Put putColumnsWithoutTtl = new Put(key, putTimestamp); - // At the end of this loop, there will be one Put entry in the commands.getFirst() list that - // contains all additions without TTL set, and possible multiple Put entries for columns - // that have TTL set. - for (Entry e : mutation.getAdditions()) { - - // Deal with TTL within the entry (column cell) first - // HBase cell level TTL is actually set at the Mutation/Put level. - // Therefore we need to construct a new Put for each entry (column cell) with TTL. - // We can not combine them because column cells within the same rowkey may: - // 1. have no TTL - // 2. have TTL - // 3. have different TTL - final Integer ttl = (Integer) e.getMetaData().get(EntryMetaData.TTL); - if (null != ttl && ttl > 0) { - // Create a new Put - Put putColumnWithTtl = new Put(key, putTimestamp); - addColumnToPut(putColumnWithTtl, cfName, putTimestamp, e); - // Convert ttl from second (JanusGraph TTL) to millisec (HBase TTL) - // @see JanusGraphManagement#setTTL(JanusGraphSchemaType, Duration) - // Cast Put to Mutation for backward compatibility with HBase 0.98.x - // HBase supports cell-level TTL for versions 0.98.6 and above. - ((Mutation) putColumnWithTtl).setTTL(ttl * 1000); - // commands.getFirst() is the list of Puts for this rowkey. Add this - // Put column with TTL to the list. 
- commands.getFirst().add(putColumnWithTtl); - } else { - addColumnToPut(putColumnsWithoutTtl, cfName, putTimestamp, e); - } - } - // If there were any mutations without TTL set, add them to commands.getFirst() - if (!putColumnsWithoutTtl.isEmpty()) { - commands.getFirst().add(putColumnsWithoutTtl); - } - } - } - } - - return commandsPerKey; - } - - private void addColumnToPut(Put p, byte[] cfName, long putTimestamp, Entry e) { - p.addColumn(cfName, e.getColumnAs(StaticBuffer.ARRAY_FACTORY), putTimestamp, - e.getValueAs(StaticBuffer.ARRAY_FACTORY)); - } - - private String getCfNameForStoreName(String storeName) throws PermanentBackendException { - return shortCfNames ? shortenCfName(shortCfNameMap, storeName) : storeName; - } - - private void checkConfigDeprecation(org.janusgraph.diskstorage.configuration.Configuration config) { - if (config.has(GraphDatabaseConfiguration.STORAGE_PORT)) { - logger.warn("The configuration property {} is ignored for HBase. Set hbase.zookeeper.property.clientPort in hbase-site.xml or {}.hbase.zookeeper.property.clientPort in JanusGraph's configuration file.", - ConfigElement.getPath(GraphDatabaseConfiguration.STORAGE_PORT), ConfigElement.getPath(HBASE_CONFIGURATION_NAMESPACE)); - } - } - - private AdminMask getAdminInterface() { - try { - return cnx.getAdmin(); - } catch (IOException e) { - throw new JanusGraphException(e); - } - } - - private String determineTableName(org.janusgraph.diskstorage.configuration.Configuration config) { - if ((!config.has(HBASE_TABLE)) && (config.has(GRAPH_NAME))) { - return config.get(GRAPH_NAME); - } - return config.get(HBASE_TABLE); - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseTransaction.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseTransaction.java deleted file mode 100644 index 3b0d271bb6..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HBaseTransaction.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.janusgraph.diskstorage.hbase2; - -import org.janusgraph.diskstorage.BaseTransactionConfig; -import org.janusgraph.diskstorage.common.AbstractStoreTransaction; - -/** - * This class overrides and adds nothing compared with - * {@link org.janusgraph.diskstorage.locking.consistentkey.ExpectedValueCheckingTransaction}; however, it creates a transaction type specific - * to HBase, which lets us check for user errors like passing a Cassandra - * transaction into a HBase method. 
- */ -public class HBaseTransaction extends AbstractStoreTransaction { - - public HBaseTransaction(final BaseTransactionConfig config) { - super(config); - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HConnection2_0.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HConnection2_0.java deleted file mode 100644 index 66b8642dcf..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HConnection2_0.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.HRegionLocation; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Connection; - -import java.io.IOException; -import java.util.List; - -public class HConnection2_0 implements ConnectionMask -{ - - private final Connection cnx; - - public HConnection2_0(Connection cnx) - { - this.cnx = cnx; - } - - @Override - public TableMask getTable(String name) throws IOException - { - return new HTable2_0(cnx.getTable(TableName.valueOf(name))); - } - - @Override - public AdminMask getAdmin() throws IOException - { - return new HBaseAdmin2_0(cnx.getAdmin()); - } - - @Override - public void close() throws IOException - { - cnx.close(); - } - - @Override - public List getRegionLocations(String tableName) - throws IOException - { - return this.cnx.getRegionLocator(TableName.valueOf(tableName)).getAllRegionLocations(); - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HTable2_0.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HTable2_0.java deleted file mode 100644 index 0b4643a4e0..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/HTable2_0.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
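HConnection2_0 (deleted above) and the HTable2_0/HBaseAdmin2_0 wrappers it returns are thin adapters over the HBase 2.x client. A short sketch of the underlying calls they forward to, assuming a standard HBase 2.x Connection; the class and method names below are illustrative placeholders, not code from this change.

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Table;

    final class ConnectionCallsSketch {
        // What HConnection2_0.getRegionLocations(tableName) delegates to.
        static List<HRegionLocation> regionLocations(Connection cnx, String tableName) throws IOException {
            return cnx.getRegionLocator(TableName.valueOf(tableName)).getAllRegionLocations();
        }

        // HTable2_0 wraps the Table returned here; HBaseAdmin2_0 wraps the Admin.
        static Table table(Connection cnx, String name) throws IOException {
            return cnx.getTable(TableName.valueOf(name));
        }

        static Admin admin(Connection cnx) throws IOException {
            return cnx.getAdmin();
        }
    }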
- -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.Table; - -import java.io.IOException; -import java.util.List; - -public class HTable2_0 implements TableMask -{ - private final Table table; - - public HTable2_0(Table table) - { - this.table = table; - } - - @Override - public ResultScanner getScanner(Scan filter) throws IOException - { - return table.getScanner(filter); - } - - @Override - public Result[] get(List gets) throws IOException - { - return table.get(gets); - } - - @Override - public void batch(List writes, Object[] results) throws IOException, InterruptedException - { - table.batch(writes, results); - /* table.flushCommits(); not needed anymore */ - } - - @Override - public void close() throws IOException - { - table.close(); - } -} diff --git a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/TableMask.java b/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/TableMask.java deleted file mode 100644 index 0309c39b0a..0000000000 --- a/graphdb/janus-hbase2/src/main/java/org/janusgraph/diskstorage/hbase2/TableMask.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/** - * Copyright DataStax, Inc. - *

    - * Please see the included license file for details. - */ -package org.janusgraph.diskstorage.hbase2; - -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.Scan; - -import java.io.Closeable; -import java.io.IOException; -import java.util.List; - -/** - * This interface hides ABI/API breaking changes that HBase has made to its Table/HTableInterface over the course - * of development from 0.94 to 1.0 and beyond. - */ -public interface TableMask extends Closeable -{ - - ResultScanner getScanner(Scan filter) throws IOException; - - Result[] get(List gets) throws IOException; - - void batch(List writes, Object[] results) throws IOException, InterruptedException; - -} diff --git a/graphdb/janus/pom.xml b/graphdb/janus/pom.xml index 75c9079eee..55f0fd62ac 100644 --- a/graphdb/janus/pom.xml +++ b/graphdb/janus/pom.xml @@ -51,12 +51,6 @@ provided - - org.apache.atlas - atlas-janusgraph-hbase2 - ${project.version} - - org.apache.atlas atlas-testtools @@ -160,18 +154,6 @@ - - org.janusgraph - janusgraph-hbase - ${janusgraph.version} - - - ch.qos.logback - * - - - - org.janusgraph janusgraph-solr diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphDatabase.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphDatabase.java index db69d77ee3..c2c5179a61 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphDatabase.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphDatabase.java @@ -36,8 +36,6 @@ import org.janusgraph.core.JanusGraphFactory; import org.janusgraph.core.schema.JanusGraphManagement; import org.janusgraph.diskstorage.StandardIndexProvider; -import org.janusgraph.diskstorage.StandardStoreManager; -import org.janusgraph.diskstorage.solr.Solr6Index; import org.janusgraph.graphdb.database.serialize.attribute.SerializableSerializer; import org.janusgraph.graphdb.tinkerpop.JanusGraphIoRegistry; import org.slf4j.Logger; @@ -123,52 +121,6 @@ public static Configuration getConfiguration() throws AtlasException { return janusConfig; } - static { - addHBase2Support(); - - addSolr6Index(); - } - - private static void addHBase2Support() { - try { - Field field = StandardStoreManager.class.getDeclaredField("ALL_MANAGER_CLASSES"); - field.setAccessible(true); - - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); - - Map customMap = new HashMap<>(StandardStoreManager.getAllManagerClasses()); - customMap.put("hbase2", org.janusgraph.diskstorage.hbase2.HBaseStoreManager.class.getName()); - ImmutableMap immap = ImmutableMap.copyOf(customMap); - field.set(null, immap); - - LOG.debug("Injected HBase2 support - {}", org.janusgraph.diskstorage.hbase2.HBaseStoreManager.class.getName()); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private static void addSolr6Index() { - try { - Field field = StandardIndexProvider.class.getDeclaredField("ALL_MANAGER_CLASSES"); - field.setAccessible(true); - - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); - - Map customMap = new 
HashMap<>(StandardIndexProvider.getAllProviderClasses()); - customMap.put("solr", Solr6Index.class.getName()); - ImmutableMap immap = ImmutableMap.copyOf(customMap); - field.set(null, immap); - - LOG.debug("Injected solr6 index - {}", Solr6Index.class.getName()); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - public static JanusGraph getGraphInstance() { if (graphInstance == null) { synchronized (AtlasJanusGraphDatabase.class) { diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphIndexClient.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphIndexClient.java index bf816dbb0a..58499dc499 100644 --- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphIndexClient.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraphIndexClient.java @@ -49,7 +49,6 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.rest.RestStatus; -import org.janusgraph.diskstorage.solr.Solr6Index; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -111,8 +110,9 @@ public boolean isHealthy() { @Override public void applySearchWeight(String collectionName, Map indexFieldName2SearchWeightMap) { SolrClient solrClient = null; + //TODO: j17 - try { + /*try { solrClient = Solr6Index.getSolrClient(); // get solr client using same settings as that of Janus Graph if (solrClient == null) { @@ -176,15 +176,16 @@ public void applySearchWeight(String collectionName, Map indexF LOG.debug("Releasing the solr client from usage."); Solr6Index.releaseSolrClient(solrClient); - } + }*/ } @Override public Map> getAggregatedMetrics(AggregationContext aggregationContext) { SolrClient solrClient = null; + //TODO: j17 - try { + /*try { solrClient = Solr6Index.getSolrClient(); // get solr client using same settings as that of Janus Graph if (solrClient == null) { @@ -282,7 +283,7 @@ public Map> getAggregatedMetrics(Aggregation LOG.error("Error encountered in getting the aggregation metrics. 
Will return empty aggregation.", e); } finally { Solr6Index.releaseSolrClient(solrClient); - } + }*/ return Collections.EMPTY_MAP; } @@ -290,8 +291,9 @@ public Map> getAggregatedMetrics(Aggregation @Override public void applySuggestionFields(String collectionName, List suggestionProperties) { SolrClient solrClient = null; + //TODO: j17 - try { + /*try { solrClient = Solr6Index.getSolrClient(); // get solr client using same settings as that of Janus Graph if (solrClient == null) { @@ -313,7 +315,7 @@ public void applySuggestionFields(String collectionName, List suggestion LOG.error(msg, t); } finally { Solr6Index.releaseSolrClient(solrClient); - } + }*/ LOG.info("Applied suggestion fields request handler for collection {}.", collectionName); } @@ -321,8 +323,9 @@ public void applySuggestionFields(String collectionName, List suggestion @Override public List getSuggestions(String prefixString, String indexFieldName) { SolrClient solrClient = null; + //TODO: j17 - try { + /*try { solrClient = Solr6Index.getSolrClient(); // get solr client using same settings as that of Janus Graph if (solrClient == null) { @@ -373,15 +376,17 @@ public List getSuggestions(String prefixString, String indexFieldName) { LOG.error(msg); } finally { Solr6Index.releaseSolrClient(solrClient); - } + }*/ return Collections.EMPTY_LIST; } private boolean isSolrHealthy() throws SolrServerException, IOException { - SolrClient client = Solr6Index.getSolrClient(); + //TODO: j17 + /*SolrClient client = Solr6Index.getSolrClient(); - return client != null && client.ping(Constants.VERTEX_INDEX).getStatus() == SOLR_HEALTHY_STATUS; + return client != null && client.ping(Constants.VERTEX_INDEX).getStatus() == SOLR_HEALTHY_STATUS;*/ + return true; } private boolean isElasticsearchHealthy() throws ElasticsearchException, IOException { @@ -527,7 +532,8 @@ protected static String generateSuggestionsString(List suggestionIndexFi return ret.toString(); } - private SolrResponse updateFreeTextRequestHandler(SolrClient solrClient, String collectionName, + //TODO: j17 + /*private SolrResponse updateFreeTextRequestHandler(SolrClient solrClient, String collectionName, Map indexFieldName2SearchWeightMap, Solr6Index.Mode mode) throws IOException, SolrServerException, AtlasBaseException { String searchWeightString = generateSearchWeightString(indexFieldName2SearchWeightMap); @@ -543,9 +549,10 @@ private SolrResponse createFreeTextRequestHandler(SolrClient solrClient, String String payLoadString = generatePayLoadForFreeText("create-requesthandler", searchWeightString); return performRequestHandlerAction(collectionName, solrClient, payLoadString, mode); - } + }*/ - private SolrResponse performRequestHandlerAction(String collectionName, + //TODO: j17 + /*private SolrResponse performRequestHandlerAction(String collectionName, SolrClient solrClient, String actionPayLoad, Solr6Index.Mode mode) throws IOException, SolrServerException, AtlasBaseException { @@ -570,7 +577,7 @@ private SolrResponse performRequestHandlerAction(String collectionName, default: throw new IllegalArgumentException("Unsupported Solr operation mode: " + mode); } - } + }*/ private SolrResponse validateResponseForSuccess(SolrResponse solrResponse) throws AtlasBaseException { if(solrResponse == null) { diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java index f7e5718f19..fd05513a67 100644 --- 
a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java +++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/SearchContextCache.java @@ -6,6 +6,9 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; + +import javax.inject.Inject; + @Component public class SearchContextCache { private static final Logger LOG = LoggerFactory.getLogger(SearchContextCache.class); @@ -13,8 +16,8 @@ public class SearchContextCache { public static final String INVALID_SEQUENCE = "invalid_sequence"; - - public SearchContextCache(@Qualifier("redisServiceImpl") RedisService redisService) { + @Inject + public SearchContextCache(RedisService redisService) { SearchContextCache.redisService = redisService; } diff --git a/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java b/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java deleted file mode 100644 index 23c11de301..0000000000 --- a/graphdb/janus/src/main/java/org/janusgraph/diskstorage/solr/Solr6Index.java +++ /dev/null @@ -1,1291 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.janusgraph.diskstorage.solr; - -import static org.janusgraph.diskstorage.solr.SolrIndex.*; -import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.INDEX_MAX_RESULT_SET_SIZE; - -import java.io.IOException; -import java.io.StringReader; -import java.io.UncheckedIOException; -import java.lang.reflect.Constructor; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.util.AbstractMap.SimpleEntry; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.TimeZone; -import java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpEntityEnclosingRequest; -import org.apache.http.HttpException; -import org.apache.http.HttpRequest; -import org.apache.http.HttpRequestInterceptor; -import org.apache.http.client.HttpClient; -import org.apache.http.entity.BufferedHttpEntity; -import org.apache.http.impl.auth.KerberosScheme; -import org.apache.http.protocol.HttpContext; -import org.apache.lucene.analysis.CachingTokenFilter; -import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.HttpClientUtil; -import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder; -import org.apache.solr.client.solrj.impl.LBHttpSolrClient; -import org.apache.solr.client.solrj.impl.PreemptiveAuth; -import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.request.UpdateRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.util.ClientUtils; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.cloud.ClusterState; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.zookeeper.KeeperException; -import org.janusgraph.core.Cardinality; -import org.janusgraph.core.JanusGraphElement; -import 
org.janusgraph.core.attribute.Cmp; -import org.janusgraph.core.attribute.Geo; -import org.janusgraph.core.attribute.Geoshape; -import org.janusgraph.core.attribute.Text; -import org.janusgraph.core.schema.Mapping; -import org.janusgraph.core.schema.Parameter; -import org.janusgraph.diskstorage.BackendException; -import org.janusgraph.diskstorage.BaseTransaction; -import org.janusgraph.diskstorage.BaseTransactionConfig; -import org.janusgraph.diskstorage.BaseTransactionConfigurable; -import org.janusgraph.diskstorage.PermanentBackendException; -import org.janusgraph.diskstorage.TemporaryBackendException; -import org.janusgraph.diskstorage.configuration.ConfigOption; -import org.janusgraph.diskstorage.configuration.Configuration; -import org.janusgraph.diskstorage.indexing.IndexEntry; -import org.janusgraph.diskstorage.indexing.IndexFeatures; -import org.janusgraph.diskstorage.indexing.IndexMutation; -import org.janusgraph.diskstorage.indexing.IndexProvider; -import org.janusgraph.diskstorage.indexing.IndexQuery; -import org.janusgraph.diskstorage.indexing.KeyInformation; -import org.janusgraph.diskstorage.indexing.RawQuery; -import org.janusgraph.diskstorage.solr.transform.GeoToWktConverter; -import org.janusgraph.diskstorage.util.DefaultTransaction; -import org.janusgraph.graphdb.configuration.PreInitializeConfigOptions; -import org.janusgraph.graphdb.database.serialize.AttributeUtils; -import org.janusgraph.graphdb.internal.Order; -import org.janusgraph.graphdb.query.JanusGraphPredicate; -import org.janusgraph.graphdb.query.condition.And; -import org.janusgraph.graphdb.query.condition.Condition; -import org.janusgraph.graphdb.query.condition.Not; -import org.janusgraph.graphdb.query.condition.Or; -import org.janusgraph.graphdb.query.condition.PredicateCondition; -import org.janusgraph.graphdb.types.ParameterType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; - -/** - * NOTE: Copied from JanusGraph for supporting Kerberos and adding support for multiple zookeeper clients. 
Do not change - * This is a copy of SolrIndex.java from org.janusgraph.diskstorage.solr - */ -@PreInitializeConfigOptions -public class Solr6Index implements IndexProvider { - - private static final Logger logger = LoggerFactory.getLogger(Solr6Index.class); - - private static final String DEFAULT_ID_FIELD = "id"; - private static final char CHROOT_START_CHAR = '/'; - - private static Solr6Index instance = null; - public static final ConfigOption CREATE_SOLR_CLIENT_PER_REQUEST = new ConfigOption(SOLR_NS, "create-client-per-request", "when false, allows the sharing of solr client across other components.", org.janusgraph.diskstorage.configuration.ConfigOption.Type.LOCAL, false); - - public enum Mode { - HTTP, CLOUD; - - public static Mode parse(String mode) { - for (final Mode m : Mode.values()) { - if (m.toString().equalsIgnoreCase(mode)) return m; - } - throw new IllegalArgumentException("Unrecognized mode: "+mode); - } - - } - - public static final ConfigOption ZOOKEEPER_URLS = new ConfigOption<>(SOLR_NS,"zookeeper-urls", - "URL of the Zookeeper instance coordinating the SolrCloud cluster", - ConfigOption.Type.MASKABLE, new String[]{"localhost:2181"}); - - private static final IndexFeatures SOLR_FEATURES = new IndexFeatures.Builder() - .supportsDocumentTTL() - .setDefaultStringMapping(Mapping.TEXT) - .supportedStringMappings(Mapping.TEXT, Mapping.STRING) - .supportsCardinality(Cardinality.SINGLE) - .supportsCardinality(Cardinality.LIST) - .supportsCardinality(Cardinality.SET) - .supportsCustomAnalyzer() - .supportsGeoContains() - .build(); - - private static final Map SPATIAL_PREDICATES = spatialPredicates(); - private static boolean createSolrClientPerRequest; - - private final SolrClient solrClient; - private final Configuration configuration; - private final Mode mode; - private final boolean dynFields; - private final Map keyFieldIds; - private final String ttlField; - private final int batchSize; - private final boolean waitSearcher; - private final boolean kerberosEnabled; - - public Solr6Index(final Configuration config) throws BackendException { - // Add Kerberos-enabled SolrHttpClientBuilder - HttpClientUtil.setHttpClientBuilder(new Krb5HttpClientBuilder().getBuilder()); - - Preconditions.checkArgument(config!=null); - configuration = config; - mode = Mode.parse(config.get(SOLR_MODE)); - kerberosEnabled = config.get(KERBEROS_ENABLED); - dynFields = config.get(DYNAMIC_FIELDS); - keyFieldIds = parseKeyFieldsForCollections(config); - batchSize = config.get(INDEX_MAX_RESULT_SET_SIZE); - ttlField = config.get(TTL_FIELD); - waitSearcher = config.get(WAIT_SEARCHER); - - if (kerberosEnabled) { - logger.debug("Kerberos is enabled. Configuring SOLR for Kerberos."); - configureSolrClientsForKerberos(); - } else { - logger.debug("Kerberos is NOT enabled."); - logger.debug("KERBEROS_ENABLED name is " + KERBEROS_ENABLED.getName() + " and it is" + (KERBEROS_ENABLED.isOption() ? " " : " not") + " an option."); - logger.debug("KERBEROS_ENABLED type is " + KERBEROS_ENABLED.getType().name()); - } - - solrClient = createSolrClient(); - createSolrClientPerRequest = config.get(CREATE_SOLR_CLIENT_PER_REQUEST); - if(createSolrClientPerRequest) { - logger.info("A new Solr Client will be created for direct interation with SOLR."); - } else { - logger.info("Solr Client will be shared for direct interation with SOLR."); - } - Solr6Index.instance = this; - } - - public static Mode getSolrMode() { - Solr6Index solr6Index = Solr6Index.instance; - Mode ret = (solr6Index != null) ? 
Mode.parse(solr6Index.configuration.get(SOLR_MODE)) : null; - - if (ret == null) { - logger.warn("SolrMode is not set. Assuming {}", Mode.CLOUD); - - ret = Mode.CLOUD; - } - - return ret; - } - - public static SolrClient getSolrClient() { - if (Solr6Index.instance != null) { - if (createSolrClientPerRequest) { - logger.debug("Creating a new Solr Client."); - return Solr6Index.instance.createSolrClient(); - } else { - logger.debug("Returning the solr client owned by Solr6Index."); - return Solr6Index.instance.solrClient; - } - } else { - logger.debug(" No Solr6Index available. Will return null"); - return null; - } - } - - public static void releaseSolrClient(SolrClient solrClient) { - if(createSolrClientPerRequest) { - if (solrClient != null) { - try { - solrClient.close(); - - if(logger.isDebugEnabled()) { - logger.debug("Closed the solr client successfully."); - } - } catch (IOException excp) { - logger.warn("Failed to close SolrClient.", excp); - } - } - } else { - if(logger.isDebugEnabled()) { - logger.debug("Ignoring the closing of solr client as it is owned by Solr6Index."); - } - } - } - - private SolrClient createSolrClient() { - if(logger.isDebugEnabled()) { - logger.debug("HttpClientBuilder = {}", HttpClientUtil.getHttpClientBuilder(), new Exception()); - } - final ModifiableSolrParams clientParams = new ModifiableSolrParams(); - SolrClient solrClient = null; - - Mode mode = Mode.parse(configuration.get(SOLR_MODE)); - switch (mode) { - case CLOUD: - /* ATLAS-2920: Update JanusGraph Solr clients to use all zookeeper entries – start */ - List zkHosts = new ArrayList<>(); - String chroot = null; - String[] zkUrls = configuration.get(ZOOKEEPER_URLS); - - if (zkUrls != null) { - for (int i = zkUrls.length - 1; i >= 0; i--) { - String zkUrl = zkUrls[i]; - int idxChroot = zkUrl.indexOf(CHROOT_START_CHAR); - - if (idxChroot != -1) { - if (chroot == null) { - chroot = zkUrl.substring(idxChroot); - } - - zkUrl = zkUrl.substring(0, idxChroot); - } - - zkHosts.add(zkUrl); - } - } - /* ATLAS-2920: - end */ - - final CloudSolrClient cloudServer = new CloudSolrClient.Builder().withZkHost(zkHosts).withZkChroot(chroot) - .withLBHttpSolrClientBuilder( - new LBHttpSolrClient.Builder() - .withHttpSolrClientBuilder(new HttpSolrClient.Builder().withInvariantParams(clientParams)) - .withBaseSolrUrls(configuration.get(HTTP_URLS)) - ) - .sendUpdatesOnlyToShardLeaders() - .build(); - cloudServer.connect(); - solrClient = cloudServer; - logger.info("Created solr client using Cloud based configuration."); - break; - case HTTP: - clientParams.add(HttpClientUtil.PROP_ALLOW_COMPRESSION, configuration.get(HTTP_ALLOW_COMPRESSION).toString()); - clientParams.add(HttpClientUtil.PROP_CONNECTION_TIMEOUT, configuration.get(HTTP_CONNECTION_TIMEOUT).toString()); - clientParams.add(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, configuration.get(HTTP_MAX_CONNECTIONS_PER_HOST).toString()); - clientParams.add(HttpClientUtil.PROP_MAX_CONNECTIONS, configuration.get(HTTP_GLOBAL_MAX_CONNECTIONS).toString()); - final HttpClient client = HttpClientUtil.createClient(clientParams); - solrClient = new LBHttpSolrClient.Builder() - .withHttpClient(client) - .withBaseSolrUrls(configuration.get(HTTP_URLS)) - .build(); - logger.info("Created solr client using HTTP based configuration."); - break; - default: - throw new IllegalArgumentException("Unsupported Solr operation mode: " + mode); - } - return solrClient; - } - - private void configureSolrClientsForKerberos() throws PermanentBackendException { - String kerberosConfig = 
System.getProperty("java.security.auth.login.config"); - if(kerberosConfig == null) { - throw new PermanentBackendException("Unable to configure kerberos for solr client. System property 'java.security.auth.login.config' is not set."); - } - logger.debug("Using kerberos configuration file located at '{}'.", kerberosConfig); - try(Krb5HttpClientBuilder krbBuild = new Krb5HttpClientBuilder()) { - - SolrHttpClientBuilder kb = krbBuild.getBuilder(); - HttpClientUtil.setHttpClientBuilder(kb); - HttpRequestInterceptor bufferedEntityInterceptor = new HttpRequestInterceptor() { - @Override - public void process(HttpRequest request, HttpContext context) throws HttpException, IOException { - if(request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest enclosingRequest = ((HttpEntityEnclosingRequest) request); - HttpEntity requestEntity = enclosingRequest.getEntity(); - enclosingRequest.setEntity(new BufferedHttpEntity(requestEntity)); - } - } - }; - HttpClientUtil.addRequestInterceptor(bufferedEntityInterceptor); - - HttpRequestInterceptor preemptiveAuth = new PreemptiveAuth(new KerberosScheme()); - HttpClientUtil.addRequestInterceptor(preemptiveAuth); - } - } - - private Map parseKeyFieldsForCollections(Configuration config) throws BackendException { - final Map keyFieldNames = new HashMap<>(); - final String[] collectionFieldStatements = config.has(KEY_FIELD_NAMES) ? config.get(KEY_FIELD_NAMES) : new String[0]; - for (final String collectionFieldStatement : collectionFieldStatements) { - final String[] parts = collectionFieldStatement.trim().split("="); - if (parts.length != 2) { - throw new PermanentBackendException( - "Unable to parse the collection name / key field name pair. It should be of the format collection=field"); - } - final String collectionName = parts[0]; - final String keyFieldName = parts[1]; - keyFieldNames.put(collectionName, keyFieldName); - } - return keyFieldNames; - } - - private String getKeyFieldId(String collection) { - String field = keyFieldIds.get(collection); - if (field==null) field = DEFAULT_ID_FIELD; - return field; - } - - /** - * Unlike the ElasticSearch Index, which is schema free, Solr requires a schema to - * support searching. This means that you will need to modify the solr schema with the - * appropriate field definitions in order to work properly. If you have a running instance - * of Solr and you modify its schema with new fields, don't forget to re-index! - * @param store Index store - * @param key New key to register - * @param information data type to register for the key - * @param tx enclosing transaction - * @throws BackendException in case an exception is thrown when - * creating a collection. 
- */ - @SuppressWarnings("unchecked") - @Override - public void register(String store, String key, KeyInformation information, BaseTransaction tx) - throws BackendException { - if (mode== Mode.CLOUD) { - final CloudSolrClient client = (CloudSolrClient) solrClient; - try { - createCollectionIfNotExists(client, configuration, store); - } catch (final IOException | SolrServerException | InterruptedException | KeeperException e) { - throw new PermanentBackendException(e); - } - } - //Since all data types must be defined in the schema.xml, pre-registering a type does not work - //But we check Analyse feature - String analyzer = ParameterType.STRING_ANALYZER.findParameter(information.getParameters(), null); - if (analyzer != null) { - //If the key have a tokenizer, we try to get it by reflection - // some referred classes might not be able to be found via SystemClassLoader - // since they might be associated with other classloader, in this situation - // ClassNotFound exception will be thrown. instead of using SystemClassLoader - // for all classes, we find its classloader first and then load the class, please - // call - instantiateUsingClassLoader() - try { - ((Constructor) ClassLoader.getSystemClassLoader().loadClass(analyzer) - .getConstructor()).newInstance(); - } catch (final ReflectiveOperationException e) { - throw new PermanentBackendException(e.getMessage(),e); - } - } - analyzer = ParameterType.TEXT_ANALYZER.findParameter(information.getParameters(), null); - if (analyzer != null) { - //If the key have a tokenizer, we try to get it by reflection - try { - ((Constructor) ClassLoader.getSystemClassLoader().loadClass(analyzer) - .getConstructor()).newInstance(); - } catch (final ReflectiveOperationException e) { - throw new PermanentBackendException(e.getMessage(),e); - } - } - } - - private void instantiateUsingClassLoader(String analyzer) throws PermanentBackendException { - if (analyzer == null) return; - try { - Class analyzerClass = Class.forName(analyzer); - ClassLoader cl = analyzerClass.getClassLoader(); - ((Constructor) cl.loadClass(analyzer).getConstructor()).newInstance(); - } catch (final ReflectiveOperationException e) { - throw new PermanentBackendException(e.getMessage(),e); - } - } - - @Override - public void mutate(Map> mutations, KeyInformation.IndexRetriever information, - BaseTransaction tx) throws BackendException { - logger.debug("Mutating SOLR"); - try { - for (final Map.Entry> stores : mutations.entrySet()) { - final String collectionName = stores.getKey(); - final String keyIdField = getKeyFieldId(collectionName); - - final List deleteIds = new ArrayList<>(); - final Collection changes = new ArrayList<>(); - - for (final Map.Entry entry : stores.getValue().entrySet()) { - final String docId = entry.getKey(); - final IndexMutation mutation = entry.getValue(); - Preconditions.checkArgument(!(mutation.isNew() && mutation.isDeleted())); - Preconditions.checkArgument(!mutation.isNew() || !mutation.hasDeletions()); - Preconditions.checkArgument(!mutation.isDeleted() || !mutation.hasAdditions()); - - //Handle any deletions - if (mutation.hasDeletions()) { - if (mutation.isDeleted()) { - logger.trace("Deleting entire document {}", docId); - deleteIds.add(docId); - } else { - final List fieldDeletions = new ArrayList<>(mutation.getDeletions()); - if (mutation.hasAdditions()) { - for (final IndexEntry indexEntry : mutation.getAdditions()) { - fieldDeletions.remove(indexEntry); - } - } - handleRemovalsFromIndex(collectionName, keyIdField, docId, fieldDeletions, 
information); - } - } - - if (mutation.hasAdditions()) { - final int ttl = mutation.determineTTL(); - - final SolrInputDocument doc = new SolrInputDocument(); - doc.setField(keyIdField, docId); - - final boolean isNewDoc = mutation.isNew(); - - if (isNewDoc) - logger.trace("Adding new document {}", docId); - final Map adds = collectFieldValues(mutation.getAdditions(), collectionName, - information); - // If cardinality is not single then we should use the "add" operation to update - // the index so we don't overwrite existing values. - adds.keySet().forEach(v-> { - final KeyInformation keyInformation = information.get(collectionName, v); - final String solrOp = keyInformation.getCardinality() == Cardinality.SINGLE ? "set" : "add"; - doc.setField(v, isNewDoc ? adds.get(v) : - new HashMap(1) {{put(solrOp, adds.get(v));}} - ); - }); - if (ttl>0) { - Preconditions.checkArgument(isNewDoc, - "Solr only supports TTL on new documents [%s]", docId); - doc.setField(ttlField, String.format("+%dSECONDS", ttl)); - } - changes.add(doc); - } - } - - commitDeletes(collectionName, deleteIds); - commitChanges(collectionName, changes); - } - } catch (final IllegalArgumentException e) { - throw new PermanentBackendException("Unable to complete query on Solr.", e); - } catch (final Exception e) { - throw storageException(e); - } - } - - private void handleRemovalsFromIndex(String collectionName, String keyIdField, String docId, - List fieldDeletions, KeyInformation.IndexRetriever information) - throws SolrServerException, IOException, BackendException { - final Map fieldDeletes = new HashMap<>(1); - fieldDeletes.put("set", null); - final SolrInputDocument doc = new SolrInputDocument(); - doc.addField(keyIdField, docId); - for(final IndexEntry v: fieldDeletions) { - final KeyInformation keyInformation = information.get(collectionName, v.field); - // If the cardinality is a Set or List, we just need to remove the individual value - // received in the mutation and not set the field to null, but we still consolidate the values - // in the event of multiple removals in one mutation. 
- final Map deletes = collectFieldValues(fieldDeletions, collectionName, information); - deletes.keySet().forEach(vertex -> { - final Map remove; - if (keyInformation.getCardinality() == Cardinality.SINGLE) { - remove = (Map) fieldDeletes; - } else { - remove = new HashMap<>(1); - remove.put("remove", deletes.get(vertex)); - } - doc.setField(vertex, remove); - }); - } - - final UpdateRequest singleDocument = newUpdateRequest(); - singleDocument.add(doc); - solrClient.request(singleDocument, collectionName); - - } - - private Object convertValue(Object value) throws BackendException { - if (value instanceof Geoshape) { - return GeoToWktConverter.convertToWktString((Geoshape) value); - } - if (value instanceof UUID) { - return value.toString(); - } - if(value instanceof Instant) { - if(Math.floorMod(((Instant) value).getNano(), 1000000) != 0) { - throw new IllegalArgumentException("Solr indexes do not support nanoseconds"); - } - return new Date(((Instant) value).toEpochMilli()); - } - return value; - } - - @Override - public void restore(Map>> documents, - KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { - try { - for (final Map.Entry>> stores : documents.entrySet()) { - final String collectionName = stores.getKey(); - - final List deleteIds = new ArrayList<>(); - final List newDocuments = new ArrayList<>(); - - for (final Map.Entry> entry : stores.getValue().entrySet()) { - final String docID = entry.getKey(); - final List content = entry.getValue(); - - if (content == null || content.isEmpty()) { - if (logger.isTraceEnabled()) - logger.trace("Deleting document [{}]", docID); - - deleteIds.add(docID); - continue; - } - final SolrInputDocument doc = new SolrInputDocument(); - doc.setField(getKeyFieldId(collectionName), docID); - final Map adds = collectFieldValues(content, collectionName, information); - adds.forEach(doc::setField); - newDocuments.add(doc); - } - commitDeletes(collectionName, deleteIds); - commitChanges(collectionName, newDocuments); - } - } catch (final Exception e) { - throw new TemporaryBackendException("Could not restore Solr index", e); - } - } - - // This method will create a map of field ids to values. 
In the case of multiValued fields, - // it will consolidate all the values into one List or Set so it can be updated with a single Solr operation - private Map collectFieldValues(List content, String collectionName, - KeyInformation.IndexRetriever information) throws BackendException { - final Map docs = new HashMap<>(); - for (final IndexEntry addition: content) { - final KeyInformation keyInformation = information.get(collectionName, addition.field); - switch (keyInformation.getCardinality()) { - case SINGLE: - docs.put(addition.field, convertValue(addition.value)); - break; - case SET: - if (!docs.containsKey(addition.field)) { - docs.put(addition.field, new HashSet<>()); - } - ((Set) docs.get(addition.field)).add(convertValue(addition.value)); - break; - case LIST: - if (!docs.containsKey(addition.field)) { - docs.put(addition.field, new ArrayList<>()); - } - ((List) docs.get(addition.field)).add(convertValue(addition.value)); - break; - } - } - return docs; - } - - private void commitChanges(String collectionName, - Collection documents) throws SolrServerException, IOException { - if (documents.size() == 0) return; - - try { - solrClient.request(newUpdateRequest().add(documents), collectionName); - } catch (final HttpSolrClient.RemoteSolrException rse) { - logger.error("Unable to save documents to Solr as one of the shape objects stored were not compatible with Solr.", rse); - logger.error("Details in failed document batch: "); - for (final SolrInputDocument d : documents) { - final Collection fieldNames = d.getFieldNames(); - for (final String name : fieldNames) { - logger.error(name + ":" + d.getFieldValue(name)); - } - } - - throw rse; - } - } - - private void commitDeletes(String collectionName, List deleteIds) throws SolrServerException, IOException { - if (deleteIds.size() == 0) return; - solrClient.request(newUpdateRequest().deleteById(deleteIds), collectionName); - } - - @Override - public Stream query(IndexQuery query, KeyInformation.IndexRetriever information, - BaseTransaction tx) throws BackendException { - final String collection = query.getStore(); - final String keyIdField = getKeyFieldId(collection); - final SolrQuery solrQuery = new SolrQuery("*:*"); - solrQuery.set(CommonParams.FL, keyIdField); - final String queryFilter = buildQueryFilter(query.getCondition(), information.get(collection)); - solrQuery.addFilterQuery(queryFilter); - if (!query.getOrder().isEmpty()) { - addOrderToQuery(solrQuery, query.getOrder()); - } - solrQuery.setStart(0); - if (query.hasLimit()) { - solrQuery.setRows(Math.min(query.getLimit(), batchSize)); - } else { - solrQuery.setRows(batchSize); - } - return executeQuery(query.hasLimit() ? 
query.getLimit() : null, 0, collection, solrQuery, - doc -> doc.getFieldValue(keyIdField).toString()); - } - - @Override - public Long queryCount(IndexQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { - try { - String collection = query.getStore(); - String keyIdField = this.getKeyFieldId(collection); - SolrQuery solrQuery = new SolrQuery("*:*"); - solrQuery.set("fl", new String[]{keyIdField}); - String queryFilter = this.buildQueryFilter(query.getCondition(), information.get(collection)); - solrQuery.addFilterQuery(new String[]{queryFilter}); - QueryResponse response = this.solrClient.query(collection, solrQuery); - logger.debug("Executed query [{}] in {} ms", query, response.getElapsedTime()); - return response.getResults().getNumFound(); - } catch (IOException ex) { - logger.error("Query did not complete : ", ex); - throw new PermanentBackendException(ex); - } catch (SolrServerException ex) { - logger.error("Unable to query Solr index.", ex); - throw new PermanentBackendException(ex); - } - } - - private void addOrderToQuery(SolrQuery solrQuery, List orders) { - for (final IndexQuery.OrderEntry order1 : orders) { - final String item = order1.getKey(); - final SolrQuery.ORDER order = order1.getOrder() == Order.ASC ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc; - solrQuery.addSort(new SolrQuery.SortClause(item, order)); - } - } - - private Stream executeQuery(Integer limit, int offset, String collection, SolrQuery solrQuery, - Function function) throws PermanentBackendException { - try { - final SolrResultIterator resultIterator = new SolrResultIterator<>(solrClient, limit, offset, - solrQuery.getRows(), collection, solrQuery, function); - return StreamSupport.stream(Spliterators.spliteratorUnknownSize(resultIterator, Spliterator.ORDERED), - false); - } catch (final IOException | UncheckedIOException e) { - logger.error("Query did not complete : ", e); - throw new PermanentBackendException(e); - } catch (final SolrServerException | UncheckedSolrException e) { - logger.error("Unable to query Solr index.", e); - throw new PermanentBackendException(e); - } - } - - - private SolrQuery runCommonQuery(RawQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx, - String collection, String keyIdField) throws BackendException { - final SolrQuery solrQuery = new SolrQuery(query.getQuery()) - .addField(keyIdField) - .setIncludeScore(true) - .setStart(query.getOffset()); - if (query.hasLimit()) { - solrQuery.setRows(Math.min(query.getLimit(), batchSize)); - } else { - solrQuery.setRows(batchSize); - } - if (!query.getOrders().isEmpty()) { - addOrderToQuery(solrQuery, query.getOrders()); - } - - for(final Parameter parameter: query.getParameters()) { - if (parameter.value() instanceof String[]) { - solrQuery.setParam(parameter.key(), (String[]) parameter.value()); - } else if (parameter.value() instanceof String) { - solrQuery.setParam(parameter.key(), (String) parameter.value()); - } - } - return solrQuery; - } - - @Override - public Stream> query(RawQuery query, KeyInformation.IndexRetriever information, - BaseTransaction tx) throws BackendException { - final String collection = query.getStore(); - final String keyIdField = getKeyFieldId(collection); - return executeQuery(query.hasLimit() ? 
query.getLimit() : null, query.getOffset(), collection, - runCommonQuery(query, information, tx, collection, keyIdField), doc -> { - final double score = Double.parseDouble(doc.getFieldValue("score").toString()); - return new RawQuery.Result<>(doc.getFieldValue(keyIdField).toString(), score); - }); - } - - @Override - public Long totals(RawQuery query, KeyInformation.IndexRetriever information, - BaseTransaction tx) throws BackendException { - try { - final String collection = query.getStore(); - final String keyIdField = getKeyFieldId(collection); - final QueryResponse response = solrClient.query(collection, runCommonQuery(query, information, tx, - collection, keyIdField)); - logger.debug("Executed query [{}] in {} ms", query.getQuery(), response.getElapsedTime()); - return response.getResults().getNumFound(); - } catch (final IOException e) { - logger.error("Query did not complete : ", e); - throw new PermanentBackendException(e); - } catch (final SolrServerException e) { - logger.error("Unable to query Solr index.", e); - throw new PermanentBackendException(e); - } - } - - private static String escapeValue(Object value) { - return ClientUtils.escapeQueryChars(value.toString()); - } - - public String buildQueryFilter(Condition condition, KeyInformation.StoreRetriever information) { - if (condition instanceof PredicateCondition) { - final PredicateCondition atom - = (PredicateCondition) condition; - final Object value = atom.getValue(); - final String key = atom.getKey(); - final JanusGraphPredicate predicate = atom.getPredicate(); - - if (value == null && predicate == Cmp.NOT_EQUAL) { - return key + ":*"; - } else if (value instanceof Number) { - final String queryValue = escapeValue(value); - Preconditions.checkArgument(predicate instanceof Cmp, - "Relation not supported on numeric types: %s", predicate); - final Cmp numRel = (Cmp) predicate; - switch (numRel) { - case EQUAL: - return (key + ":" + queryValue); - case NOT_EQUAL: - return ("-" + key + ":" + queryValue); - case LESS_THAN: - //use right curly to mean up to but not including value - return (key + ":[* TO " + queryValue + "}"); - case LESS_THAN_EQUAL: - return (key + ":[* TO " + queryValue + "]"); - case GREATER_THAN: - //use left curly to mean greater than but not including value - return (key + ":{" + queryValue + " TO *]"); - case GREATER_THAN_EQUAL: - return (key + ":[" + queryValue + " TO *]"); - default: throw new IllegalArgumentException("Unexpected relation: " + numRel); - } - } else if (value instanceof String) { - final Mapping map = getStringMapping(information.get(key)); - assert map==Mapping.TEXT || map==Mapping.STRING; - - if (map==Mapping.TEXT && !(Text.HAS_CONTAINS.contains(predicate) || predicate instanceof Cmp)) - throw new IllegalArgumentException("Text mapped string values only support CONTAINS and Compare queries and not: " + predicate); - if (map==Mapping.STRING && Text.HAS_CONTAINS.contains(predicate)) - throw new IllegalArgumentException("String mapped string values do not support CONTAINS queries: " + predicate); - - //Special case - if (predicate == Text.CONTAINS) { - return tokenize(information, value, key, predicate, - ParameterType.TEXT_ANALYZER.findParameter(information.get(key).getParameters(), null)); - } else if (predicate == Text.PREFIX || predicate == Text.CONTAINS_PREFIX) { - return (key + ":" + escapeValue(value) + "*"); - } else if (predicate == Text.REGEX || predicate == Text.CONTAINS_REGEX) { - return (key + ":/" + value + "/"); - } else if (predicate == Cmp.EQUAL || predicate == 
Cmp.NOT_EQUAL) { - final String tokenizer = - ParameterType.STRING_ANALYZER.findParameter(information.get(key).getParameters(), null); - if (tokenizer != null) { - return tokenize(information, value, key, predicate, tokenizer); - } else if (predicate == Cmp.EQUAL) { - return (key + ":\"" + escapeValue(value) + "\""); - } else { // Cmp.NOT_EQUAL case - return ("-" + key + ":\"" + escapeValue(value) + "\""); - } - } else if (predicate == Text.FUZZY || predicate == Text.CONTAINS_FUZZY) { - return (key + ":"+escapeValue(value)+"~"+Text.getMaxEditDistance(value.toString())); - } else if (predicate == Cmp.LESS_THAN) { - return (key + ":[* TO \"" + escapeValue(value) + "\"}"); - } else if (predicate == Cmp.LESS_THAN_EQUAL) { - return (key + ":[* TO \"" + escapeValue(value) + "\"]"); - } else if (predicate == Cmp.GREATER_THAN) { - return (key + ":{\"" + escapeValue(value) + "\" TO *]"); - } else if (predicate == Cmp.GREATER_THAN_EQUAL) { - return (key + ":[\"" + escapeValue(value) + "\" TO *]"); - } else { - throw new IllegalArgumentException("Relation is not supported for string value: " + predicate); - } - } else if (value instanceof Geoshape) { - final Mapping map = Mapping.getMapping(information.get(key)); - Preconditions.checkArgument(predicate instanceof Geo && predicate != Geo.DISJOINT, - "Relation not supported on geo types: %s", predicate); - Preconditions.checkArgument(map == Mapping.PREFIX_TREE || predicate == Geo.WITHIN || predicate == Geo.INTERSECT, - "Relation not supported on geopoint types: %s", predicate); - final Geoshape geo = (Geoshape)value; - if (geo.getType() == Geoshape.Type.CIRCLE && (predicate == Geo.INTERSECT || map == Mapping.DEFAULT)) { - final Geoshape.Point center = geo.getPoint(); - return ("{!geofilt sfield=" + key + - " pt=" + center.getLatitude() + "," + center.getLongitude() + - " d=" + geo.getRadius() + "} distErrPct=0"); //distance in kilometers - } else if (geo.getType() == Geoshape.Type.BOX && (predicate == Geo.INTERSECT || map == Mapping.DEFAULT)) { - final Geoshape.Point southwest = geo.getPoint(0); - final Geoshape.Point northeast = geo.getPoint(1); - return (key + ":[" + southwest.getLatitude() + "," + southwest.getLongitude() + - " TO " + northeast.getLatitude() + "," + northeast.getLongitude() + "]"); - } else if (map == Mapping.PREFIX_TREE) { - return key + ":\"" + SPATIAL_PREDICATES.get(predicate) + "(" + geo + ")\" distErrPct=0"; - } else { - throw new IllegalArgumentException("Unsupported or invalid search shape type: " + geo.getType()); - } - } else if (value instanceof Date || value instanceof Instant) { - final String s = value.toString(); - final String queryValue = escapeValue(value instanceof Date ? 
toIsoDate((Date) value) : value.toString()); - Preconditions.checkArgument(predicate instanceof Cmp, "Relation not supported on date types: %s", predicate); - final Cmp numRel = (Cmp) predicate; - - switch (numRel) { - case EQUAL: - return (key + ":" + queryValue); - case NOT_EQUAL: - return ("-" + key + ":" + queryValue); - case LESS_THAN: - //use right curly to mean up to but not including value - return (key + ":[* TO " + queryValue + "}"); - case LESS_THAN_EQUAL: - return (key + ":[* TO " + queryValue + "]"); - case GREATER_THAN: - //use left curly to mean greater than but not including value - return (key + ":{" + queryValue + " TO *]"); - case GREATER_THAN_EQUAL: - return (key + ":[" + queryValue + " TO *]"); - default: throw new IllegalArgumentException("Unexpected relation: " + numRel); - } - } else if (value instanceof Boolean) { - final Cmp numRel = (Cmp) predicate; - final String queryValue = escapeValue(value); - switch (numRel) { - case EQUAL: - return (key + ":" + queryValue); - case NOT_EQUAL: - return ("-" + key + ":" + queryValue); - default: - throw new IllegalArgumentException("Boolean types only support EQUAL or NOT_EQUAL"); - } - } else if (value instanceof UUID) { - if (predicate == Cmp.EQUAL) { - return (key + ":\"" + escapeValue(value) + "\""); - } else if (predicate == Cmp.NOT_EQUAL) { - return ("-" + key + ":\"" + escapeValue(value) + "\""); - } else { - throw new IllegalArgumentException("Relation is not supported for uuid value: " + predicate); - } - } else throw new IllegalArgumentException("Unsupported type: " + value); - } else if (condition instanceof Not) { - final String sub = buildQueryFilter(((Not)condition).getChild(),information); - if (StringUtils.isNotBlank(sub)) return "-("+sub+")"; - else return ""; - } else if (condition instanceof And) { - final int numChildren = ((And) condition).size(); - final StringBuilder sb = new StringBuilder(); - for (final Condition c : condition.getChildren()) { - final String sub = buildQueryFilter(c, information); - - if (StringUtils.isBlank(sub)) - continue; - - // we don't have to add "+" which means AND iff - // a. it's a NOT query, - // b. expression is a single statement in the AND. 
- if (!sub.startsWith("-") && numChildren > 1) - sb.append("+"); - - sb.append(sub).append(" "); - } - return sb.toString(); - } else if (condition instanceof Or) { - final StringBuilder sb = new StringBuilder(); - int element=0; - for (final Condition c : condition.getChildren()) { - final String sub = buildQueryFilter(c,information); - if (StringUtils.isBlank(sub)) continue; - if (element==0) sb.append("("); - else sb.append(" OR "); - sb.append(sub); - element++; - } - if (element>0) sb.append(")"); - return sb.toString(); - } else { - throw new IllegalArgumentException("Invalid condition: " + condition); - } - } - - private String tokenize(KeyInformation.StoreRetriever information, Object value, String key, - JanusGraphPredicate janusgraphPredicate, String tokenizer) { - List terms; - if(tokenizer != null){ - terms = customTokenize(tokenizer, (String) value); - } else { - terms = Text.tokenize((String) value); - } - if (terms.isEmpty()) { - return ""; - } else if (terms.size() == 1) { - if (janusgraphPredicate == Cmp.NOT_EQUAL) { - return ("-" + key + ":(" + escapeValue(terms.get(0)) + ")"); - } else { - return (key + ":(" + escapeValue(terms.get(0)) + ")"); - } - } else { - final And andTerms = new And<>(); - for (final String term : terms) { - andTerms.add(new PredicateCondition<>(key, janusgraphPredicate, term)); - } - return buildQueryFilter(andTerms, information); - } - } - - @SuppressWarnings("unchecked") - private List customTokenize(String tokenizerClass, String value){ - CachingTokenFilter stream = null; - try { - final List terms = new ArrayList<>(); - final Tokenizer tokenizer - = ((Constructor) ClassLoader.getSystemClassLoader().loadClass(tokenizerClass) - .getConstructor()).newInstance(); - tokenizer.setReader(new StringReader(value)); - stream = new CachingTokenFilter(tokenizer); - final TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); - stream.reset(); - while (stream.incrementToken()) { - terms.add(termAtt.getBytesRef().utf8ToString()); - } - return terms; - } catch ( ReflectiveOperationException | IOException e) { - throw new IllegalArgumentException(e.getMessage(),e); - } finally { - IOUtils.closeQuietly(stream); - } - } - - private String toIsoDate(Date value) { - final TimeZone tz = TimeZone.getTimeZone("UTC"); - final DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - df.setTimeZone(tz); - return df.format(value); - } - - /** - * Solr handles all transactions on the server-side. That means all - * commit, optimize, or rollback applies since the last commit/optimize/rollback. - * Solr documentation recommends best way to update Solr is in one process to avoid - * race conditions. - * - * @return New Transaction Handle - */ - @Override - public BaseTransactionConfigurable beginTransaction(BaseTransactionConfig config) { - return new DefaultTransaction(config); - } - - @Override - public void close() throws BackendException { - logger.trace("Shutting down connection to Solr {}", solrClient); - try { - solrClient.close(); - } catch (final IOException e) { - throw new TemporaryBackendException(e); - } - } - - @Override - public void clearStorage() throws BackendException { - try { - if (mode!= Mode.CLOUD) { - logger.error("Operation only supported for SolrCloud. 
Cores must be deleted manually through the Solr API when using HTTP mode."); - return; - } - logger.debug("Clearing storage from Solr: {}", solrClient); - final ZkStateReader zkStateReader = ((CloudSolrClient) solrClient).getZkStateReader(); - zkStateReader.forciblyRefreshAllClusterStateSlow(); - final ClusterState clusterState = zkStateReader.getClusterState(); - for (final String collection : clusterState.getCollectionsMap().keySet()) { - logger.debug("Clearing collection [{}] in Solr",collection); - // Collection is not dropped because it may have been created externally - final UpdateRequest deleteAll = newUpdateRequest(); - deleteAll.deleteByQuery("*:*"); - solrClient.request(deleteAll, collection); - } - - } catch (final SolrServerException e) { - logger.error("Unable to clear storage from index due to server error on Solr.", e); - throw new PermanentBackendException(e); - } catch (final IOException e) { - logger.error("Unable to clear storage from index due to low-level I/O error.", e); - throw new PermanentBackendException(e); - } catch (final Exception e) { - logger.error("Unable to clear storage from index due to general error.", e); - throw new PermanentBackendException(e); - } - } - - @Override - public boolean supports(KeyInformation information, JanusGraphPredicate predicate) { - final Class dataType = information.getDataType(); - final Mapping mapping = Mapping.getMapping(information); - if (mapping!=Mapping.DEFAULT && !AttributeUtils.isString(dataType) && - !(mapping==Mapping.PREFIX_TREE && AttributeUtils.isGeo(dataType))) return false; - - if (Number.class.isAssignableFrom(dataType)) { - return predicate instanceof Cmp; - } else if (dataType == Geoshape.class) { - switch(mapping) { - case DEFAULT: - return predicate == Geo.WITHIN || predicate == Geo.INTERSECT; - case PREFIX_TREE: - return predicate == Geo.INTERSECT || predicate == Geo.WITHIN || predicate == Geo.CONTAINS; - } - } else if (AttributeUtils.isString(dataType)) { - switch(mapping) { - case DEFAULT: - case TEXT: - return predicate == Text.CONTAINS || predicate == Text.CONTAINS_PREFIX - || predicate == Text.CONTAINS_REGEX || predicate == Text.CONTAINS_FUZZY; - case STRING: - return predicate instanceof Cmp || predicate==Text.REGEX || predicate==Text.PREFIX || predicate == Text.FUZZY; -// case TEXTSTRING: -// return (janusgraphPredicate instanceof Text) || janusgraphPredicate == Cmp.EQUAL || janusgraphPredicate==Cmp.NOT_EQUAL; - } - } else if (dataType == Date.class || dataType == Instant.class) { - return predicate instanceof Cmp; - } else if (dataType == Boolean.class) { - return predicate == Cmp.EQUAL || predicate == Cmp.NOT_EQUAL; - } else if (dataType == UUID.class) { - return predicate == Cmp.EQUAL || predicate==Cmp.NOT_EQUAL; - } - return false; - } - - @Override - public boolean supports(KeyInformation information) { - final Class dataType = information.getDataType(); - final Mapping mapping = Mapping.getMapping(information); - if (Number.class.isAssignableFrom(dataType) || dataType == Date.class || dataType == Instant.class - || dataType == Boolean.class || dataType == UUID.class) { - return mapping == Mapping.DEFAULT; - } else if (AttributeUtils.isString(dataType)) { - return mapping == Mapping.DEFAULT || mapping == Mapping.TEXT || mapping == Mapping.STRING; - } else if (AttributeUtils.isGeo(dataType)) { - return mapping == Mapping.DEFAULT || mapping == Mapping.PREFIX_TREE; - } - return false; - } - - @Override - public String mapKey2Field(String key, KeyInformation keyInfo) { - 
IndexProvider.checkKeyValidity(key); - key = key.replace(' ', REPLACEMENT_CHAR); - - if (!dynFields) return key; - if (ParameterType.MAPPED_NAME.hasParameter(keyInfo.getParameters())) return key; - String postfix; - final Class dataType = keyInfo.getDataType(); - if (AttributeUtils.isString(dataType)) { - final Mapping map = getStringMapping(keyInfo); - switch (map) { - case TEXT: postfix = "_t"; break; - case STRING: postfix = "_s"; break; - default: throw new IllegalArgumentException("Unsupported string mapping: " + map); - } - } else if (AttributeUtils.isWholeNumber(dataType)) { - if (dataType.equals(Long.class)) postfix = "_l"; - else postfix = "_i"; - } else if (AttributeUtils.isDecimal(dataType)) { - if (dataType.equals(Float.class)) postfix = "_f"; - else postfix = "_d"; - } else if (dataType.equals(BigInteger.class)) { - postfix = "_bi"; - } else if (dataType.equals(BigDecimal.class)) { - postfix = "_bd"; - } else if (dataType.equals(Geoshape.class)) { - postfix = "_g"; - } else if (dataType.equals(Date.class) || dataType.equals(Instant.class)) { - postfix = "_dt"; - } else if (dataType.equals(Boolean.class)) { - postfix = "_b"; - } else if (dataType.equals(UUID.class)) { - postfix = "_uuid"; - } else throw new IllegalArgumentException("Unsupported data type ["+dataType+"] for field: " + key); - - if (keyInfo.getCardinality() == Cardinality.SET || keyInfo.getCardinality() == Cardinality.LIST) { - postfix += "s"; - } - return key+postfix; - } - - @Override - public IndexFeatures getFeatures() { - return SOLR_FEATURES; - } - - @Override - public boolean exists() throws BackendException { - if (mode!= Mode.CLOUD) throw new UnsupportedOperationException("Operation only supported for SolrCloud"); - final CloudSolrClient server = (CloudSolrClient) solrClient; - try { - final ZkStateReader zkStateReader = server.getZkStateReader(); - zkStateReader.forciblyRefreshAllClusterStateSlow(); - final ClusterState clusterState = zkStateReader.getClusterState(); - final Map collections = clusterState.getCollectionsMap(); - return collections != null && !collections.isEmpty(); - } catch (KeeperException | InterruptedException e) { - throw new PermanentBackendException("Unable to check if index exists", e); - } - } - - /* - ################# UTILITY METHODS ####################### - */ - - private static Mapping getStringMapping(KeyInformation information) { - assert AttributeUtils.isString(information.getDataType()); - Mapping map = Mapping.getMapping(information); - if (map==Mapping.DEFAULT) map = Mapping.TEXT; - return map; - } - - private static Map spatialPredicates() { - return Collections.unmodifiableMap(Stream.of( - new SimpleEntry<>(Geo.WITHIN, "IsWithin"), - new SimpleEntry<>(Geo.CONTAINS, "Contains"), - new SimpleEntry<>(Geo.INTERSECT, "Intersects"), - new SimpleEntry<>(Geo.DISJOINT, "IsDisjointTo")) - .collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue))); - } - - private UpdateRequest newUpdateRequest() { - final UpdateRequest req = new UpdateRequest(); - if(waitSearcher) { - req.setAction(UpdateRequest.ACTION.COMMIT, true, true); - } - return req; - } - - private BackendException storageException(Exception solrException) { - return new TemporaryBackendException("Unable to complete query on Solr.", solrException); - } - - private static void createCollectionIfNotExists(CloudSolrClient client, Configuration config, String collection) - throws IOException, SolrServerException, KeeperException, InterruptedException { - if (!checkIfCollectionExists(client, collection)) { - 
final Integer numShards = config.get(NUM_SHARDS); - final Integer maxShardsPerNode = config.get(MAX_SHARDS_PER_NODE); - final Integer replicationFactor = config.get(REPLICATION_FACTOR); - - - // Ideally this property used so a new configset is not uploaded for every single - // index (collection) created in solr. - // if a generic configSet is not set, make the configset name the same as the collection. - // This was the default behavior before a default configSet could be specified - final String genericConfigSet = config.has(SOLR_DEFAULT_CONFIG) ? config.get(SOLR_DEFAULT_CONFIG):collection; - - final CollectionAdminRequest.Create createRequest = CollectionAdminRequest.createCollection(collection, genericConfigSet, numShards, replicationFactor); - createRequest.setMaxShardsPerNode(maxShardsPerNode); - - final CollectionAdminResponse createResponse = createRequest.process(client); - if (createResponse.isSuccess()) { - logger.trace("Collection {} successfully created.", collection); - } else { - throw new SolrServerException(Joiner.on("\n").join(createResponse.getErrorMessages())); - } - } - - waitForRecoveriesToFinish(client, collection); - } - - /** - * Checks if the collection has already been created in Solr. - */ - private static boolean checkIfCollectionExists(CloudSolrClient server, String collection) throws KeeperException, InterruptedException { - final ZkStateReader zkStateReader = server.getZkStateReader(); - zkStateReader.forceUpdateCollection(collection); - final ClusterState clusterState = zkStateReader.getClusterState(); - return clusterState.getCollectionOrNull(collection) != null; - } - - /** - * Wait for all the collection shards to be ready. - */ - private static void waitForRecoveriesToFinish(CloudSolrClient server, String collection) throws KeeperException, InterruptedException { - final ZkStateReader zkStateReader = server.getZkStateReader(); - try { - boolean cont = true; - - while (cont) { - boolean sawLiveRecovering = false; - zkStateReader.forceUpdateCollection(collection); - final ClusterState clusterState = zkStateReader.getClusterState(); - final Map slices = clusterState.getCollection(collection).getSlicesMap(); - Preconditions.checkNotNull(slices, "Could not find collection:" + collection); - - // change paths for Replica.State per Solr refactoring - // remove SYNC state per: http://tinyurl.com/pag6rwt - for (final Map.Entry entry : slices.entrySet()) { - final Map shards = entry.getValue().getReplicasMap(); - for (final Map.Entry shard : shards.entrySet()) { - final String state = shard.getValue().getStr(ZkStateReader.STATE_PROP).toUpperCase(); - if ((Replica.State.RECOVERING.name().equals(state) || Replica.State.DOWN.name().equals(state)) - && clusterState.liveNodesContain(shard.getValue().getStr( - ZkStateReader.NODE_NAME_PROP))) { - sawLiveRecovering = true; - } - } - } - - - if (!sawLiveRecovering) { - cont = false; - } else { - Thread.sleep(1000); - } - } - } finally { - logger.info("Exiting solr wait"); - } - } -} \ No newline at end of file diff --git a/graphdb/pom.xml b/graphdb/pom.xml index 98806d26e8..c4c85beed2 100644 --- a/graphdb/pom.xml +++ b/graphdb/pom.xml @@ -36,7 +36,6 @@ api common graphdb-impls - janus-hbase2 janus diff --git a/intg/pom.xml b/intg/pom.xml index c15929de35..8ec0638d7a 100644 --- a/intg/pom.xml +++ b/intg/pom.xml @@ -119,6 +119,11 @@ launchdarkly-java-server-sdk ${launch-darkly.version} + + jakarta.annotation + jakarta.annotation-api + 2.1.1 + @@ -138,9 +143,10 @@ org.apache.maven.plugins maven-compiler-plugin + 
${maven-compiler-plugin} - 1.8 - 1.8 + 17 + 17 diff --git a/intg/src/main/java/org/apache/atlas/ApplicationProperties.java b/intg/src/main/java/org/apache/atlas/ApplicationProperties.java index 78b487a8dd..e2f04ff0a6 100644 --- a/intg/src/main/java/org/apache/atlas/ApplicationProperties.java +++ b/intg/src/main/java/org/apache/atlas/ApplicationProperties.java @@ -55,8 +55,6 @@ public final class ApplicationProperties extends PropertiesConfiguration { public static final String ENABLE_FREETEXT_SEARCH_CONF = "atlas.search.freetext.enable"; public static final String ATLAS_RUN_MODE = "atlas.run.mode"; public static final String GRAPHBD_BACKEND_JANUS = "janus"; - public static final String STORAGE_BACKEND_HBASE = "hbase"; - public static final String STORAGE_BACKEND_HBASE2 = "hbase2"; public static final String INDEX_BACKEND_SOLR = "solr"; public static final String LDAP_TYPE = "atlas.authentication.method.ldap.type"; public static final String LDAP = "LDAP"; @@ -321,10 +319,6 @@ private void setDefaults() { // setting value for 'atlas.graph.storage.backend' (default = 'hbase2') String storageBackend = getString(STORAGE_BACKEND_CONF); - if (StringUtils.isEmpty(storageBackend) || storageBackend.equalsIgnoreCase(STORAGE_BACKEND_HBASE)) { - storageBackend = STORAGE_BACKEND_HBASE2; - } - clearPropertyDirect(STORAGE_BACKEND_CONF); addPropertyDirect(STORAGE_BACKEND_CONF, storageBackend); LOG.info("Using storage backend '" + storageBackend + "'"); diff --git a/intg/src/main/java/org/apache/atlas/featureflag/AtlasFeatureFlagClient.java b/intg/src/main/java/org/apache/atlas/featureflag/AtlasFeatureFlagClient.java index fc0e86f90c..5326adb9a2 100644 --- a/intg/src/main/java/org/apache/atlas/featureflag/AtlasFeatureFlagClient.java +++ b/intg/src/main/java/org/apache/atlas/featureflag/AtlasFeatureFlagClient.java @@ -18,12 +18,12 @@ package org.apache.atlas.featureflag; import com.launchdarkly.sdk.server.*; +import jakarta.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; -import javax.annotation.PreDestroy; import java.io.IOException; import java.util.Objects; diff --git a/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java b/intg/src/main/java/org/apache/atlas/model/EntityAuditEvent.java similarity index 98% rename from client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java rename to intg/src/main/java/org/apache/atlas/model/EntityAuditEvent.java index 1b452a92ae..0024b1f1f0 100644 --- a/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java +++ b/intg/src/main/java/org/apache/atlas/model/EntityAuditEvent.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.atlas; +package org.apache.atlas.model; import com.fasterxml.jackson.annotation.JsonAutoDetect; @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import org.apache.atlas.AtlasException; import org.apache.atlas.v1.model.instance.Referenceable; import org.apache.atlas.type.AtlasType; @@ -212,4 +213,4 @@ public void setEntityDefinition(String entityDefinition) { public static EntityAuditEvent fromString(String eventString) { return AtlasType.fromV1Json(eventString, EntityAuditEvent.class); } -} +} \ No newline at end of file diff --git a/intg/src/main/java/org/apache/atlas/security/InMemoryJAASConfiguration.java b/intg/src/main/java/org/apache/atlas/security/InMemoryJAASConfiguration.java deleted file mode 100644 index 936311b084..0000000000 --- a/intg/src/main/java/org/apache/atlas/security/InMemoryJAASConfiguration.java +++ /dev/null @@ -1,401 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.security; - -import org.apache.atlas.AtlasException; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.configuration.ConfigurationConverter; -import org.apache.commons.lang.ArrayUtils; -import org.apache.hadoop.security.SecurityUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.SortedSet; -import java.util.StringTokenizer; -import java.util.TreeSet; - - -/** - * InMemoryJAASConfiguration - *
    - * An utility class - which has a static method init to load all JAAS configuration from Application - * properties file (eg: atlas.properties) and set it as part of the default lookup configuration for - * all JAAS configuration lookup. - *
    - * Example settings in jaas-application.properties: - * - *
    - * atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule
    - * atlas.jaas.KafkaClient.loginModuleControlFlag = required
    - * atlas.jaas.KafkaClient.option.useKeyTab = true
    - * atlas.jaas.KafkaClient.option.storeKey = true
    - * atlas.jaas.KafkaClient.option.serviceName = kafka
    - * atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/kafka_client.keytab
    - * atlas.jaas.KafkaClient.option.principal = kafka-client-1@EXAMPLE.COM
    -
    - * atlas.jaas.MyClient.0.loginModuleName = com.sun.security.auth.module.Krb5LoginModule
    - * atlas.jaas.MyClient.0.loginModuleControlFlag = required
    - * atlas.jaas.MyClient.0.option.useKeyTab = true
    - * atlas.jaas.MyClient.0.option.storeKey = true
    - * atlas.jaas.MyClient.0.option.serviceName = kafka
    - * atlas.jaas.MyClient.0.option.keyTab = /etc/security/keytabs/kafka_client.keytab
    - * atlas.jaas.MyClient.0.option.principal = kafka-client-1@EXAMPLE.COM
    - * atlas.jaas.MyClient.1.loginModuleName = com.sun.security.auth.module.Krb5LoginModule
    - * atlas.jaas.MyClient.1.loginModuleControlFlag = optional
    - * atlas.jaas.MyClient.1.option.useKeyTab = true
    - * atlas.jaas.MyClient.1.option.storeKey = true
    - * atlas.jaas.MyClient.1.option.serviceName = kafka
    - * atlas.jaas.MyClient.1.option.keyTab = /etc/security/keytabs/kafka_client.keytab
    - * atlas.jaas.MyClient.1.option.principal = kafka-client-1@EXAMPLE.COM
    - * - *
    - * This will set the JAAS configuration - equivalent to the jaas.conf file entries: - * - *
    - *  KafkaClient {
    - *      com.sun.security.auth.module.Krb5LoginModule required
    - *          useKeyTab=true
    - *          storeKey=true
    - *          serviceName=kafka
    - *          keyTab="/etc/security/keytabs/kafka_client.keytab"
    - *          principal="kafka-client-1@EXAMPLE.COM";
    - *  };
    - *  MyClient {
    - *      com.sun.security.auth.module.Krb5LoginModule required
    - *          useKeyTab=true
    - *          storeKey=true
    - *          serviceName=kafka keyTab="/etc/security/keytabs/kafka_client.keytab"
    - *          principal="kafka-client-1@EXAMPLE.COM";
    - *  };
    - *  MyClient {
    - *      com.sun.security.auth.module.Krb5LoginModule optional
    - *          useKeyTab=true
    - *          storeKey=true
    - *          serviceName=kafka
    - *          keyTab="/etc/security/keytabs/kafka_client.keytab"
    - *          principal="kafka-client-1@EXAMPLE.COM";
    - *  };
    - *
    - * Here is the syntax for atlas.properties to add JAAS configuration: - *
    - * The property name has to begin with 'atlas.jaas.' + clientId (in case of Kafka client, - * it expects the clientId to be KafkaClient). - *
    - * The following property must be there to specify the JAAS loginModule name - *
    - *          'atlas.jaas.' +  clientId  + '.loginModuleName'
    - *
    - * The following optional property should be set to specify the loginModuleControlFlag - *
    - *          'atlas.jaas.' + clientId + '.loginModuleControlFlag'
    - *          Default value :  required ,  Possible values:  required, optional, sufficient, requisite
    - *
    - * Then you can add additional optional parameters as options for the configuration using the following - * syntax: - *
    - *          'atlas.jaas.' + clientId + '.option.' +   =
    - *
    - * The current setup will lookup JAAS configration from the atlas-application.properties first, - * if not available, it will delegate to the original configuration - * - */ - - -public final class InMemoryJAASConfiguration extends Configuration { - - private static final Logger LOG = LoggerFactory.getLogger(InMemoryJAASConfiguration.class); - - private static final String JAAS_CONFIG_PREFIX_PARAM = "atlas.jaas."; - private static final String JAAS_CONFIG_LOGIN_MODULE_NAME_PARAM = "loginModuleName"; - private static final String JAAS_CONFIG_LOGIN_MODULE_CONTROL_FLAG_PARAM = "loginModuleControlFlag"; - private static final String JAAS_CONFIG_LOGIN_OPTIONS_PREFIX = "option"; - private static final String JAAS_PRINCIPAL_PROP = "principal"; - private static final Map CONFIG_SECTION_REDIRECTS = new HashMap<>(); - - private Configuration parent = null; - private Map> applicationConfigEntryMap = new HashMap<>(); - - public static void init(String propFile) throws AtlasException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> InMemoryJAASConfiguration.init({})", propFile); - } - - InputStream in = null; - - try { - Properties properties = new Properties(); - in = ClassLoader.getSystemResourceAsStream(propFile); - if (in == null) { - if (!propFile.startsWith("/")) { - in = ClassLoader.getSystemResourceAsStream("/" + propFile); - } - if (in == null) { - in = new FileInputStream(new File(propFile)); - } - } - properties.load(in); - init(properties); - } catch (IOException e) { - throw new AtlasException("Failed to load JAAS application properties", e); - } finally { - if (in != null) { - try { - in.close(); - } catch (Exception exception) { - // Ignore - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== InMemoryJAASConfiguration.init({})", propFile); - } - } - - public static void init(org.apache.commons.configuration.Configuration atlasConfiguration) throws AtlasException { - LOG.debug("==> InMemoryJAASConfiguration.init()"); - - if (atlasConfiguration != null && !atlasConfiguration.isEmpty()) { - Properties properties = ConfigurationConverter.getProperties(atlasConfiguration); - init(properties); - } else { - throw new AtlasException("Failed to load JAAS application properties: configuration NULL or empty!"); - } - - LOG.debug("<== InMemoryJAASConfiguration.init()"); - } - - public static void init(Properties properties) throws AtlasException { - LOG.debug("==> InMemoryJAASConfiguration.init()"); - - if (properties != null && MapUtils.isNotEmpty(properties)) { - InMemoryJAASConfiguration conf = new InMemoryJAASConfiguration(properties); - Configuration.setConfiguration(conf); - } else { - throw new AtlasException("Failed to load JAAS application properties: properties NULL or empty!"); - } - - LOG.debug("<== InMemoryJAASConfiguration.init()"); - } - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String name) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> InMemoryJAASConfiguration.getAppConfigurationEntry({})", name); - } - - AppConfigurationEntry[] ret = null; - List retList = null; - String redirectedName = getConfigSectionRedirect(name); - - if (redirectedName != null) { - retList = applicationConfigEntryMap.get(redirectedName); - - if (LOG.isDebugEnabled()) { - LOG.debug("Redirected jaasConfigSection ({} -> {}): ", name, redirectedName, retList); - } - } - - if (retList == null || retList.size() == 0) { - retList = applicationConfigEntryMap.get(name); - } - - if (retList == null || retList.size() == 0) { - if (parent != null) { - ret = 
parent.getAppConfigurationEntry(name); - } - } else { - int sz = retList.size(); - ret = new AppConfigurationEntry[sz]; - ret = retList.toArray(ret); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== InMemoryJAASConfiguration.getAppConfigurationEntry({}): {}", name, ArrayUtils.toString(ret)); - } - - return ret; - } - - private InMemoryJAASConfiguration(Properties prop) { - parent = Configuration.getConfiguration(); - initialize(prop); - } - - private void initialize(Properties properties) { - LOG.debug("==> InMemoryJAASConfiguration.initialize()"); - - int prefixLen = JAAS_CONFIG_PREFIX_PARAM.length(); - - Map> jaasClients = new HashMap<>(); - for (String key : properties.stringPropertyNames()) { - if (key.startsWith(JAAS_CONFIG_PREFIX_PARAM)) { - String jaasKey = key.substring(prefixLen); - StringTokenizer tokenizer = new StringTokenizer(jaasKey, "."); - int tokenCount = tokenizer.countTokens(); - if (tokenCount > 0) { - String clientId = tokenizer.nextToken(); - SortedSet indexList = jaasClients.get(clientId); - if (indexList == null) { - indexList = new TreeSet<>(); - jaasClients.put(clientId, indexList); - } - String indexStr = tokenizer.nextToken(); - - int indexId = isNumeric(indexStr) ? Integer.parseInt(indexStr) : -1; - - Integer clientIdIndex = Integer.valueOf(indexId); - - if (!indexList.contains(clientIdIndex)) { - indexList.add(clientIdIndex); - } - - } - } - } - for (String jaasClient : jaasClients.keySet()) { - - for (Integer index : jaasClients.get(jaasClient)) { - - String keyPrefix = JAAS_CONFIG_PREFIX_PARAM + jaasClient + "."; - - if (index > -1) { - keyPrefix = keyPrefix + String.valueOf(index) + "."; - } - - String keyParam = keyPrefix + JAAS_CONFIG_LOGIN_MODULE_NAME_PARAM; - String loginModuleName = properties.getProperty(keyParam); - - if (loginModuleName == null) { - LOG.error("Unable to add JAAS configuration for client [{}] as it is missing param [{}]. 
Skipping JAAS config for [{}]", jaasClient, keyParam, jaasClient); - continue; - } else { - loginModuleName = loginModuleName.trim(); - } - - keyParam = keyPrefix + JAAS_CONFIG_LOGIN_MODULE_CONTROL_FLAG_PARAM; - String controlFlag = properties.getProperty(keyParam); - - AppConfigurationEntry.LoginModuleControlFlag loginControlFlag = null; - if (controlFlag != null) { - controlFlag = controlFlag.trim().toLowerCase(); - switch (controlFlag) { - case "optional": - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL; - break; - case "requisite": - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUISITE; - break; - case "sufficient": - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT; - break; - case "required": - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - break; - default: - String validValues = "optional|requisite|sufficient|required"; - LOG.warn("Unknown JAAS configuration value for ({}) = [{}], valid value are [{}] using the default value, REQUIRED", keyParam, controlFlag, validValues); - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - break; - } - } else { - LOG.warn("Unable to find JAAS configuration ({}); using the default value, REQUIRED", keyParam); - loginControlFlag = AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; - } - - - Map options = new HashMap<>(); - String optionPrefix = keyPrefix + JAAS_CONFIG_LOGIN_OPTIONS_PREFIX + "."; - int optionPrefixLen = optionPrefix.length(); - for (String key : properties.stringPropertyNames()) { - if (key.startsWith(optionPrefix)) { - String optionKey = key.substring(optionPrefixLen); - String optionVal = properties.getProperty(key); - if (optionVal != null) { - optionVal = optionVal.trim(); - - try { - if (optionKey.equalsIgnoreCase(JAAS_PRINCIPAL_PROP)) { - optionVal = SecurityUtil.getServerPrincipal(optionVal, (String) null); - } - } catch (IOException e) { - LOG.warn("Failed to build serverPrincipal. Using provided value:[{}]", optionVal); - } - } - options.put(optionKey, optionVal); - } - } - - AppConfigurationEntry entry = new AppConfigurationEntry(loginModuleName, loginControlFlag, options); - - if (LOG.isDebugEnabled()) { - StringBuilder sb = new StringBuilder(); - sb.append("Adding client: [").append(jaasClient).append("{").append(index).append("}]\n"); - sb.append("\tloginModule: [").append(loginModuleName).append("]\n"); - sb.append("\tcontrolFlag: [").append(loginControlFlag).append("]\n"); - for (String key : options.keySet()) { - String val = options.get(key); - sb.append("\tOptions: [").append(key).append("] => [").append(val).append("]\n"); - } - LOG.debug(sb.toString()); - } - - List retList = applicationConfigEntryMap.get(jaasClient); - if (retList == null) { - retList = new ArrayList<>(); - applicationConfigEntryMap.put(jaasClient, retList); - } - - retList.add(entry); - } - } - - LOG.debug("<== InMemoryJAASConfiguration.initialize({})", applicationConfigEntryMap); - } - - private static boolean isNumeric(String str) { - return str.matches("-?\\d+(\\.\\d+)?"); //match a number with optional '-' and decimal. 
- } - - public static void setConfigSectionRedirect(String name, String redirectTo) { - if (LOG.isDebugEnabled()) { - LOG.debug("setConfigSectionRedirect({}, {})", name, redirectTo); - } - - if (name != null) { - if (redirectTo != null) { - CONFIG_SECTION_REDIRECTS.put(name, redirectTo); - } else { - CONFIG_SECTION_REDIRECTS.remove(name); - } - } - } - - private static String getConfigSectionRedirect(String name) { - return name != null ? CONFIG_SECTION_REDIRECTS.get(name) : null; - } -} diff --git a/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTest.java b/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTest.java deleted file mode 100644 index b0f24de528..0000000000 --- a/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.security; - -import org.apache.hadoop.util.StringUtils; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; - - -//Unstable test. 
Disabling -@Test(enabled=false) -public class InMemoryJAASConfigurationTest { - - private static final String ATLAS_JAAS_PROP_FILE = "atlas-jaas.properties"; - - @BeforeClass - protected void setUp() throws Exception { - InMemoryJAASConfiguration.init(ATLAS_JAAS_PROP_FILE); - } - - @Test(enabled=false) - public void testGetAppConfigurationEntryStringForKafkaClient() { - AppConfigurationEntry[] entries = - Configuration.getConfiguration().getAppConfigurationEntry("KafkaClient"); - Assert.assertNotNull(entries); - Assert.assertEquals(1, entries.length); - String principal = (String) entries[0].getOptions().get("principal"); - Assert.assertNotNull(principal); - String[] components = principal.split("[/@]"); - Assert.assertEquals(3, components.length); - Assert.assertEquals(false, StringUtils.equalsIgnoreCase(components[1], "_HOST")); - - } - - @Test(enabled=false) - public void testGetAppConfigurationEntryStringForMyClient() { - AppConfigurationEntry[] entries = - Configuration.getConfiguration().getAppConfigurationEntry("myClient"); - Assert.assertNotNull(entries); - Assert.assertEquals(2, entries.length); - String principal = (String) entries[0].getOptions().get("principal"); - Assert.assertNotNull(principal); - String[] components = principal.split("[/@]"); - Assert.assertEquals(3, components.length); - Assert.assertEquals(true, StringUtils.equalsIgnoreCase(components[1], "abcd")); - - principal = (String) entries[1].getOptions().get("principal"); - Assert.assertNotNull(principal); - components = principal.split("[/@]"); - Assert.assertEquals(2, components.length); - } - - @Test(enabled=false) - public void testGetAppConfigurationEntryStringForUnknownClient() { - AppConfigurationEntry[] entries = - Configuration.getConfiguration().getAppConfigurationEntry("UnknownClient"); - Assert.assertNull(entries); - } - -} - diff --git a/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTicketBasedKafkaClientTest.java b/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTicketBasedKafkaClientTest.java deleted file mode 100644 index 75c6e87b7f..0000000000 --- a/intg/src/test/java/org/apache/atlas/security/InMemoryJAASConfigurationTicketBasedKafkaClientTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.security; - -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; - - -@Test -public class InMemoryJAASConfigurationTicketBasedKafkaClientTest { - - private static final String ATLAS_JAAS_PROP_FILE = "atlas-jaas.properties"; - - @BeforeClass - public void setUp() throws Exception { - InMemoryJAASConfiguration.init(ATLAS_JAAS_PROP_FILE); - InMemoryJAASConfiguration.setConfigSectionRedirect("KafkaClient", "ticketBased-KafkaClient"); - } - - @Test - public void testGetAppConfigurationEntryStringForticketBasedKafkaClient() { - - AppConfigurationEntry[] entries = - Configuration.getConfiguration().getAppConfigurationEntry("KafkaClient"); - Assert.assertNotNull(entries); - Assert.assertEquals((String) entries[0].getOptions().get("useTicketCache"), "true"); - } - - -} - diff --git a/notification/pom.xml b/notification/pom.xml index 4d7c810708..c23ddc23b5 100644 --- a/notification/pom.xml +++ b/notification/pom.xml @@ -30,11 +30,6 @@ jar - - org.apache.atlas - atlas-client-v1 - - org.apache.atlas atlas-common diff --git a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java index 24ea6ea836..c22734bc92 100644 --- a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java +++ b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java @@ -122,7 +122,7 @@ public abstract class AtlasHook { new LinkedBlockingDeque<>(queueSize), new ThreadFactoryBuilder().setNameFormat("Atlas Notifier %d").setDaemon(true).build()); - ShutdownHookManager.get().addShutdownHook(new Thread() { + Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { @@ -138,7 +138,7 @@ public void run() { LOG.info("<== Shutdown of Atlas Hook"); } } - }, AtlasConstants.ATLAS_SHUTDOWN_HOOK_PRIORITY); + }); } LOG.info("Created Atlas Hook"); diff --git a/pom.xml b/pom.xml index 45bbee7382..5277fd795e 100644 --- a/pom.xml +++ b/pom.xml @@ -674,6 +674,9 @@ + 3.13.0 + 3.4.0 + 3.4.0 2.3.7 4.5 4.7 @@ -699,7 +702,8 @@ 3.2.2 7.16.2 org.apache.atlas.repository.audit.InMemoryEntityAuditRepository - 2.13.2 + + 2.17.0 2.18.1 0.8 6.5.16 @@ -710,12 +714,11 @@ 4.1.0 ${hadoop.version} 3.3.6 - 2.3.3 3.1.0 0.8.1 4.5.13 4.4.13 - 2.12.4 + 2.13.4.2 2.12.4 0.6.03 0.5.3 @@ -754,14 +757,16 @@ UTF-8 UTF-8 ${project.basedir} - false + + true true false - false + true true false false false + 1.7.30 8.6.3 8.6.3 @@ -798,7 +803,6 @@ dashboardv2 dashboardv3 - auth-agents-cred auth-agents-common auth-audits auth-plugin-atlas @@ -806,26 +810,9 @@ webapp docs - addons/hdfs-model plugin-classloader - addons/hive-bridge-shim - addons/hive-bridge - addons/falcon-bridge-shim - addons/falcon-bridge - addons/sqoop-bridge-shim - addons/sqoop-bridge - addons/hbase-bridge-shim - addons/hbase-bridge - addons/hbase-testing-util - addons/kafka-bridge - tools/classification-updater - tools/atlas-index-repair - addons/impala-hook-api - addons/impala-bridge-shim - addons/impala-bridge distro - atlas-examples @@ -846,14 +833,14 @@ true - + apache.snapshots.repo https://repository.apache.org/content/groups/snapshots @@ -896,6 +883,18 @@ + + io.grpc + grpc-core + 1.38.1 + + + + jakarta.annotation + jakarta.annotation-api + 2.1.1 + + org.antlr antlr4-runtime @@ -1325,76 +1324,6 @@ 1.3.2 - - org.apache.hbase - hbase-client - ${hbase.version} - - - avro - org.apache.avro - - - 
jruby-complete - org.jruby - - - asm - asm - - - * - org.apache.hadoop - - - * - org.codehaus.jackson - - - * - org.mortbay.jetty - - - junit - junit - - - - - - org.apache.hbase - hbase-server - ${hbase.version} - tests - - - junit - junit - - - - - - org.apache.hbase - hbase-server - ${hbase.version} - provided - - - org.mortbay.jetty - * - - - tomcat - * - - - junit - junit - - - - javax.servlet javax.servlet-api @@ -1515,90 +1444,6 @@ pom - - org.apache.atlas - hive-bridge - ${project.version} - - - - org.apache.atlas - storm-bridge - ${project.version} - - - - org.apache.atlas - falcon-bridge - ${project.version} - - - - org.apache.atlas - sqoop-bridge - ${project.version} - - - - org.apache.atlas - hive-bridge-shim - ${project.version} - - - - org.apache.atlas - storm-bridge-shim - ${project.version} - - - - org.apache.atlas - falcon-bridge-shim - ${project.version} - - - - org.apache.atlas - sqoop-bridge-shim - ${project.version} - - - - org.apache.atlas - hbase-bridge - ${project.version} - - - - org.apache.atlas - hbase-bridge-shim - ${project.version} - - - - org.apache.atlas - kafka-bridge - ${project.version} - - - - org.apache.atlas - impala-hook-api - ${project.version} - - - - org.apache.atlas - impala-bridge - ${project.version} - - - - org.apache.atlas - impala-bridge-shim - ${project.version} - - com.webcohesion.enunciate @@ -1784,10 +1629,10 @@ org.apache.maven.plugins maven-compiler-plugin - 3.7.0 + ${maven-compiler-plugin} - 1.8 - 1.8 + 17 + 17 @@ -1830,7 +1675,7 @@ org.apache.maven.plugins maven-war-plugin - 2.6 + ${maven-war-plugin} @@ -1851,12 +1696,6 @@ ${maven-site-plugin.version} - - org.codehaus.mojo - findbugs-maven-plugin - 3.0.5 - - org.codehaus.mojo @@ -2181,11 +2020,14 @@ - + + + + <!–debug>truetrue false ${skipCheck} @@ -2199,7 +2041,7 @@ verify - + --> diff --git a/repository/pom.xml b/repository/pom.xml index bbe15338f8..008d5fd2c0 100755 --- a/repository/pom.xml +++ b/repository/pom.xml @@ -157,48 +157,6 @@ ${project.version} - - org.apache.hbase - hbase-client - - - com.github.stephenc.findbugs - findbugs-annotations - - - io.netty - netty-handler - - - io.netty - netty-transport-native-epoll - - - - - - org.apache.hbase - hbase-server - - - javax.servlet - * - - - javax.ws.rs - * - - - org.eclipse.jetty - * - - - org.mortbay.jetty - servlet-api-2.5 - - - - org.springframework spring-aop diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java index a873105a6d..eba5da1db8 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java @@ -67,8 +67,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import static org.apache.atlas.AtlasClient.DATA_SET_SUPER_TYPE; -import static org.apache.atlas.AtlasClient.PROCESS_SUPER_TYPE; import static org.apache.atlas.AtlasErrorCode.INSTANCE_LINEAGE_QUERY_FAILED; import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; import static org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection.*; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/AbstractStorageBasedAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/AbstractStorageBasedAuditRepository.java index 538f150830..65d4ceb505 100644 --- 
a/repository/src/main/java/org/apache/atlas/repository/audit/AbstractStorageBasedAuditRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/AbstractStorageBasedAuditRepository.java @@ -17,18 +17,16 @@ */ package org.apache.atlas.repository.audit; -import com.google.common.annotations.VisibleForTesting; import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.listener.ActiveStateChangeHandler; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.model.audit.EntityAuditEventV2; import org.apache.atlas.model.audit.EntityAuditSearchResult; import org.apache.atlas.service.Service; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.configuration.Configuration; -import org.apache.hadoop.hbase.util.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,14 +36,13 @@ * This abstract base class should be used when adding support for an audit storage backend. */ public abstract class AbstractStorageBasedAuditRepository implements Service, EntityAuditRepository, ActiveStateChangeHandler { - private static final Logger LOG = LoggerFactory.getLogger(HBaseBasedAuditRepository.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractStorageBasedAuditRepository.class); private static final String AUDIT_REPOSITORY_MAX_SIZE_PROPERTY = "atlas.hbase.client.keyvalue.maxsize"; private static final String AUDIT_EXCLUDE_ATTRIBUTE_PROPERTY = "atlas.audit.hbase.entity"; private static final long ATLAS_HBASE_KEYVALUE_DEFAULT_SIZE = 1024 * 1024; public static final String CONFIG_PREFIX = "atlas.audit"; public static final String CONFIG_PERSIST_ENTITY_DEFINITION = CONFIG_PREFIX + ".persistEntityDefinition"; - protected static final String FIELD_SEPARATOR = ":"; protected static Configuration APPLICATION_PROPERTIES = null; protected Map> auditExcludedAttributesCache = new HashMap<>(); @@ -143,22 +140,5 @@ protected void initApplicationProperties() { } } - /** - * Only should be used to initialize Application properties for testing. - * - * @param config - */ - @VisibleForTesting - protected void setApplicationProperties(Configuration config) { - APPLICATION_PROPERTIES = config; - } - - protected byte[] getKey(String id, Long ts, int index) { - assert id != null : "entity id can't be null"; - assert ts != null : "timestamp can't be null"; - String keyStr = id + FIELD_SEPARATOR + ts + FIELD_SEPARATOR + index + FIELD_SEPARATOR + System.currentTimeMillis(); - return Bytes.toBytes(keyStr); - } - public abstract EntityAuditSearchResult searchEvents(String queryString) throws AtlasBaseException; } diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/CassandraBasedAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/CassandraBasedAuditRepository.java deleted file mode 100644 index 2fdb205bad..0000000000 --- a/repository/src/main/java/org/apache/atlas/repository/audit/CassandraBasedAuditRepository.java +++ /dev/null @@ -1,262 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.repository.audit; - -import com.datastax.driver.core.BatchStatement; -import com.datastax.driver.core.BoundStatement; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.PreparedStatement; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.annotation.ConditionalOnAtlasProperty; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.model.audit.EntityAuditEventV2; -import org.apache.atlas.model.audit.EntityAuditSearchResult; -import org.apache.commons.lang.NotImplementedException; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.inject.Singleton; - -/** - * This class provides cassandra support as the backend for audit storage support. - */ -@Singleton -@Component -@ConditionalOnAtlasProperty(property = "atlas.EntityAuditRepository.impl") -public class CassandraBasedAuditRepository extends AbstractStorageBasedAuditRepository { - private static final Logger LOG = LoggerFactory.getLogger(CassandraBasedAuditRepository.class); - - // Default keyspace to store the audit entries - private static final String DEFAULT_KEYSPACE = "atlas_audit"; - // When running in embedded cassandra mode, this is the default cluster name used - private static final String DEFAULT_CLUSTER_NAME = "JanusGraph"; - // Default cassandra port - private static final int DEFAULT_PORT = 9042; - private static final int DEFAULT_REPLICATION_FACTOR = 3; - // The environment variable that tells us we are running in embedded mode - public static final String MANAGE_EMBEDDED_CASSANDRA = "MANAGE_EMBEDDED_CASSANDRA"; - - // Application properties - public static final String CASSANDRA_HOSTNAME_PROPERTY = "atlas.graph.storage.hostname"; - public static final String CASSANDRA_CLUSTERNAME_PROPERTY = "atlas.graph.storage.clustername"; - public static final String CASSANDRA_PORT_PROPERTY = "atlas.graph.storage.port"; - public static final String CASSANDRA_REPLICATION_FACTOR_PROPERTY = "atlas.EntityAuditRepository.replicationFactor"; - public static final String CASSANDRA_AUDIT_KEYSPACE_PROPERTY = "atlas.EntityAuditRepository.keyspace"; - - private static final String AUDIT_TABLE_SCHEMA = - "CREATE TABLE audit(entityid text, " - + "created bigint, " - + "action text, " - + "user text, " - + "detail text, " - + "entity text, " - + "PRIMARY KEY (entityid, created)" - + ") WITH CLUSTERING ORDER BY (created DESC);"; - - private static final String ENTITYID = "entityid"; - private static final String CREATED = "created"; - private static final String ACTION = "action"; - private static final String USER = "user"; - private static final String DETAIL = "detail"; - private static final String ENTITY = "entity"; - - private static final String 
INSERT_STATEMENT_TEMPLATE = "INSERT INTO audit (entityid,created,action,user,detail,entity) VALUES (?,?,?,?,?,?)"; - private static final String SELECT_STATEMENT_TEMPLATE = "select * from audit where entityid=? order by created desc limit 10;"; - private static final String SELECT_DATE_STATEMENT_TEMPLATE = "select * from audit where entityid=? and created<=? order by created desc limit 10;"; - - private static final int AUDITS_INSERT_BATCH_SIZE = 20; - - private String keyspace; - private int replicationFactor; - private Session cassSession; - private String clusterName; - private int port; - - private Map> auditExcludedAttributesCache = new HashMap<>(); - private PreparedStatement insertStatement; - private PreparedStatement selectStatement; - private PreparedStatement selectDateStatement; - - @Override - public void putEventsV1(List events) throws AtlasException { - BoundStatement stmt = new BoundStatement(insertStatement); - BatchStatement batch = new BatchStatement(); - events.forEach(event -> batch.add(stmt.bind(event.getEntityId(), event.getTimestamp(), - event.getAction().toString(), event.getUser(), event.getDetails(), - (persistEntityDefinition ? event.getEntityDefinitionString() : null)))); - cassSession.execute(batch); - } - - @Override - public void putEventsV2(List events) throws AtlasBaseException { - BatchStatement batch = new BatchStatement(); - for (EntityAuditEventV2 event : events) { - BoundStatement stmt = new BoundStatement(insertStatement); - batch.add(stmt.bind(event.getEntityId(), event.getTimestamp(), - event.getAction().toString(), event.getUser(), event.getDetails(), - (persistEntityDefinition ? event.getEntityDefinitionString() : null))); - } - cassSession.execute(batch); - } - - private BoundStatement getSelectStatement(String entityId, String startKey) { - BoundStatement stmt; - if (StringUtils.isEmpty(startKey)) { - stmt = new BoundStatement(selectStatement).bind(entityId); - } else { - stmt = new BoundStatement(selectDateStatement).bind(entityId, Long.valueOf(startKey.split(FIELD_SEPARATOR)[1])); - } - return stmt; - } - - @Override - public List listEventsV1(String entityId, String startKey, short maxResults) throws AtlasException { - if (LOG.isDebugEnabled()) { - LOG.debug("Listing events for entity id {}, starting timestamp {}, #records {}", entityId, startKey, maxResults); - } - - ResultSet rs = cassSession.execute(getSelectStatement(entityId, startKey)); - List entityResults = new ArrayList<>(); - for (Row row : rs) { - String rowEntityId = row.getString(ENTITYID); - if (!entityId.equals(rowEntityId)) { - continue; - } - EntityAuditEvent event = new EntityAuditEvent(); - event.setEntityId(rowEntityId); - event.setAction(EntityAuditEvent.EntityAuditAction.fromString(row.getString(ACTION))); - event.setDetails(row.getString(DETAIL)); - event.setUser(row.getString(USER)); - event.setTimestamp(row.getLong(CREATED)); - event.setEventKey(rowEntityId + ":" + event.getTimestamp()); - if (persistEntityDefinition) { - event.setEntityDefinition(row.getString(ENTITY)); - } - entityResults.add(event); - } - return entityResults; - } - - @Override - public List listEventsV2(String entityId, EntityAuditEventV2.EntityAuditActionV2 auditAction, String startKey, short maxResults) throws AtlasBaseException { - if (LOG.isDebugEnabled()) { - LOG.debug("Listing events for entity id {}, starting timestamp {}, #records {}", entityId, startKey, maxResults); - } - - ResultSet rs = cassSession.execute(getSelectStatement(entityId, startKey)); - List entityResults = new 
ArrayList<>(); - for (Row row : rs) { - String rowEntityId = row.getString(ENTITYID); - if (!entityId.equals(rowEntityId)) { - continue; - } - EntityAuditEventV2 event = new EntityAuditEventV2(); - event.setEntityId(rowEntityId); - event.setAction(EntityAuditEventV2.EntityAuditActionV2.fromString(row.getString(ACTION))); - event.setDetails(row.getString(DETAIL)); - event.setUser(row.getString(USER)); - event.setTimestamp(row.getLong(CREATED)); - event.setEventKey(rowEntityId + ":" + event.getTimestamp()); - if (persistEntityDefinition) { - event.setEntityDefinition(row.getString(ENTITY)); - } - entityResults.add(event); - } - return entityResults; - } - - @Override - public List listEventsV2(String entityId, EntityAuditEventV2.EntityAuditActionV2 auditAction, String sortByColumn, boolean sortOrderDesc, int offset, short limit) throws AtlasBaseException { - throw new NotImplementedException(); - } - - @Override - public EntityAuditSearchResult searchEvents(String queryString) throws AtlasBaseException { - return null; - } - - @Override - public Set getEntitiesWithTagChanges(long fromTimestamp, long toTimestamp) throws AtlasBaseException { - throw new NotImplementedException(); - } - - @Override - public void start() throws AtlasException { - initApplicationProperties(); - initializeSettings(); - startInternal(); - } - - void initializeSettings() { - keyspace = APPLICATION_PROPERTIES.getString(CASSANDRA_AUDIT_KEYSPACE_PROPERTY, DEFAULT_KEYSPACE); - replicationFactor = APPLICATION_PROPERTIES.getInt(CASSANDRA_REPLICATION_FACTOR_PROPERTY, DEFAULT_REPLICATION_FACTOR); - clusterName = APPLICATION_PROPERTIES.getString(CASSANDRA_CLUSTERNAME_PROPERTY, DEFAULT_CLUSTER_NAME); - port = APPLICATION_PROPERTIES.getInt(CASSANDRA_PORT_PROPERTY, DEFAULT_PORT); - } - - @VisibleForTesting - void startInternal() throws AtlasException { - createSession(); - } - - void createSession() throws AtlasException { - Cluster.Builder cassandraClusterBuilder = Cluster.builder(); - - String hostname = APPLICATION_PROPERTIES.getString(CASSANDRA_HOSTNAME_PROPERTY, "localhost"); - Cluster cluster = cassandraClusterBuilder.addContactPoint(hostname).withClusterName(clusterName).withPort(port).build(); - try { - cassSession = cluster.connect(); - if (cluster.getMetadata().getKeyspace(keyspace) == null) { - String query = "CREATE KEYSPACE " + keyspace + " WITH replication " - + "= {'class':'SimpleStrategy', 'replication_factor':" + replicationFactor + "}; "; - cassSession.execute(query); - cassSession.close(); - cassSession = cluster.connect(keyspace); - // create the audit table - cassSession.execute(AUDIT_TABLE_SCHEMA); - } else { - cassSession.close(); - cassSession = cluster.connect(keyspace); - } - - insertStatement = cassSession.prepare(INSERT_STATEMENT_TEMPLATE.replace("KEYSPACE", keyspace)); - selectStatement = cassSession.prepare(SELECT_STATEMENT_TEMPLATE.replace("KEYSPACE", keyspace)); - selectDateStatement = cassSession.prepare(SELECT_DATE_STATEMENT_TEMPLATE.replace("KEYSPACE", keyspace)); - } catch (Exception e) { - throw new AtlasException(e); - } - } - - @Override - public void stop() throws AtlasException { - cassSession.close(); - } -} diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java index cbab135606..1797736ea5 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java +++ 
b/repository/src/main/java/org/apache/atlas/repository/audit/ESBasedAuditRepository.java @@ -23,10 +23,10 @@ import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; import org.apache.atlas.RequestContext; import org.apache.atlas.annotation.ConditionalOnAtlasProperty; import org.apache.atlas.exception.AtlasBaseException; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.model.audit.EntityAuditEventV2; import org.apache.atlas.model.audit.EntityAuditSearchResult; import org.apache.atlas.type.AtlasType; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java index 6c484c130b..03da3a8068 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java @@ -19,8 +19,8 @@ package org.apache.atlas.repository.audit; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.EntityAuditEvent.EntityAuditAction; +import org.apache.atlas.model.EntityAuditEvent; +import org.apache.atlas.model.EntityAuditEvent.EntityAuditAction; import org.apache.atlas.RequestContext; import org.apache.atlas.listener.EntityChangeListener; import org.apache.atlas.model.glossary.AtlasGlossaryTerm; @@ -46,8 +46,8 @@ import java.util.Map; import java.util.Set; -import static org.apache.atlas.EntityAuditEvent.EntityAuditAction.TERM_ADD; -import static org.apache.atlas.EntityAuditEvent.EntityAuditAction.TERM_DELETE; +import static org.apache.atlas.model.EntityAuditEvent.EntityAuditAction.TERM_ADD; +import static org.apache.atlas.model.EntityAuditEvent.EntityAuditAction.TERM_DELETE; /** * Listener on entity create/update/delete, tag add/delete. Adds the corresponding audit event to the audit repository. diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java index d0dbecad38..e6708340ff 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListenerV2.java @@ -20,7 +20,7 @@ import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent.EntityAuditAction; +import org.apache.atlas.model.EntityAuditEvent.EntityAuditAction; import org.apache.atlas.RequestContext; import org.apache.atlas.annotation.EnableConditional; import org.apache.atlas.model.audit.EntityAuditEventV2; @@ -102,7 +102,7 @@ public EntityAuditListenerV2(Set auditRepositories, Atlas } private long getAuditMaxSize(EntityAuditRepository auditRepository, int entityCount) { - boolean isCassandraRepository = auditRepository.getClass().equals(CassandraBasedAuditRepository.class); + boolean isCassandraRepository = false; // Subtracting 150 for other details in the Insert statement. long auditMaxSize = isCassandraRepository ? 
((CASSANDRA_AUDIT_REPOSITORY_MAX_SIZE_DEFAULT / entityCount) - 150): AUDIT_REPOSITORY_MAX_SIZE_DEFAULT; return auditMaxSize; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditRepository.java index 7add32d217..11dd2d773c 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditRepository.java @@ -19,7 +19,7 @@ package org.apache.atlas.repository.audit; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.model.audit.EntityAuditEventV2; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.audit.EntityAuditSearchResult; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepository.java deleted file mode 100644 index 556cf8a740..0000000000 --- a/repository/src/main/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepository.java +++ /dev/null @@ -1,772 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.repository.audit; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.RequestContext; -import org.apache.atlas.annotation.ConditionalOnAtlasProperty; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.ha.HAConfiguration; -import org.apache.atlas.model.audit.EntityAuditEventV2; -import org.apache.atlas.model.audit.EntityAuditEventV2.EntityAuditActionV2; -import org.apache.atlas.model.audit.EntityAuditSearchResult; -import org.apache.atlas.utils.AtlasPerfMetrics; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; -import org.apache.hadoop.hbase.filter.CompareFilter; -import org.apache.hadoop.hbase.filter.Filter; -import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; -import org.apache.hadoop.hbase.filter.PageFilter; -import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; -import org.apache.hadoop.hbase.io.compress.Compression; -import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; -import org.apache.hadoop.hbase.regionserver.BloomType; -import org.apache.hadoop.hbase.util.Bytes; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; -import org.springframework.core.annotation.Order; - -import javax.inject.Singleton; -import java.io.Closeable; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - - -/** - * HBase based repository for entity audit events - *
    - * Table -> 1, ATLAS_ENTITY_EVENTS
    - * Key -> entity id + timestamp
    - * Column Family -> 1,dt
    - * Columns -> action, user, detail
    - * versions -> 1
    - *
    - * Note: The timestamp in the key is assumed to be timestamp in milli seconds. Since the key is - * entity id + timestamp, and only 1 version is kept, there can be just 1 audit event per entity - * id + timestamp. This is ok for one atlas server. But if there are more than one atlas servers, - * we should use server id in the key - */ -@Singleton -@Component -@ConditionalOnAtlasProperty(property = "atlas.EntityAuditRepository.impl", isDefault = true) -@Order(0) -public class HBaseBasedAuditRepository extends AbstractStorageBasedAuditRepository { - private static final Logger LOG = LoggerFactory.getLogger(HBaseBasedAuditRepository.class); - - public static final String CONFIG_PREFIX = "atlas.audit"; - public static final String CONFIG_TABLE_NAME = CONFIG_PREFIX + ".hbase.tablename"; - public static final String DEFAULT_TABLE_NAME = "ATLAS_ENTITY_AUDIT_EVENTS"; - public static final String CONFIG_PERSIST_ENTITY_DEFINITION = CONFIG_PREFIX + ".persistEntityDefinition"; - - public static final byte[] COLUMN_FAMILY = Bytes.toBytes("dt"); - public static final byte[] COLUMN_ACTION = Bytes.toBytes("a"); - public static final byte[] COLUMN_DETAIL = Bytes.toBytes("d"); - public static final byte[] COLUMN_USER = Bytes.toBytes("u"); - public static final byte[] COLUMN_DEFINITION = Bytes.toBytes("f"); - - private static final String AUDIT_REPOSITORY_MAX_SIZE_PROPERTY = "atlas.hbase.client.keyvalue.maxsize"; - private static final String AUDIT_EXCLUDE_ATTRIBUTE_PROPERTY = "atlas.audit.hbase.entity"; - private static final String FIELD_SEPARATOR = ":"; - private static final long ATLAS_HBASE_KEYVALUE_DEFAULT_SIZE = 1024 * 1024; - private static Configuration APPLICATION_PROPERTIES = null; - private static final int DEFAULT_CACHING = 200; - - private static boolean persistEntityDefinition; - - private Map> auditExcludedAttributesCache = new HashMap<>(); - - static { - try { - persistEntityDefinition = ApplicationProperties.get().getBoolean(CONFIG_PERSIST_ENTITY_DEFINITION, false); - } catch (AtlasException e) { - throw new RuntimeException(e); - } - } - private TableName tableName; - private Connection connection; - - /** - * Add events to the event repository - * @param events events to be added - * @throws AtlasException - */ - @Override - public void putEventsV1(EntityAuditEvent... events) throws AtlasException { - putEventsV1(Arrays.asList(events)); - } - - /** - * Add events to the event repository - * @param events events to be added - * @throws AtlasException - */ - @Override - public void putEventsV1(List events) throws AtlasException { - if (LOG.isDebugEnabled()) { - LOG.debug("Putting {} events", events.size()); - } - - Table table = null; - - try { - table = connection.getTable(tableName); - List puts = new ArrayList<>(events.size()); - - for (int index = 0; index < events.size(); index++) { - EntityAuditEvent event = events.get(index); - - if (LOG.isDebugEnabled()) { - LOG.debug("Adding entity audit event {}", event); - } - - Put put = new Put(getKey(event.getEntityId(), event.getTimestamp(), index)); - - addColumn(put, COLUMN_ACTION, event.getAction()); - addColumn(put, COLUMN_USER, event.getUser()); - addColumn(put, COLUMN_DETAIL, event.getDetails()); - if (persistEntityDefinition) { - addColumn(put, COLUMN_DEFINITION, event.getEntityDefinitionString()); - } - - puts.add(put); - } - - table.put(puts); - } catch (IOException e) { - throw new AtlasException(e); - } finally { - close(table); - } - } - - @Override - public void putEventsV2(EntityAuditEventV2... 
events) throws AtlasBaseException { - putEventsV2(Arrays.asList(events)); - } - - @Override - public void putEventsV2(List events) throws AtlasBaseException { - if (LOG.isDebugEnabled()) { - LOG.debug("Putting {} events", events.size()); - } - - Table table = null; - - try { - table = connection.getTable(tableName); - List puts = new ArrayList<>(events.size()); - - for (int index = 0; index < events.size(); index++) { - EntityAuditEventV2 event = events.get(index); - - if (LOG.isDebugEnabled()) { - LOG.debug("Adding entity audit event {}", event); - } - - Put put = new Put(getKey(event.getEntityId(), event.getTimestamp(), index)); - - addColumn(put, COLUMN_ACTION, event.getAction()); - addColumn(put, COLUMN_USER, event.getUser()); - addColumn(put, COLUMN_DETAIL, event.getDetails()); - - if (persistEntityDefinition) { - addColumn(put, COLUMN_DEFINITION, event.getEntityDefinitionString()); - } - - puts.add(put); - } - - table.put(puts); - } catch (IOException e) { - throw new AtlasBaseException(e); - } finally { - try { - close(table); - } catch (AtlasException e) { - throw new AtlasBaseException(e); - } - } - } - - @Override - public List listEventsV2(String entityId, EntityAuditActionV2 auditAction, String startKey, short maxResultCount) throws AtlasBaseException { - if (LOG.isDebugEnabled()) { - LOG.debug("Listing events for entity id {}, operation {}, starting key{}, maximum result count {}", entityId, auditAction, startKey, maxResultCount); - } - AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("listSortedEventsV2"); - Table table = null; - ResultScanner scanner = null; - - try { - table = connection.getTable(tableName); - - /** - * Scan Details: - * In hbase, the events are stored in increasing order of timestamp. So, doing reverse scan to get the latest event first - * Page filter is set to limit the number of results returned if needed - * Stop row is set to the entity id to avoid going past the current entity while scanning - * SingleColumnValueFilter is been used to match the operation at COLUMN_FAMILY->COLUMN_ACTION - * Small is set to true to optimise RPC calls as the scanner is created per request - * setCaching(DEFAULT_CACHING) will increase the payload size to DEFAULT_CACHING rows per remote call and - * both types of next() take these settings into account. - */ - Scan scan = new Scan().setReversed(true) - .setCaching(DEFAULT_CACHING) - .setSmall(true); - - if(maxResultCount > -1) { - scan.setFilter(new PageFilter(maxResultCount)); - } - - if (auditAction != null) { - Filter filterAction = new SingleColumnValueFilter(COLUMN_FAMILY, - COLUMN_ACTION, CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes(auditAction.toString()))); - scan.setFilter(filterAction); - } - - if(StringUtils.isNotBlank(entityId)) { - scan.setStopRow(Bytes.toBytes(entityId)); - } - - if (StringUtils.isEmpty(startKey)) { - //Set start row to entity id + max long value - byte[] entityBytes = getKey(entityId, Long.MAX_VALUE, Integer.MAX_VALUE); - scan = scan.setStartRow(entityBytes); - } else { - scan = scan.setStartRow(Bytes.toBytes(startKey)); - } - - scanner = table.getScanner(scan); - List events = new ArrayList<>(); - - Result result; - - //PageFilter doesn't ensure maxResultCount results are returned. The filter is per region server. 
- //So, adding extra check on maxResultCount - while ((result = scanner.next()) != null && (maxResultCount == -1 || events.size() < maxResultCount)) { - - EntityAuditEventV2 event = fromKeyV2(result.getRow()); - - //In case the user sets random start key, guarding against random events if entityId is provided - if (StringUtils.isNotBlank(entityId) && !event.getEntityId().equals(entityId)) { - continue; - } - - event.setUser(getResultString(result, COLUMN_USER)); - event.setAction(EntityAuditActionV2.fromString(getResultString(result, COLUMN_ACTION))); - event.setDetails(getResultString(result, COLUMN_DETAIL)); - - if (persistEntityDefinition) { - String colDef = getResultString(result, COLUMN_DEFINITION); - - if (colDef != null) { - event.setEntityDefinition(colDef); - } - } - - events.add(event); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Got events for entity id {}, operation {}, starting key{}, maximum result count {}, #records returned {}", - entityId, auditAction.toString(), startKey, maxResultCount, events.size()); - } - - return events; - } catch (IOException e) { - throw new AtlasBaseException(e); - } finally { - try { - close(scanner); - close(table); - RequestContext.get().endMetricRecord(metric); - } catch (AtlasException e) { - throw new AtlasBaseException(e); - } - } - } - - @Override - public List listEventsV2(String entityId, EntityAuditEventV2.EntityAuditActionV2 auditAction, String sortByColumn, boolean sortOrderDesc, int offset, short limit) throws AtlasBaseException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HBaseBasedAuditRepository.listEventsV2(entityId={}, auditAction={}, sortByColumn={}, sortOrderDesc={}, offset={}, limit={})", entityId, auditAction, sortByColumn, offset, limit); - } - - AtlasPerfMetrics.MetricRecorder metric = RequestContext.get().startMetricRecord("listEventsV2"); - - if (sortByColumn == null) { - sortByColumn = EntityAuditEventV2.SORT_COLUMN_TIMESTAMP; - } - - if (offset < 0) { - offset = 0; - } - - if (limit < 0) { - limit = 100; - } - - try (Table table = connection.getTable(tableName)) { - /* - * HBase Does not support query with sorted results. To support this API inmemory sort has to be performed. - * Audit entry can potentially have entire entity dumped into it. Loading entire audit entries for an entity can be - * memory intensive. Therefore we load audit entries with limited columns first, perform sort on this light weight list, - * then get the relevant section by removing offsets and reducing to limits. With this reduced list we create - * MultiRowRangeFilter and then again scan the table to get all the columns from the table this time. 
- */ - Scan scan = new Scan().setReversed(true) - .setCaching(DEFAULT_CACHING) - .setSmall(true) - .setStopRow(Bytes.toBytes(entityId)) - .setStartRow(getKey(entityId, Long.MAX_VALUE, Integer.MAX_VALUE)) - .addColumn(COLUMN_FAMILY, COLUMN_ACTION) - .addColumn(COLUMN_FAMILY, COLUMN_USER); - - if (auditAction != null) { - Filter filterAction = new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_ACTION, CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes(auditAction.toString()))); - - scan.setFilter(filterAction); - } - - List events = new ArrayList<>(); - - try (ResultScanner scanner = table.getScanner(scan)) { - for (Result result = scanner.next(); result != null; result = scanner.next()) { - EntityAuditEventV2 event = fromKeyV2(result.getRow()); - - event.setUser(getResultString(result, COLUMN_USER)); - event.setAction(EntityAuditActionV2.fromString(getResultString(result, COLUMN_ACTION))); - - events.add(event); - } - } - - EntityAuditEventV2.sortEvents(events, sortByColumn, sortOrderDesc); - - events = events.subList(Math.min(events.size(), offset), Math.min(events.size(), offset + limit)); - - if (events.size() > 0) { - List ranges = new ArrayList<>(); - - events.forEach(e -> { - ranges.add(new MultiRowRangeFilter.RowRange(e.getEventKey(), true, e.getEventKey(), true)); - }); - - scan = new Scan().setReversed(true) - .setCaching(DEFAULT_CACHING) - .setSmall(true) - .setStopRow(Bytes.toBytes(entityId)) - .setStartRow(getKey(entityId, Long.MAX_VALUE, Integer.MAX_VALUE)) - .setFilter(new MultiRowRangeFilter(ranges)); - - try (ResultScanner scanner = table.getScanner(scan)) { - events = new ArrayList<>(); - - for (Result result = scanner.next(); result != null; result = scanner.next()) { - EntityAuditEventV2 event = fromKeyV2(result.getRow()); - - event.setUser(getResultString(result, COLUMN_USER)); - event.setAction(EntityAuditActionV2.fromString(getResultString(result, COLUMN_ACTION))); - event.setDetails(getResultString(result, COLUMN_DETAIL)); - - if (persistEntityDefinition) { - String colDef = getResultString(result, COLUMN_DEFINITION); - - if (colDef != null) { - event.setEntityDefinition(colDef); - } - } - - events.add(event); - } - } - - EntityAuditEventV2.sortEvents(events, sortByColumn, sortOrderDesc); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== HBaseBasedAuditRepository.listEventsV2(entityId={}, auditAction={}, sortByColumn={}, sortOrderDesc={}, offset={}, limit={}): #recored returned {}", entityId, auditAction, sortByColumn, offset, limit, events.size()); - } - - return events; - } catch (IOException e) { - throw new AtlasBaseException(e); - } finally { - RequestContext.get().endMetricRecord(metric); - } - } - - @Override - public EntityAuditSearchResult searchEvents(String queryString) throws AtlasBaseException { - return null; - } - - @Override - public List listEvents(String entityId, String startKey, short maxResults) throws AtlasBaseException { - List ret = listEventsV2(entityId, null, startKey, maxResults); - - try { - if (CollectionUtils.isEmpty(ret)) { - ret = listEventsV1(entityId, startKey, maxResults); - } - } catch (AtlasException e) { - throw new AtlasBaseException(e); - } - - return ret; - } - - private void addColumn(Put put, byte[] columnName, T columnValue) { - if (columnValue != null && !columnValue.toString().isEmpty()) { - put.addColumn(COLUMN_FAMILY, columnName, Bytes.toBytes(columnValue.toString())); - } - } - - private byte[] getKey(String id, Long ts) { - assert id != null : "entity id can't be null"; - assert ts != null : "timestamp 
can't be null"; - String keyStr = id + FIELD_SEPARATOR + ts; - return Bytes.toBytes(keyStr); - } - - /** - * List events for the given entity id in decreasing order of timestamp, from the given startKey. Returns n results - * @param entityId entity id - * @param startKey key for the first event to be returned, used for pagination - * @param n number of events to be returned - * @return list of events - * @throws AtlasException - */ - public List listEventsV1(String entityId, String startKey, short n) - throws AtlasException { - if (LOG.isDebugEnabled()) { - LOG.debug("Listing events for entity id {}, starting timestamp {}, #records {}", entityId, startKey, n); - } - - Table table = null; - ResultScanner scanner = null; - try { - table = connection.getTable(tableName); - - /** - * Scan Details: - * In hbase, the events are stored in increasing order of timestamp. So, doing reverse scan to get the latest event first - * Page filter is set to limit the number of results returned. - * Stop row is set to the entity id to avoid going past the current entity while scanning - * small is set to true to optimise RPC calls as the scanner is created per request - */ - Scan scan = new Scan().setReversed(true).setFilter(new PageFilter(n)) - .setStopRow(Bytes.toBytes(entityId)) - .setCaching(n) - .setSmall(true); - if (StringUtils.isEmpty(startKey)) { - //Set start row to entity id + max long value - byte[] entityBytes = getKey(entityId, Long.MAX_VALUE, Integer.MAX_VALUE); - scan = scan.setStartRow(entityBytes); - } else { - scan = scan.setStartRow(Bytes.toBytes(startKey)); - } - scanner = table.getScanner(scan); - Result result; - List events = new ArrayList<>(); - - //PageFilter doesn't ensure n results are returned. The filter is per region server. - //So, adding extra check on n here - while ((result = scanner.next()) != null && events.size() < n) { - EntityAuditEvent event = fromKey(result.getRow()); - - //In case the user sets random start key, guarding against random events - if (!event.getEntityId().equals(entityId)) { - continue; - } - event.setUser(getResultString(result, COLUMN_USER)); - event.setAction(EntityAuditEvent.EntityAuditAction.fromString(getResultString(result, COLUMN_ACTION))); - event.setDetails(getResultString(result, COLUMN_DETAIL)); - if (persistEntityDefinition) { - String colDef = getResultString(result, COLUMN_DEFINITION); - if (colDef != null) { - event.setEntityDefinition(colDef); - } - } - events.add(event); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Got events for entity id {}, starting timestamp {}, #records {}", entityId, startKey, events.size()); - } - - return events; - } catch (IOException e) { - throw new AtlasException(e); - } finally { - close(scanner); - close(table); - } - } - - @Override - public long repositoryMaxSize() { - long ret; - initApplicationProperties(); - - if (APPLICATION_PROPERTIES == null) { - ret = ATLAS_HBASE_KEYVALUE_DEFAULT_SIZE; - } else { - ret = APPLICATION_PROPERTIES.getLong(AUDIT_REPOSITORY_MAX_SIZE_PROPERTY, ATLAS_HBASE_KEYVALUE_DEFAULT_SIZE); - } - - return ret; - } - - @Override - public List getAuditExcludeAttributes(String entityType) { - List ret = null; - - initApplicationProperties(); - - if (auditExcludedAttributesCache.containsKey(entityType)) { - ret = auditExcludedAttributesCache.get(entityType); - } else if (APPLICATION_PROPERTIES != null) { - String[] excludeAttributes = APPLICATION_PROPERTIES.getStringArray(AUDIT_EXCLUDE_ATTRIBUTE_PROPERTY + "." + - entityType + "." 
+ "attributes.exclude"); - - if (excludeAttributes != null) { - ret = Arrays.asList(excludeAttributes); - } - - auditExcludedAttributesCache.put(entityType, ret); - } - - return ret; - } - - - private String getResultString(Result result, byte[] columnName) { - byte[] rawValue = result.getValue(COLUMN_FAMILY, columnName); - if ( rawValue != null) { - return Bytes.toString(rawValue); - } - return null; - } - - private EntityAuditEvent fromKey(byte[] keyBytes) { - String key = Bytes.toString(keyBytes); - EntityAuditEvent event = new EntityAuditEvent(); - if (StringUtils.isNotEmpty(key)) { - String[] parts = key.split(FIELD_SEPARATOR); - event.setEntityId(parts[0]); - event.setTimestamp(Long.valueOf(parts[1])); - event.setEventKey(key); - } - return event; - } - - private EntityAuditEventV2 fromKeyV2(byte[] keyBytes) { - String key = Bytes.toString(keyBytes); - EntityAuditEventV2 event = new EntityAuditEventV2(); - - if (StringUtils.isNotEmpty(key)) { - String[] parts = key.split(FIELD_SEPARATOR); - event.setEntityId(parts[0]); - event.setTimestamp(Long.valueOf(parts[1])); - event.setEventKey(key); - } - - return event; - } - - private void close(Closeable closeable) throws AtlasException { - if (closeable != null) { - try { - closeable.close(); - } catch (IOException e) { - throw new AtlasException(e); - } - } - } - - /** - * Converts atlas' application properties to hadoop conf - * @return - * @throws AtlasException - * @param atlasConf - */ - public static org.apache.hadoop.conf.Configuration getHBaseConfiguration(Configuration atlasConf) throws AtlasException { - Properties properties = ApplicationProperties.getSubsetAsProperties(atlasConf, CONFIG_PREFIX); - org.apache.hadoop.conf.Configuration hbaseConf = HBaseConfiguration.create(); - - for (String key : properties.stringPropertyNames()) { - String value = properties.getProperty(key); - - LOG.info("adding HBase configuration: {}={}", key, value); - - hbaseConf.set(key, value); - } - - return hbaseConf; - } - - private void createTableIfNotExists() throws AtlasException { - Admin admin = null; - try { - admin = connection.getAdmin(); - LOG.info("Checking if table {} exists", tableName.getNameAsString()); - if (!admin.tableExists(tableName)) { - LOG.info("Creating table {}", tableName.getNameAsString()); - HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); - HColumnDescriptor columnFamily = new HColumnDescriptor(COLUMN_FAMILY); - columnFamily.setMaxVersions(1); - columnFamily.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF); - columnFamily.setCompressionType(Compression.Algorithm.GZ); - columnFamily.setBloomFilterType(BloomType.ROW); - tableDescriptor.addFamily(columnFamily); - admin.createTable(tableDescriptor); - } else { - LOG.info("Table {} exists", tableName.getNameAsString()); - } - } catch (IOException e) { - throw new AtlasException(e); - } finally { - close(admin); - } - } - - @Override - public Set getEntitiesWithTagChanges(long fromTimestamp, long toTimestamp) throws AtlasBaseException { - final String classificationUpdatesAction = "CLASSIFICATION_"; - - if (LOG.isDebugEnabled()) { - LOG.debug("Listing events for fromTimestamp {}, toTimestamp {}, action {}", fromTimestamp, toTimestamp); - } - - Table table = null; - ResultScanner scanner = null; - - try { - Set guids = new HashSet<>(); - - table = connection.getTable(tableName); - - byte[] filterValue = Bytes.toBytes(classificationUpdatesAction); - BinaryPrefixComparator binaryPrefixComparator = new BinaryPrefixComparator(filterValue); - 
SingleColumnValueFilter filter = new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_ACTION, CompareFilter.CompareOp.EQUAL, binaryPrefixComparator); - Scan scan = new Scan().setFilter(filter).setTimeRange(fromTimestamp, toTimestamp); - - Result result; - scanner = table.getScanner(scan); - while ((result = scanner.next()) != null) { - EntityAuditEvent event = fromKey(result.getRow()); - - if (event == null) { - continue; - } - - guids.add(event.getEntityId()); - } - - return guids; - } catch (IOException e) { - throw new AtlasBaseException(e); - } finally { - try { - close(scanner); - close(table); - } catch (AtlasException e) { - throw new AtlasBaseException(e); - } - } - } - - @Override - public void start() throws AtlasException { - Configuration configuration = ApplicationProperties.get(); - startInternal(configuration, getHBaseConfiguration(configuration)); - } - - @VisibleForTesting - void startInternal(Configuration atlasConf, - org.apache.hadoop.conf.Configuration hbaseConf) throws AtlasException { - - String tableNameStr = atlasConf.getString(CONFIG_TABLE_NAME, DEFAULT_TABLE_NAME); - tableName = TableName.valueOf(tableNameStr); - - try { - connection = createConnection(hbaseConf); - } catch (IOException e) { - throw new AtlasException(e); - } - - if (!HAConfiguration.isHAEnabled(atlasConf)) { - LOG.info("HA is disabled. Hence creating table on startup."); - createTableIfNotExists(); - } - } - - @VisibleForTesting - protected Connection createConnection(org.apache.hadoop.conf.Configuration hbaseConf) throws IOException { - return ConnectionFactory.createConnection(hbaseConf); - } - - @Override - public void stop() throws AtlasException { - close(connection); - } - - @Override - public void instanceIsActive() throws AtlasException { - LOG.info("Reacting to active: Creating HBase table for Audit if required."); - createTableIfNotExists(); - } - - @Override - public void instanceIsPassive() { - LOG.info("Reacting to passive: No action for now."); - } - - @Override - public int getHandlerOrder() { - return HandlerOrder.AUDIT_REPOSITORY.getOrder(); - } -} diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/InMemoryEntityAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/InMemoryEntityAuditRepository.java index 4516beb1ea..fe0cb069ef 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/InMemoryEntityAuditRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/InMemoryEntityAuditRepository.java @@ -19,7 +19,7 @@ package org.apache.atlas.repository.audit; import org.apache.atlas.AtlasException; -import org.apache.atlas.EntityAuditEvent; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.annotation.ConditionalOnAtlasProperty; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.audit.EntityAuditEventV2; diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/NoopEntityAuditRepository.java b/repository/src/main/java/org/apache/atlas/repository/audit/NoopEntityAuditRepository.java index c716aa6071..8b9a5c3dcb 100644 --- a/repository/src/main/java/org/apache/atlas/repository/audit/NoopEntityAuditRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/audit/NoopEntityAuditRepository.java @@ -18,7 +18,7 @@ package org.apache.atlas.repository.audit; -import org.apache.atlas.EntityAuditEvent; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.annotation.ConditionalOnAtlasProperty; import 
org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.audit.EntityAuditEventV2; diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java index 14fd5563df..3b5b365e63 100644 --- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java +++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java @@ -19,9 +19,8 @@ import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.AtlasException; -import org.apache.atlas.CreateUpdateEntitiesResult; -import org.apache.atlas.EntityAuditEvent; import org.apache.atlas.RequestContext; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.model.audit.EntityAuditEventV2; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.TypeCategory; @@ -225,63 +224,6 @@ private AtlasEntity fromV1toV2Entity(Referenceable referenceable, AtlasFormatCon return entity; } - public CreateUpdateEntitiesResult toCreateUpdateEntitiesResult(EntityMutationResponse reponse) { - CreateUpdateEntitiesResult ret = null; - - if (reponse != null) { - Map> mutatedEntities = reponse.getMutatedEntities(); - Map guidAssignments = reponse.getGuidAssignments(); - - ret = new CreateUpdateEntitiesResult(); - - if (MapUtils.isNotEmpty(guidAssignments)) { - ret.setGuidMapping(new GuidMapping(guidAssignments)); - } - - if (MapUtils.isNotEmpty(mutatedEntities)) { - EntityResult entityResult = new EntityResult(); - - for (Map.Entry> e : mutatedEntities.entrySet()) { - switch (e.getKey()) { - case CREATE: - List createdEntities = mutatedEntities.get(EntityOperation.CREATE); - if (CollectionUtils.isNotEmpty(createdEntities)) { - Collections.reverse(createdEntities); - entityResult.set(EntityResult.OP_CREATED, getGuids(createdEntities)); - } - break; - case UPDATE: - List updatedEntities = mutatedEntities.get(EntityOperation.UPDATE); - if (CollectionUtils.isNotEmpty(updatedEntities)) { - Collections.reverse(updatedEntities); - entityResult.set(EntityResult.OP_UPDATED, getGuids(updatedEntities)); - } - break; - case PARTIAL_UPDATE: - List partialUpdatedEntities = mutatedEntities.get(EntityOperation.PARTIAL_UPDATE); - if (CollectionUtils.isNotEmpty(partialUpdatedEntities)) { - Collections.reverse(partialUpdatedEntities); - entityResult.set(EntityResult.OP_UPDATED, getGuids(partialUpdatedEntities)); - } - break; - case DELETE: - List deletedEntities = mutatedEntities.get(EntityOperation.DELETE); - if (CollectionUtils.isNotEmpty(deletedEntities)) { - Collections.reverse(deletedEntities); - entityResult.set(EntityResult.OP_DELETED, getGuids(deletedEntities)); - } - break; - } - - } - - ret.setEntityResult(entityResult); - } - } - - return ret; - } - public List getGuids(List entities) { List ret = null; diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java index 68c6dacd9c..5e0c19a681 100644 --- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java +++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/DeleteHandlerV1.java @@ -64,8 +64,6 @@ import java.util.*; import java.util.stream.Collectors; -import static org.apache.atlas.AtlasClient.DATA_SET_SUPER_TYPE; -import static org.apache.atlas.AtlasClient.PROCESS_SUPER_TYPE; import 
static org.apache.atlas.model.TypeCategory.*; import static org.apache.atlas.model.instance.AtlasEntity.Status.ACTIVE; import static org.apache.atlas.model.instance.AtlasEntity.Status.DELETED; diff --git a/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java b/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java index 179fa43f3d..cbd25067db 100644 --- a/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java +++ b/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java @@ -19,8 +19,8 @@ import org.apache.atlas.ApplicationProperties; import org.apache.atlas.AtlasException; +import org.apache.atlas.repository.audit.ESBasedAuditRepository; import org.apache.atlas.repository.audit.EntityAuditRepository; -import org.apache.atlas.repository.audit.HBaseBasedAuditRepository; import org.apache.atlas.repository.graphdb.GraphDatabase; import org.apache.atlas.repository.store.graph.v1.DeleteHandlerV1; import org.apache.atlas.repository.store.graph.v1.SoftDeleteHandlerV1; @@ -96,7 +96,7 @@ public static Class getAuditRepositoryImpl() { try { Configuration config = ApplicationProperties.get(); return ApplicationProperties.getClass(config, - AUDIT_REPOSITORY_IMPLEMENTATION_PROPERTY, HBaseBasedAuditRepository.class.getName(), EntityAuditRepository.class); + AUDIT_REPOSITORY_IMPLEMENTATION_PROPERTY, ESBasedAuditRepository.class.getName(), EntityAuditRepository.class); } catch (AtlasException e) { throw new RuntimeException(e); } diff --git a/repository/src/main/java/org/apache/atlas/util/RepairIndex.java b/repository/src/main/java/org/apache/atlas/util/RepairIndex.java new file mode 100644 index 0000000000..44bd54f649 --- /dev/null +++ b/repository/src/main/java/org/apache/atlas/util/RepairIndex.java @@ -0,0 +1,127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
    + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.util; + +import org.apache.atlas.model.instance.AtlasEntity; +import org.apache.atlas.repository.graphdb.AtlasVertex; +import org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase; +import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; +import org.janusgraph.core.JanusGraph; +import org.janusgraph.core.schema.JanusGraphIndex; +import org.janusgraph.diskstorage.BackendTransaction; +import org.janusgraph.diskstorage.indexing.IndexEntry; +import org.janusgraph.graphdb.database.IndexSerializer; +import org.janusgraph.graphdb.database.StandardJanusGraph; +import org.janusgraph.graphdb.database.management.ManagementSystem; +import org.janusgraph.graphdb.transaction.StandardJanusGraphTx; +import org.janusgraph.graphdb.types.MixedIndexType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class RepairIndex { + private static final Logger LOG = LoggerFactory.getLogger(RepairIndex.class); + + private static final int MAX_TRIES_ON_FAILURE = 3; + + private static final String INDEX_NAME_VERTEX_INDEX = "vertex_index"; + private static final String INDEX_NAME_FULLTEXT_INDEX = "fulltext_index"; + private static final String INDEX_NAME_EDGE_INDEX = "edge_index"; + + private static JanusGraph graph; + + public static void setupGraph() { + LOG.info("Initializing graph: "); + graph = AtlasJanusGraphDatabase.getGraphInstance(); + LOG.info("Graph Initialized!"); + } + + private static String[] getIndexes() { + return new String[]{ INDEX_NAME_VERTEX_INDEX, INDEX_NAME_EDGE_INDEX, INDEX_NAME_FULLTEXT_INDEX}; + } + + private static void reindexVertex(String indexName, IndexSerializer indexSerializer, Set entityGUIDs) throws Exception { + Map>> documentsPerStore = new java.util.HashMap<>(); + ManagementSystem mgmt = (ManagementSystem) graph.openManagement(); + StandardJanusGraphTx tx = mgmt.getWrappedTx(); + BackendTransaction mutator = tx.getTxHandle(); + JanusGraphIndex index = mgmt.getGraphIndex(indexName); + MixedIndexType indexType = (MixedIndexType) mgmt.getSchemaVertex(index).asIndexType(); + + for (String entityGuid : entityGUIDs){ + for (int attemptCount = 1; attemptCount <= MAX_TRIES_ON_FAILURE; attemptCount++) { + AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entityGuid); + try { + indexSerializer.reindexElement(vertex.getWrappedElement(), indexType, documentsPerStore); + break; + } catch (Exception e){ + LOG.info("Exception: " + e.getMessage()); + LOG.info("Pausing before retry.."); + Thread.sleep(2000 * attemptCount); + } + } + } + mutator.getIndexTransaction(indexType.getBackingIndexName()).restore(documentsPerStore); + } + + private static Set getEntityAndReferenceGuids(String guid, Map referredEntities) throws Exception { + Set set = new HashSet<>(); + set.add(guid); + if (referredEntities == null || referredEntities.isEmpty()) { + return set; + } + set.addAll(referredEntities.keySet()); + return set; + } + + public void restoreSelective(String guid, Map referredEntities) throws Exception { + Set referencedGUIDs = new HashSet<>(getEntityAndReferenceGuids(guid, referredEntities)); + 
LOG.info("processing referencedGuids => " + referencedGUIDs); + + StandardJanusGraph janusGraph = (StandardJanusGraph) graph; + IndexSerializer indexSerializer = janusGraph.getIndexSerializer(); + + for (String indexName : getIndexes()) { + LOG.info("Restoring: " + indexName); + long startTime = System.currentTimeMillis(); + reindexVertex(indexName, indexSerializer, referencedGUIDs); + + LOG.info(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); + } + } + + public void restoreByIds(Set guids) throws Exception { + + StandardJanusGraph janusGraph = (StandardJanusGraph) graph; + IndexSerializer indexSerializer = janusGraph.getIndexSerializer(); + + for (String indexName : getIndexes()) { + LOG.info("Restoring: " + indexName); + long startTime = System.currentTimeMillis(); + reindexVertex(indexName, indexSerializer, guids); + + LOG.info(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); + LOG.info(": Done!"); + } + } +} diff --git a/repository/src/test/java/org/apache/atlas/BasicTestSetup.java b/repository/src/test/java/org/apache/atlas/BasicTestSetup.java index a1d7b62a58..54ab57e14f 100644 --- a/repository/src/test/java/org/apache/atlas/BasicTestSetup.java +++ b/repository/src/test/java/org/apache/atlas/BasicTestSetup.java @@ -41,6 +41,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.apache.atlas.utils.TestLoadModelUtils.loadModelFromJson; import static org.testng.Assert.fail; @@ -335,7 +337,7 @@ private void createUpdateClassificationDef(AtlasTypesDef td) { AtlasEntity database(String name, String description, String owner, String locationUri, String... traitNames) { AtlasEntity database = new AtlasEntity(DATABASE_TYPE); database.setAttribute("name", name); - database.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name); + database.setAttribute(QUALIFIED_NAME, "qualified:" + name); database.setAttribute("description", description); database.setAttribute("userDescription", description); database.setAttribute("owner", owner); @@ -350,7 +352,7 @@ AtlasEntity database(String name, String description, String owner, String locat protected AtlasEntity storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List columns) { AtlasEntity storageDescriptor = new AtlasEntity(STORAGE_DESC_TYPE); storageDescriptor.setAttribute("location", location); - storageDescriptor.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + location); + storageDescriptor.setAttribute(QUALIFIED_NAME, "qualified:" + location); storageDescriptor.setAttribute("inputFormat", inputFormat); storageDescriptor.setAttribute("outputFormat", outputFormat); storageDescriptor.setAttribute("compressed", compressed); @@ -362,7 +364,7 @@ protected AtlasEntity storageDescriptor(String location, String inputFormat, Str protected AtlasEntity column(String name, String dataType, String comment, String... 
traitNames) { AtlasEntity column = new AtlasEntity(COLUMN_TYPE); column.setAttribute("name", name); - column.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name); + column.setAttribute(QUALIFIED_NAME, "qualified:" + name); column.setAttribute("type", dataType); column.setAttribute("comment", comment); column.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList())); @@ -372,12 +374,12 @@ protected AtlasEntity column(String name, String dataType, String comment, Strin protected AtlasEntity table(String name, String description, AtlasEntity db, AtlasEntity sd, String owner, String tableType, List columns, String... traitNames) { - String dbName = db.getAttribute(AtlasClient.NAME).toString(); + String dbName = db.getAttribute(NAME).toString(); String clusterName = db.getAttribute("clusterName").toString(); AtlasEntity table = new AtlasEntity(HIVE_TABLE_TYPE); table.setAttribute("name", name); - table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name); + table.setAttribute(QUALIFIED_NAME, dbName + "." + name); table.setAttribute("description", description); table.setAttribute("owner", owner); table.setAttribute("tableType", tableType); @@ -400,14 +402,14 @@ protected AtlasEntity table(String name, String description, AtlasEntity db, Atl table.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList())); } - sd.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "@" + clusterName + "_storage"); + sd.setAttribute(QUALIFIED_NAME, dbName + "." + name + "@" + clusterName + "_storage"); AtlasObjectId tableId = getAtlasObjectId(table); sd.setAttribute("table", tableId); for (AtlasEntity column : columns) { - column.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName + "." + name + "." + column.getAttribute(AtlasClient.NAME).toString() + "@" + clusterName); + column.setAttribute(QUALIFIED_NAME, dbName + "." + name + "." + column.getAttribute(NAME).toString() + "@" + clusterName); column.setAttribute("table", tableId); } @@ -432,7 +434,7 @@ protected AtlasEntity loadProcess(String name, String description, String user, String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames) { AtlasEntity process = new AtlasEntity(HIVE_PROCESS_TYPE); process.setAttribute("name", name); - process.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); + process.setAttribute(QUALIFIED_NAME, name); process.setAttribute("description", description); process.setAttribute("userName", user); process.setAttribute("startTime", System.currentTimeMillis()); @@ -455,7 +457,7 @@ protected AtlasEntity loadProcess(String name, String description, String user, AtlasEntity view(String name, AtlasEntity dbId, List inputTables, String... 
traitNames) { AtlasEntity view = new AtlasEntity(VIEW_TYPE); view.setAttribute("name", name); - view.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); + view.setAttribute(QUALIFIED_NAME, name); view.setAttribute("userName", "testUser"); view.setAttribute("startTime", System.currentTimeMillis()); view.setAttribute("endTime", System.currentTimeMillis() + 10000); @@ -474,8 +476,8 @@ AtlasEntity view(String name, AtlasEntity dbId, List inputTables, S AtlasEntity datasetSubType(final String name, String owner) { AtlasEntity datasetSubType = new AtlasEntity(DATASET_SUBTYPE); - datasetSubType.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - datasetSubType.setAttribute(AtlasClient.NAME, name); + datasetSubType.setAttribute(QUALIFIED_NAME, name); + datasetSubType.setAttribute(NAME, name); datasetSubType.setAttribute("owner", owner); return datasetSubType; @@ -484,7 +486,7 @@ AtlasEntity datasetSubType(final String name, String owner) { public EntityMutationResponse createDummyEntity(String name, String type, String... traitNames) throws AtlasBaseException { AtlasEntity entity = new AtlasEntity(type); entity.setAttribute("name", name); - entity.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); + entity.setAttribute(QUALIFIED_NAME, name); entity.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList())); EntityMutationResponse resp = entityStore.createOrUpdate(new AtlasEntityStream(new AtlasEntity.AtlasEntitiesWithExtInfo(entity)), false); return resp; diff --git a/repository/src/test/java/org/apache/atlas/discovery/AtlasDiscoveryServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/AtlasDiscoveryServiceTest.java index 8c257d8668..c941ca536c 100644 --- a/repository/src/test/java/org/apache/atlas/discovery/AtlasDiscoveryServiceTest.java +++ b/repository/src/test/java/org/apache/atlas/discovery/AtlasDiscoveryServiceTest.java @@ -18,7 +18,6 @@ package org.apache.atlas.discovery; import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; import org.apache.atlas.BasicTestSetup; import org.apache.atlas.SortOrder; import org.apache.atlas.TestModules; @@ -46,6 +45,7 @@ import java.util.Map; import static org.apache.atlas.model.discovery.SearchParameters.*; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.testng.Assert.*; import static org.testng.Assert.assertNotNull; @@ -725,7 +725,7 @@ public void createSpecialCharTestEntities() throws AtlasBaseException { for (String nameStr : nameList) { AtlasEntity entityToDelete = new AtlasEntity(HIVE_TABLE_TYPE); entityToDelete.setAttribute("name", nameStr); - entityToDelete.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualifiedName"+System.currentTimeMillis()); + entityToDelete.setAttribute(QUALIFIED_NAME, "qualifiedName"+System.currentTimeMillis()); //create entity EntityMutationResponse response = entityStore.createOrUpdate(new AtlasEntityStream(new AtlasEntity.AtlasEntitiesWithExtInfo(entityToDelete)), false); @@ -737,7 +737,7 @@ public void createSpecialCharTestEntities() throws AtlasBaseException { qfStr = qfStr.replace("name","qf"); AtlasEntity entityToDelete = new AtlasEntity(HIVE_TABLE_TYPE); entityToDelete.setAttribute("name", "name"+System.currentTimeMillis()); - entityToDelete.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qfStr); + entityToDelete.setAttribute(QUALIFIED_NAME, qfStr); //create entity EntityMutationResponse response = entityStore.createOrUpdate(new 
AtlasEntityStream(new AtlasEntity.AtlasEntitiesWithExtInfo(entityToDelete)), false); diff --git a/repository/src/test/java/org/apache/atlas/discovery/ClassificationSearchProcessorTest.java b/repository/src/test/java/org/apache/atlas/discovery/ClassificationSearchProcessorTest.java index 121dca9f53..f2eeae6cc8 100644 --- a/repository/src/test/java/org/apache/atlas/discovery/ClassificationSearchProcessorTest.java +++ b/repository/src/test/java/org/apache/atlas/discovery/ClassificationSearchProcessorTest.java @@ -17,7 +17,6 @@ */ package org.apache.atlas.discovery; -import org.apache.atlas.AtlasClient; import org.apache.atlas.BasicTestSetup; import org.apache.atlas.TestModules; import org.apache.atlas.exception.AtlasBaseException; @@ -47,6 +46,7 @@ import java.util.stream.Collectors; import static org.apache.atlas.model.discovery.SearchParameters.*; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.testng.Assert.assertEquals; import static org.testng.Assert.fail; @@ -323,7 +323,7 @@ public void searchByTagAndGraphSysFiltersMarker() throws AtlasBaseException { private void createDimensionTaggedEntityAndDelete() throws AtlasBaseException { AtlasEntity entityToDelete = new AtlasEntity(HIVE_TABLE_TYPE); entityToDelete.setAttribute("name", "entity to be deleted"); - entityToDelete.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "entity.tobedeleted"); + entityToDelete.setAttribute(QUALIFIED_NAME, "entity.tobedeleted"); List cls = new ArrayList<>(); cls.add(new AtlasClassification(DIMENSION_CLASSIFICATION)); @@ -341,7 +341,7 @@ private void createDimensionTaggedEntityAndDelete() throws AtlasBaseException { private void createDimensionalTaggedEntityWithAttr() throws AtlasBaseException { AtlasEntity entityToDelete = new AtlasEntity(HIVE_TABLE_TYPE); entityToDelete.setAttribute("name", "Entity1"); - entityToDelete.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "entity.one"); + entityToDelete.setAttribute(QUALIFIED_NAME, "entity.one"); List cls = new ArrayList<>(); cls.add(new AtlasClassification(DIMENSIONAL_CLASSIFICATION, new HashMap() {{ diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java b/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java index 679df3c5ca..64c4cfa3f9 100644 --- a/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java +++ b/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java @@ -18,7 +18,7 @@ package org.apache.atlas.repository.audit; -import org.apache.atlas.EntityAuditEvent; +import org.apache.atlas.model.EntityAuditEvent; import org.apache.atlas.TestUtilsV2; import org.apache.atlas.model.audit.EntityAuditEventV2; import org.apache.atlas.model.instance.AtlasEntity; diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/CassandraAuditRepositoryTest.java b/repository/src/test/java/org/apache/atlas/repository/audit/CassandraAuditRepositoryTest.java deleted file mode 100644 index 26d3a60426..0000000000 --- a/repository/src/test/java/org/apache/atlas/repository/audit/CassandraAuditRepositoryTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.repository.audit; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Session; -import org.apache.atlas.AtlasException; -import org.apache.cassandra.exceptions.ConfigurationException; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.MapConfiguration; -import org.apache.thrift.transport.TTransportException; -import org.cassandraunit.utils.EmbeddedCassandraServerHelper; -import org.testng.SkipException; -import org.testng.annotations.BeforeClass; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -public class CassandraAuditRepositoryTest extends AuditRepositoryTestBase { - private static final int MAX_RETRIES = 9; - private final String CLUSTER_HOST = "localhost"; - private final String CLUSTER_NAME_TEST = "Test Cluster"; - private final int CLUSTER_PORT = 9042; - - @BeforeClass - public void setup() { - try { - EmbeddedCassandraServerHelper.startEmbeddedCassandra("cassandra_test.yml"); - eventRepository = new CassandraBasedAuditRepository(); - Configuration atlasConf = new MapConfiguration(getClusterProperties()); - ((CassandraBasedAuditRepository) eventRepository).setApplicationProperties(atlasConf); - ((CassandraBasedAuditRepository) eventRepository).start(); - - ensureClusterCreation(); - } - catch (Exception ex) { - throw new SkipException("setup: failed!", ex); - } - } - - private Map getClusterProperties() { - Map props = new HashMap<>(); - props.put(CassandraBasedAuditRepository.MANAGE_EMBEDDED_CASSANDRA, Boolean.TRUE); - props.put(CassandraBasedAuditRepository.CASSANDRA_CLUSTERNAME_PROPERTY, CLUSTER_NAME_TEST); - props.put(CassandraBasedAuditRepository.CASSANDRA_HOSTNAME_PROPERTY, CLUSTER_HOST); - props.put(CassandraBasedAuditRepository.CASSANDRA_PORT_PROPERTY, CLUSTER_PORT); - return props; - } - - private void ensureClusterCreation() throws InterruptedException { - // Retry the connection until we either connect or timeout - Cluster.Builder cassandraClusterBuilder = Cluster.builder(); - Cluster cluster = - cassandraClusterBuilder.addContactPoint(CLUSTER_HOST).withClusterName(CLUSTER_NAME_TEST).withPort(CLUSTER_PORT) - .build(); - int retryCount = 0; - - while (retryCount < MAX_RETRIES) { - try { - Session cassSession = cluster.connect(); - if (cassSession.getState().getConnectedHosts().size() > 0) { - cassSession.close(); - return; - } - } catch (Exception e) { - Thread.sleep(1000); - } - retryCount++; - } - - throw new SkipException("Unable to connect to embedded Cassandra after " + MAX_RETRIES + " seconds."); - } -} diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportReactivateTableTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportReactivateTableTest.java index 23e0cdaa65..59a9849ee1 100644 --- a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportReactivateTableTest.java +++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportReactivateTableTest.java @@ -45,7 +45,7 @@ import java.io.InputStream; import java.util.List; -import static org.apache.atlas.AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME; +import static 
org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.getInputStreamFrom; import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.getDefaultImportRequest; import static org.apache.atlas.utils.TestLoadModelUtils.loadFsModel; @@ -178,7 +178,7 @@ private EntityMutationResponse createColumn(AtlasEntity tblEntity) throws AtlasB String name = "new_column"; ret.setAttribute("name", name); - ret.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + REPL_FROM); + ret.setAttribute(QUALIFIED_NAME, name + REPL_FROM); ret.setAttribute("type", "int"); ret.setAttribute("comment", name); diff --git a/tools/atlas-index-repair/README b/tools/atlas-index-repair/README deleted file mode 100644 index 8c79c3c85e..0000000000 --- a/tools/atlas-index-repair/README +++ /dev/null @@ -1,28 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -Introduction - This feature takes care while in some cases it is possible that during entity creation, the entity is stored in the data store, but the corresponding indexes are not created in Solr. - Since Atlas relies heavily on Solr in the operation of its Basic Search, this will result in entity not being returned by a search. - -Steps to execute repair index in Atlas : - If the user needs to restore all the indexes, this can be accomplished by executing the repair-index.py with no command-line parameters. - To perform selective restore for an Atlas entity, specify the GUID of that entity: - eg : atlas-index-repair/repair_index.py [-g ] - For kerberos as authentication mode : - use: kinit -kt /atlas.keytab atlas/fqdn@DOMAIN. - Add "-Djava.security.auth.login.config=//conf/atlas_jaas.conf" to DEFAULT_JVM_OPTS in repair_index.py. 
diff --git a/tools/atlas-index-repair/pom.xml b/tools/atlas-index-repair/pom.xml deleted file mode 100644 index ab55f4c7f7..0000000000 --- a/tools/atlas-index-repair/pom.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - 4.0.0 - - apache-atlas - org.apache.atlas - 3.0.0-SNAPSHOT - ../../ - - atlas-index-repair-tool - Apache Atlas index repair Module - Apache Atlas index repair tool - jar - - - - - org.apache.atlas - atlas-client-v2 - ${project.version} - - - - org.apache.atlas - atlas-graphdb-janus - ${project.version} - - - - org.apache.atlas - atlas-repository - ${project.version} - - - - org.janusgraph - janusgraph-core - ${janusgraph.version} - - - - - diff --git a/tools/atlas-index-repair/src/main/java/org/apache/atlas/tools/RepairIndex.java b/tools/atlas-index-repair/src/main/java/org/apache/atlas/tools/RepairIndex.java deleted file mode 100644 index 1ee7248f1c..0000000000 --- a/tools/atlas-index-repair/src/main/java/org/apache/atlas/tools/RepairIndex.java +++ /dev/null @@ -1,291 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.tools; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasException; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.repository.graphdb.AtlasVertex; -import org.apache.atlas.repository.graphdb.janus.AtlasJanusGraphDatabase; -import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.configuration.Configuration; -import org.janusgraph.core.JanusGraph; -import org.janusgraph.core.schema.JanusGraphIndex; -import org.janusgraph.core.schema.SchemaAction; -import org.janusgraph.core.schema.SchemaStatus; -import org.janusgraph.diskstorage.BackendTransaction; -import org.janusgraph.diskstorage.indexing.IndexEntry; -import org.janusgraph.graphdb.database.IndexSerializer; -import org.janusgraph.graphdb.database.StandardJanusGraph; -import org.janusgraph.graphdb.database.management.ManagementSystem; -import org.janusgraph.graphdb.transaction.StandardJanusGraphTx; -import org.janusgraph.graphdb.types.MixedIndexType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Consumer; - -public class RepairIndex { - private static final Logger LOG = LoggerFactory.getLogger(RepairIndex.class); - - private static final int EXIT_CODE_SUCCESS = 0; - private static final int EXIT_CODE_FAILED = 1; - private static final int MAX_TRIES_ON_FAILURE = 3; - - private static final String INDEX_NAME_VERTEX_INDEX = "vertex_index"; - private static final String INDEX_NAME_FULLTEXT_INDEX = "fulltext_index"; - private static final String INDEX_NAME_EDGE_INDEX = "edge_index"; - private static final String DEFAULT_ATLAS_URL = "http://localhost:21000/"; - private static final String APPLICATION_PROPERTY_ATLAS_ENDPOINT = "atlas.rest.address"; - - private static JanusGraph graph; - private static AtlasClientV2 atlasClientV2; - private static boolean isSelectiveRestore; - - public static void main(String[] args) { - int exitCode = EXIT_CODE_FAILED; - LOG.info("Started index repair"); - - try { - CommandLine cmd = getCommandLine(args); - String guid = cmd.getOptionValue("g"); - - if(guid != null && !guid.isEmpty()){ - isSelectiveRestore = true; - String uid = cmd.getOptionValue("u"); - String pwd = cmd.getOptionValue("p"); - setupAtlasClient(uid, pwd); - } - - process(guid); - - LOG.info("Completed index repair!"); - exitCode = EXIT_CODE_SUCCESS; - } catch (Exception e) { - LOG.error("Failed!", e); - display("Failed: " + e.getMessage()); - } - - System.exit(exitCode); - } - - private static void process(String guid) throws Exception { - RepairIndex repairIndex = new RepairIndex(); - - setupGraph(); - - if (isSelectiveRestore) { - repairIndex.restoreSelective(guid); - }else{ - repairIndex.restoreAll(); - } - - 
displayCrlf("Repair Index: Done!"); - } - - private static CommandLine getCommandLine(String[] args) throws ParseException { - Options options = new Options(); - options.addOption("g", "guid", true, "guid for which update index should be executed."); - options.addOption("u", "user", true, "User name."); - options.addOption("p", "password", true, "Password name."); - - return new DefaultParser().parse(options, args); - } - - public static void setupGraph() { - display("Initializing graph: "); - graph = AtlasJanusGraphDatabase.getGraphInstance(); - displayCrlf("Graph Initialized!"); - } - - private static String[] getIndexes() { - return new String[]{ INDEX_NAME_VERTEX_INDEX, INDEX_NAME_EDGE_INDEX, INDEX_NAME_FULLTEXT_INDEX}; - } - - private static void setupAtlasClient(String uid, String pwd) throws AtlasException { - String[] atlasEndpoint = getAtlasRESTUrl(); - if (atlasEndpoint == null || atlasEndpoint.length == 0) { - atlasEndpoint = new String[]{DEFAULT_ATLAS_URL}; - } - atlasClientV2 = getAtlasClientV2(atlasEndpoint, new String[]{uid, pwd}); - } - - private void restoreAll() throws Exception { - for (String indexName : getIndexes()){ - displayCrlf("Restoring: " + indexName); - long startTime = System.currentTimeMillis(); - - ManagementSystem mgmt = (ManagementSystem) graph.openManagement(); - JanusGraphIndex index = mgmt.getGraphIndex(indexName); - mgmt.updateIndex(index, SchemaAction.REINDEX).get(); - mgmt.commit(); - - ManagementSystem.awaitGraphIndexStatus(graph, indexName).status(SchemaStatus.ENABLED).call(); - - display(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); - displayCrlf(": Done!"); - } - } - - - private void restoreSelective(String guid) throws Exception { - Set referencedGUIDs = new HashSet<>(getEntityAndReferenceGuids(guid)); - displayCrlf("processing referencedGuids => "+ referencedGUIDs); - - StandardJanusGraph janusGraph = (StandardJanusGraph) graph; - IndexSerializer indexSerializer = janusGraph.getIndexSerializer(); - - for (String indexName : getIndexes()){ - displayCrlf("Restoring: " + indexName); - long startTime = System.currentTimeMillis(); - reindexVertex(indexName, indexSerializer, referencedGUIDs); - - display(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); - displayCrlf(": Done!"); - } - } - - private static void reindexVertex(String indexName, IndexSerializer indexSerializer, Set entityGUIDs) throws Exception { - Map>> documentsPerStore = new java.util.HashMap<>(); - ManagementSystem mgmt = (ManagementSystem) graph.openManagement(); - StandardJanusGraphTx tx = mgmt.getWrappedTx(); - BackendTransaction mutator = tx.getTxHandle(); - JanusGraphIndex index = mgmt.getGraphIndex(indexName); - MixedIndexType indexType = (MixedIndexType) mgmt.getSchemaVertex(index).asIndexType(); - - for (String entityGuid : entityGUIDs){ - for (int attemptCount = 1; attemptCount <= MAX_TRIES_ON_FAILURE; attemptCount++) { - AtlasVertex vertex = AtlasGraphUtilsV2.findByGuid(entityGuid); - try { - indexSerializer.reindexElement(vertex.getWrappedElement(), indexType, documentsPerStore); - break; - }catch (Exception e){ - displayCrlf("Exception: " + e.getMessage()); - displayCrlf("Pausing before retry.."); - Thread.sleep(2000 * attemptCount); - } - } - } - mutator.getIndexTransaction(indexType.getBackingIndexName()).restore(documentsPerStore); - } - - private static Set getEntityAndReferenceGuids(String guid) throws Exception { - Set set = new HashSet<>(); - set.add(guid); - AtlasEntityWithExtInfo entity = 
atlasClientV2.getEntityByGuid(guid); - Map map = entity.getReferredEntities(); - if (map == null || map.isEmpty()) { - return set; - } - set.addAll(map.keySet()); - return set; - } - - private static void display(String... formatMessage) { - displayFn(System.out::print, formatMessage); - } - - private static void displayCrlf(String... formatMessage) { - displayFn(System.out::println, formatMessage); - } - - private static void displayFn(Consumer fn, String... formatMessage) { - if (formatMessage.length == 1) { - fn.accept(formatMessage[0]); - } else { - fn.accept(String.format(formatMessage[0], formatMessage[1])); - } - } - - private static String[] getAtlasRESTUrl() { - Configuration atlasConf = null; - try { - atlasConf = ApplicationProperties.get(); - return atlasConf.getStringArray(APPLICATION_PROPERTY_ATLAS_ENDPOINT); - } catch (AtlasException e) { - return new String[]{DEFAULT_ATLAS_URL}; - } - } - - private static AtlasClientV2 getAtlasClientV2(String[] atlasEndpoint, String[] uidPwdFromCommandLine) throws AtlasException { - AtlasClientV2 atlasClientV2; - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - String[] uidPwd = (uidPwdFromCommandLine[0] == null || uidPwdFromCommandLine[1] == null) - ? AuthenticationUtil.getBasicAuthenticationInput() - : uidPwdFromCommandLine; - - atlasClientV2 = new AtlasClientV2(atlasEndpoint, uidPwd); - } else { - atlasClientV2 = new AtlasClientV2(atlasEndpoint); - } - return atlasClientV2; - } - - private static Set getEntityAndReferenceGuids(String guid, Map referredEntities) throws Exception { - Set set = new HashSet<>(); - set.add(guid); - if (referredEntities == null || referredEntities.isEmpty()) { - return set; - } - set.addAll(referredEntities.keySet()); - return set; - } - - public void restoreSelective(String guid, Map referredEntities) throws Exception { - Set referencedGUIDs = new HashSet<>(getEntityAndReferenceGuids(guid, referredEntities)); - displayCrlf("processing referencedGuids => " + referencedGUIDs); - - StandardJanusGraph janusGraph = (StandardJanusGraph) graph; - IndexSerializer indexSerializer = janusGraph.getIndexSerializer(); - - for (String indexName : getIndexes()) { - LOG.info("Restoring: " + indexName); - long startTime = System.currentTimeMillis(); - reindexVertex(indexName, indexSerializer, referencedGUIDs); - - LOG.info(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); - } - } - - public void restoreByIds(Set guids) throws Exception { - - StandardJanusGraph janusGraph = (StandardJanusGraph) graph; - IndexSerializer indexSerializer = janusGraph.getIndexSerializer(); - - for (String indexName : getIndexes()) { - LOG.info("Restoring: " + indexName); - long startTime = System.currentTimeMillis(); - reindexVertex(indexName, indexSerializer, guids); - - LOG.info(": Time taken: " + (System.currentTimeMillis() - startTime) + " ms"); - LOG.info(": Done!"); - } - } -} diff --git a/tools/atlas-index-repair/src/main/resources/atlas-log4j.xml b/tools/atlas-index-repair/src/main/resources/atlas-log4j.xml deleted file mode 100644 index 9346a36929..0000000000 --- a/tools/atlas-index-repair/src/main/resources/atlas-log4j.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/atlas-index-repair/src/main/resources/repair_index.py b/tools/atlas-index-repair/src/main/resources/repair_index.py deleted file mode 100755 index 0333dadca7..0000000000 --- a/tools/atlas-index-repair/src/main/resources/repair_index.py +++ /dev/null @@ -1,153 +0,0 @@ -#!/usr/bin/env 
python - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -import sys -sys.path.insert(0, '/usr/hdp/current/atlas-server/bin/') - -import traceback -import subprocess -import atlas_config as mc - -ATLAS_LOG_FILE="atlas-index-janus-repair.log" -ATLAS_LOG_OPTS="-Datlas.log.dir=%s -Datlas.log.file="+ATLAS_LOG_FILE -ATLAS_COMMAND_OPTS="-Datlas.home=%s" -ATLAS_CONFIG_OPTS="-Datlas.conf=%s" -DEFAULT_JVM_HEAP_OPTS="-Xmx4096m -XX:MaxPermSize=512m" -DEFAULT_JVM_OPTS="-Dlog4j.configuration=atlas-log4j.xml -Djava.net.preferIPv4Stack=true -server" - -def main(): - atlas_home = mc.atlasDir() - confdir = mc.dirMustExist(mc.confDir(atlas_home)) - mc.executeEnvSh(confdir) - logdir = mc.dirMustExist(mc.logDir(atlas_home)) - mc.dirMustExist(mc.dataDir(atlas_home)) - - if mc.isCygwin(): - # Pathnames that are passed to JVM must be converted to Windows format. - jvm_atlas_home = mc.convertCygwinPath(atlas_home) - jvm_confdir = mc.convertCygwinPath(confdir) - jvm_logdir = mc.convertCygwinPath(logdir) - else: - jvm_atlas_home = atlas_home - jvm_confdir = confdir - jvm_logdir = logdir - - print ("Logging: "+ os.path.join(jvm_logdir, ATLAS_LOG_FILE)) - - #create sys property for conf dirs - jvm_opts_list = (ATLAS_LOG_OPTS % (jvm_logdir)).split() - - cmd_opts = (ATLAS_COMMAND_OPTS % jvm_atlas_home) - jvm_opts_list.extend(cmd_opts.split()) - - config_opts = (ATLAS_CONFIG_OPTS % jvm_confdir) - jvm_opts_list.extend(config_opts.split()) - - atlas_server_heap_opts = os.environ.get(mc.ATLAS_SERVER_HEAP, DEFAULT_JVM_HEAP_OPTS) - jvm_opts_list.extend(atlas_server_heap_opts.split()) - - atlas_server_jvm_opts = os.environ.get(mc.ATLAS_SERVER_OPTS) - if atlas_server_jvm_opts: - jvm_opts_list.extend(atlas_server_jvm_opts.split()) - - atlas_jvm_opts = os.environ.get(mc.ATLAS_OPTS, DEFAULT_JVM_OPTS) - jvm_opts_list.extend(atlas_jvm_opts.split()) - - #expand web app dir - web_app_dir = mc.webAppDir(atlas_home) - mc.expandWebApp(atlas_home) - - p = os.pathsep - atlas_classpath = os.path.join(os.getcwd(), ".", "*") + p \ - + confdir + p \ - + os.path.join(web_app_dir, "atlas", "WEB-INF", "classes" ) + p \ - + os.path.join(web_app_dir, "atlas", "WEB-INF", "lib", "*" ) + p \ - + os.path.join(atlas_home, "libext", "*") - - is_hbase = mc.is_hbase(confdir) - - if is_hbase: - #add hbase-site.xml to classpath - hbase_conf_dir = mc.hbaseConfDir(atlas_home) - - if os.path.exists(hbase_conf_dir): - atlas_classpath = atlas_classpath + p \ - + hbase_conf_dir - else: - if mc.is_hbase(confdir): - raise Exception("Could not find hbase-site.xml in %s. 
Please set env var HBASE_CONF_DIR to the hbase client conf dir", hbase_conf_dir) - - if mc.isCygwin(): - atlas_classpath = mc.convertCygwinPath(atlas_classpath, True) - - atlas_pid_file = mc.pidFile(atlas_home) - - if os.path.isfile(atlas_pid_file): - #Check if process listed in atlas.pid file is still running - pf = file(atlas_pid_file, 'r') - pid = pf.read().strip() - pf.close() - - - - if is_hbase and mc.is_hbase_local(confdir): - print("configured for local hbase.") - mc.configure_hbase(atlas_home) - mc.run_hbase_action(mc.hbaseBinDir(atlas_home), "start", hbase_conf_dir, logdir) - print("hbase started.") - - web_app_path = os.path.join(web_app_dir, "atlas") - if (mc.isCygwin()): - web_app_path = mc.convertCygwinPath(web_app_path) - - start_migration_export(atlas_classpath, atlas_pid_file, jvm_logdir, jvm_opts_list, web_app_path) - -def start_migration_export(atlas_classpath, atlas_pid_file, jvm_logdir, jvm_opts_list, web_app_path): - args = [] - args.extend(sys.argv[1:]) - process = java("org.apache.atlas.tools.RepairIndex", args, atlas_classpath, jvm_opts_list) - -def java(classname, args, classpath, jvm_opts_list): - java_home = os.environ.get("JAVA_HOME", None) - if java_home: - prg = os.path.join(java_home, "bin", "java") - else: - prg = mc.which("java") - - if prg is None: - raise EnvironmentError('The java binary could not be found in your path or JAVA_HOME') - - commandline = [prg] - commandline.extend(jvm_opts_list) - commandline.append("-classpath") - commandline.append(classpath) - commandline.append(classname) - commandline.extend(args) - - p = subprocess.Popen(commandline) - p.communicate() - -if __name__ == '__main__': - try: - returncode = main() - except Exception as e: - print("Exception: %s " % str(e)) - print(traceback.format_exc()) - returncode = -1 - - sys.exit(returncode) diff --git a/tools/atlas-migration-exporter/README b/tools/atlas-migration-exporter/README deleted file mode 100755 index e2adac9ac0..0000000000 --- a/tools/atlas-migration-exporter/README +++ /dev/null @@ -1,54 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -Introduction - This utility exports data in Apache Atlas HDP-2.6.x to a file system - directory, so that the exported data can be imported into Apache Atlas - in HDP-3.0. - -What is exported? - All data in Titan graph database, both type-system and entity-instances - data, will be exported. - -How much time will it take to export data? - The duration of the export process depends on the number of entities - present in graph database. While cluster configuration determines speed - of operation, for cluster with reasonable configuration, it takes about - 30 minutes to export 1 million entities. - -Steps to export data from Apache Atlas in HDP-2.6.x - - Shutdown Apache Atlas. 
This is critical to ensure that no updates are - being made to Apache Atlas database while export is in progress. - - - Execute the following commands in the host where Apache Atlas server runs: - cd /tools/atlas-migration-exporter - python atlas_migration_export.py -d - - - On successful completion, the migration exporter will display messages like: - atlas-migration-export: starting migration export. Log file location /var/log/atlas/atlas-migration-exporter.log - atlas-migration-export: initializing - atlas-migration-export: initialized - atlas-migration-export: exporting typesDef to file /atlas-migration-typesdef.json - atlas-migration-export: exported typesDef to file /atlas-migration-typesdef.json - atlas-migration-export: exporting data to file /atlas-migration-data.json - atlas-migration-export: exported data to file /atlas-migration-data.json - atlas-migration-export: completed migration export! - -Next Steps - Once export completes successfully, please refer to Apache Atlas Migration - Guide for details on importing the data in Apache Atlas in HDP-3.0. diff --git a/tools/atlas-migration-exporter/atlas-log4j.xml b/tools/atlas-migration-exporter/atlas-log4j.xml deleted file mode 100755 index ae0d27ded8..0000000000 --- a/tools/atlas-migration-exporter/atlas-log4j.xml +++ /dev/null @@ -1,73 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/atlas-migration-exporter/atlas_migration_export.py b/tools/atlas-migration-exporter/atlas_migration_export.py deleted file mode 100755 index e1b12f6092..0000000000 --- a/tools/atlas-migration-exporter/atlas_migration_export.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -import sys -sys.path.insert(0, '/usr/hdp/current/atlas-server/bin/') - -import traceback -import subprocess -import atlas_config as mc - -ATLAS_LOG_OPTS="-Datlas.log.dir=%s -Datlas.log.file=atlas-migration-exporter.log" -ATLAS_COMMAND_OPTS="-Datlas.home=%s" -ATLAS_CONFIG_OPTS="-Datlas.conf=%s" -DEFAULT_JVM_HEAP_OPTS="-Xmx4096m -XX:MaxPermSize=512m" -DEFAULT_JVM_OPTS="-Dlog4j.configuration=atlas-log4j.xml -Djava.net.preferIPv4Stack=true -server" - -def main(): - is_setup = (len(sys.argv)>1) and sys.argv[1] is not None and sys.argv[1] == '-setup' - atlas_home = mc.atlasDir() - confdir = mc.dirMustExist(mc.confDir(atlas_home)) - mc.executeEnvSh(confdir) - logdir = mc.dirMustExist(mc.logDir(atlas_home)) - mc.dirMustExist(mc.dataDir(atlas_home)) - - if mc.isCygwin(): - # Pathnames that are passed to JVM must be converted to Windows format. 
- jvm_atlas_home = mc.convertCygwinPath(atlas_home) - jvm_confdir = mc.convertCygwinPath(confdir) - jvm_logdir = mc.convertCygwinPath(logdir) - else: - jvm_atlas_home = atlas_home - jvm_confdir = confdir - jvm_logdir = logdir - - #create sys property for conf dirs - jvm_opts_list = (ATLAS_LOG_OPTS % (jvm_logdir)).split() - - cmd_opts = (ATLAS_COMMAND_OPTS % jvm_atlas_home) - jvm_opts_list.extend(cmd_opts.split()) - - config_opts = (ATLAS_CONFIG_OPTS % jvm_confdir) - jvm_opts_list.extend(config_opts.split()) - - atlas_server_heap_opts = os.environ.get(mc.ATLAS_SERVER_HEAP, DEFAULT_JVM_HEAP_OPTS) - jvm_opts_list.extend(atlas_server_heap_opts.split()) - - atlas_server_jvm_opts = os.environ.get(mc.ATLAS_SERVER_OPTS) - if atlas_server_jvm_opts: - jvm_opts_list.extend(atlas_server_jvm_opts.split()) - - atlas_jvm_opts = os.environ.get(mc.ATLAS_OPTS, DEFAULT_JVM_OPTS) - jvm_opts_list.extend(atlas_jvm_opts.split()) - - #expand web app dir - web_app_dir = mc.webAppDir(atlas_home) - mc.expandWebApp(atlas_home) - - p = os.pathsep - atlas_classpath = os.path.join(os.path.dirname(os.path.realpath(__file__)), ".", "*") + p \ - + confdir + p \ - + os.path.join(web_app_dir, "atlas", "WEB-INF", "classes" ) + p \ - + os.path.join(web_app_dir, "atlas", "WEB-INF", "lib", "*" ) + p \ - + os.path.join(atlas_home, "libext", "*") - - is_hbase = mc.is_hbase(confdir) - - if is_hbase: - #add hbase-site.xml to classpath - hbase_conf_dir = mc.hbaseConfDir(atlas_home) - - if os.path.exists(hbase_conf_dir): - atlas_classpath = atlas_classpath + p \ - + hbase_conf_dir - else: - if mc.is_hbase(confdir): - raise Exception("Could not find hbase-site.xml in %s. Please set env var HBASE_CONF_DIR to the hbase client conf dir", hbase_conf_dir) - - if mc.isCygwin(): - atlas_classpath = mc.convertCygwinPath(atlas_classpath, True) - - atlas_pid_file = mc.pidFile(atlas_home) - - if os.path.isfile(atlas_pid_file): - #Check if process listed in atlas.pid file is still running - pf = file(atlas_pid_file, 'r') - pid = pf.read().strip() - pf.close() - - if mc.exist_pid((int)(pid)): - mc.server_already_running(pid) - else: - mc.server_pid_not_running(pid) - - if is_hbase and mc.is_hbase_local(confdir): - print("configured for local hbase.") - mc.configure_hbase(atlas_home) - mc.run_hbase_action(mc.hbaseBinDir(atlas_home), "start", hbase_conf_dir, logdir) - print("hbase started.") - - web_app_path = os.path.join(web_app_dir, "atlas") - if (mc.isCygwin()): - web_app_path = mc.convertCygwinPath(web_app_path) - - start_migration_export(atlas_classpath, atlas_pid_file, jvm_logdir, jvm_opts_list, web_app_path) - -def start_migration_export(atlas_classpath, atlas_pid_file, jvm_logdir, jvm_opts_list, web_app_path): - args = [] - args.extend(sys.argv[1:]) - process = java("org.apache.atlas.migration.Exporter", args, atlas_classpath, jvm_opts_list) - -def java(classname, args, classpath, jvm_opts_list): - java_home = os.environ.get("JAVA_HOME", None) - if java_home: - prg = os.path.join(java_home, "bin", "java") - else: - prg = mc.which("java") - - if prg is None: - raise EnvironmentError('The java binary could not be found in your path or JAVA_HOME') - - commandline = [prg] - commandline.extend(jvm_opts_list) - commandline.append("-classpath") - commandline.append(classpath) - commandline.append(classname) - commandline.extend(args) - - p = subprocess.Popen(commandline) - p.communicate() - -if __name__ == '__main__': - try: - returncode = main() - except Exception as e: - print("Exception: %s " % str(e)) - print(traceback.format_exc()) - 
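Note: both wrapper scripts removed in this patch (the index-repair launcher above and this migration exporter) share the same launch pattern: resolve the java binary from JAVA_HOME or from the PATH, assemble JVM options plus a classpath that spans the conf directory, the expanded web app, and libext, then hand control to the Java tool via subprocess. The following is a minimal, self-contained sketch of that pattern for reference only; the helper name launch_java_tool, the heap setting, and the example classpath are illustrative and are not part of the patch.

    # Illustrative sketch (not from the patch): the shared launch pattern
    # used by these Atlas wrapper scripts.
    import os
    import subprocess
    import sys

    def launch_java_tool(main_class, classpath, jvm_opts, tool_args):
        # Prefer the JDK pointed to by JAVA_HOME, fall back to java on PATH.
        java_home = os.environ.get("JAVA_HOME")
        java_bin = os.path.join(java_home, "bin", "java") if java_home else "java"

        cmd = [java_bin] + list(jvm_opts) + ["-classpath", classpath, main_class] + list(tool_args)
        # Popen followed by wait mirrors the Popen/communicate call in the original scripts.
        return subprocess.Popen(cmd).wait()

    if __name__ == "__main__":
        # Example invocation; the heap size and classpath are placeholders.
        sys.exit(launch_java_tool("org.apache.atlas.migration.Exporter",
                                  "/opt/apache-atlas/libext/*",
                                  ["-Xmx4096m"],
                                  sys.argv[1:]))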
returncode = -1 - - sys.exit(returncode) diff --git a/tools/classification-updater/pom.xml b/tools/classification-updater/pom.xml index 596cc19d53..31ee4ed07e 100644 --- a/tools/classification-updater/pom.xml +++ b/tools/classification-updater/pom.xml @@ -99,9 +99,10 @@ org.apache.maven.plugins maven-compiler-plugin + ${maven-compiler-plugin} - 1.8 - 1.8 + 17 + 17 diff --git a/webapp/pom.xml b/webapp/pom.xml index 05a97f200a..c4b58fd0a3 100755 --- a/webapp/pom.xml +++ b/webapp/pom.xml @@ -111,10 +111,10 @@ - + org.apache.atlas @@ -156,18 +156,6 @@ ${project.version} - - org.apache.atlas - atlas-janusgraph-hbase2 - ${project.version} - - - org.noggit - noggit - - - - org.apache.hadoop hadoop-common @@ -578,19 +566,6 @@ jackson-core ${jackson.version} - - org.apache.atlas - atlas-index-repair-tool - 3.0.0-SNAPSHOT - compile - - - - com.aayushatharva.brotli4j - brotli4j - 1.17.0 - - @@ -899,8 +874,8 @@ ${project.parent.basedir}/build-tools/src/main/resources/enunciate.xml ${project.build.directory}/api/v2/ - 1.8 - 1.8 + 17 + 17 ${skipEnunciate} diff --git a/webapp/src/main/java/org/apache/atlas/Atlas.java b/webapp/src/main/java/org/apache/atlas/Atlas.java index 548ef6e4e7..0c67a48f37 100755 --- a/webapp/src/main/java/org/apache/atlas/Atlas.java +++ b/webapp/src/main/java/org/apache/atlas/Atlas.java @@ -30,7 +30,6 @@ import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.util.ShutdownHookManager; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; @@ -70,7 +69,7 @@ public final class Atlas { private static EmbeddedServer server; static { - ShutdownHookManager.get().addShutdownHook(new Thread() { + Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { @@ -83,7 +82,7 @@ public void run() { LOG.info("<== Shutdown of Atlas"); } } - }, AtlasConstants.ATLAS_SHUTDOWN_HOOK_PRIORITY); + }); } private static void shutdown() { diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java deleted file mode 100755 index fcf5bd47c3..0000000000 --- a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java +++ /dev/null @@ -1,507 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.examples; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasErrorCode; -import org.apache.atlas.AtlasException; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.typedef.*; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.configuration.Configuration; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -/** - * A driver that sets up sample types and data for testing purposes. - * Please take a look at QueryDSL in docs for the Meta Model. - * todo - move this to examples module. - */ -public class QuickStart { - public static final String ATLAS_REST_ADDRESS = "atlas.rest.address"; - public static final String SALES_DB = "Sales"; - public static final String SALES_DB_DESCRIPTION = "Sales Database"; - public static final String SALES_FACT_TABLE = "sales_fact"; - public static final String FACT_TRAIT = "Fact_v1"; - public static final String COLUMNS_ATTRIBUTE = "columns"; - public static final String TIME_ID_COLUMN = "time_id"; - public static final String DB_ATTRIBUTE = "db"; - public static final String SALES_FACT_TABLE_DESCRIPTION = "sales fact table"; - public static final String LOAD_SALES_DAILY_PROCESS = "loadSalesDaily"; - public static final String LOAD_SALES_DAILY_PROCESS_DESCRIPTION = "hive query for daily summary"; - public static final String INPUTS_ATTRIBUTE = "inputs"; - public static final String OUTPUTS_ATTRIBUTE = "outputs"; - public static final String TIME_DIM_TABLE = "time_dim"; - public static final String SALES_FACT_DAILY_MV_TABLE = "sales_fact_daily_mv"; - public static final String PRODUCT_DIM_VIEW = "product_dim_view"; - public static final String PRODUCT_DIM_TABLE = "product_dim"; - public static final String INPUT_TABLES_ATTRIBUTE = "inputTables"; - - public static void main(String[] args) throws Exception { - String[] basicAuthUsernamePassword = null; - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput(); - } - - runQuickstart(args, basicAuthUsernamePassword); - } - - @VisibleForTesting - static void runQuickstart(String[] args, String[] basicAuthUsernamePassword) throws Exception { - String[] urls = getServerUrl(args); - QuickStart quickStart = null; - - try { - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - quickStart = new QuickStart(urls, basicAuthUsernamePassword); - } else { - quickStart = new QuickStart(urls); - } - - // Shows how to create types in Atlas for your meta model - quickStart.createTypes(); - - // Shows how to create entities (instances) for the added types in Atlas - quickStart.createEntities(); - - // Shows some search queries using DSL based on types - quickStart.search(); - } finally { - if(quickStart!=null) { - quickStart.closeConnection(); - } - } - } - - static String[] getServerUrl(String[] args) throws AtlasException { - if (args.length > 0) { - return args[0].split(","); - } 
- - Configuration configuration = ApplicationProperties.get(); - String[] urls = configuration.getStringArray(ATLAS_REST_ADDRESS); - if (urls == null || urls.length == 0) { - System.out.println("Usage: quick_start_v1.py ://: like http://localhost:21000>"); - System.exit(-1); - } - - return urls; - } - - static final String DATABASE_TYPE = "DB_v1"; - static final String COLUMN_TYPE = "Column_v1"; - static final String TABLE_TYPE = "Table_v1"; - static final String VIEW_TYPE = "View_v1"; - static final String LOAD_PROCESS_TYPE = "LoadProcess_v1"; - static final String STORAGE_DESC_TYPE = "StorageDesc_v1"; - - private static final String[] TYPES = - {DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE, "JdbcAccess_v1", - "ETL_v1", "Metric_v1", "PII_v1", "Fact_v1", "Dimension_v1", "Log Data_v1"}; - - private final AtlasClient metadataServiceClient; - - QuickStart(String[] urls,String[] basicAuthUsernamePassword) { - metadataServiceClient = new AtlasClient(urls,basicAuthUsernamePassword); - } - - QuickStart(String[] urls) throws AtlasException { - metadataServiceClient = new AtlasClient(urls); - } - - - void createTypes() throws Exception { - TypesDef typesDef = createTypeDefinitions(); - - String typesAsJSON = AtlasType.toV1Json(typesDef); - System.out.println("typesAsJSON = " + typesAsJSON); - metadataServiceClient.createType(typesAsJSON); - - // verify types created - verifyTypesCreated(); - } - - TypesDef createTypeDefinitions() throws Exception { - ClassTypeDefinition dbClsDef = TypesUtil - .createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE, null, - TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("locationUri", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG)); - - ClassTypeDefinition storageDescClsDef = TypesUtil - .createClassTypeDef(STORAGE_DESC_TYPE, STORAGE_DESC_TYPE, null, attrDef("location", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("inputFormat", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("outputFormat", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("compressed", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED, false, null)); - - ClassTypeDefinition columnClsDef = TypesUtil - .createClassTypeDef(COLUMN_TYPE, COLUMN_TYPE, null, attrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("dataType", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("comment", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - ClassTypeDefinition tblClsDef = TypesUtil - .createClassTypeDef(TABLE_TYPE, TABLE_TYPE, Collections.singleton("DataSet"), - new AttributeDefinition(DB_ATTRIBUTE, DATABASE_TYPE, Multiplicity.REQUIRED, false, null), - new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null), - attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("lastAccessTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), attrDef("retention", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("viewOriginalText", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("viewExpandedText", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("tableType", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("temporary", AtlasBaseTypeDef.ATLAS_TYPE_BOOLEAN), - new AttributeDefinition(COLUMNS_ATTRIBUTE, AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE), - Multiplicity.COLLECTION, true, null)); - - ClassTypeDefinition loadProcessClsDef = TypesUtil - 
.createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, Collections.singleton("Process"), - attrDef("userName", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("startTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("endTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("queryText", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryPlan", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryId", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryGraph", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED)); - - ClassTypeDefinition viewClsDef = TypesUtil - .createClassTypeDef(VIEW_TYPE, VIEW_TYPE, Collections.singleton("DataSet"), - new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null), - new AttributeDefinition("inputTables", AtlasBaseTypeDef.getArrayTypeName(TABLE_TYPE), - Multiplicity.COLLECTION, false, null)); - - TraitTypeDefinition dimTraitDef = TypesUtil.createTraitTypeDef("Dimension_v1", "Dimension Trait", null); - - TraitTypeDefinition factTraitDef = TypesUtil.createTraitTypeDef("Fact_v1", "Fact Trait", null); - - TraitTypeDefinition piiTraitDef = TypesUtil.createTraitTypeDef("PII_v1", "PII Trait", null); - - TraitTypeDefinition metricTraitDef = TypesUtil.createTraitTypeDef("Metric_v1", "Metric Trait", null); - - TraitTypeDefinition etlTraitDef = TypesUtil.createTraitTypeDef("ETL_v1", "ETL Trait", null); - - TraitTypeDefinition jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess_v1", "JdbcAccess Trait", null); - - TraitTypeDefinition logTraitDef = TypesUtil.createTraitTypeDef("Log Data_v1", "LogData Trait", null); - - return new TypesDef(Collections.emptyList(), Collections.emptyList(), - Arrays.asList(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef, logTraitDef), - Arrays.asList(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef)); - } - - AttributeDefinition attrDef(String name, String dT) { - return attrDef(name, dT, Multiplicity.OPTIONAL, false, null); - } - - AttributeDefinition attrDef(String name, String dT, Multiplicity m) { - return attrDef(name, dT, m, false, null); - } - - AttributeDefinition attrDef(String name, String dT, Multiplicity m, boolean isComposite, - String reverseAttributeName) { - Preconditions.checkNotNull(name); - Preconditions.checkNotNull(dT); - return new AttributeDefinition(name, dT, m, isComposite, reverseAttributeName); - } - - void createEntities() throws Exception { - Id salesDB = database(SALES_DB, SALES_DB_DESCRIPTION, "John ETL", "hdfs://host:8000/apps/warehouse/sales"); - - - Referenceable sd = - rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", - true); - - List salesFactColumns = Arrays.asList(rawColumn(TIME_ID_COLUMN, "int", "time id"), rawColumn("product_id", "int", "product id"), - rawColumn("customer_id", "int", "customer id", "PII_v1"), - rawColumn("sales", "double", "product id", "Metric_v1")); - - List logFactColumns = Arrays.asList(rawColumn("time_id", "int", "time id"), rawColumn("app_id", "int", "app id"), - rawColumn("machine_id", "int", "machine id"), rawColumn("log", "string", "log data", "Log Data_v1")); - - Id salesFact = table(SALES_FACT_TABLE, SALES_FACT_TABLE_DESCRIPTION, salesDB, sd, "Joe", "Managed", - salesFactColumns, FACT_TRAIT); - - List productDimColumns = Arrays.asList(rawColumn("product_id", "int", "product id"), rawColumn("product_name", "string", "product name"), - 
rawColumn("brand_name", "int", "brand name")); - - Id productDim = - table(PRODUCT_DIM_TABLE, "product dimension table", salesDB, sd, "John Doe", "Managed", - productDimColumns, "Dimension_v1"); - - List timeDimColumns = Arrays.asList(rawColumn("time_id", "int", "time id"), rawColumn("dayOfYear", "int", "day Of Year"), - rawColumn("weekDay", "int", "week Day")); - - Id timeDim = table(TIME_DIM_TABLE, "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns, - "Dimension_v1"); - - - List customerDimColumns = Arrays.asList(rawColumn("customer_id", "int", "customer id", "PII_v1"), - rawColumn("name", "string", "customer name", "PII_v1"), - rawColumn("address", "string", "customer address", "PII_v1")); - - Id customerDim = - table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns, - "Dimension_v1"); - - - Id reportingDB = - database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting"); - - Id logDB = database("Logging", "logging database", "Tim ETL", "hdfs://host:8000/apps/warehouse/logging"); - - Id salesFactDaily = - table(SALES_FACT_DAILY_MV_TABLE, "sales fact daily materialized view", reportingDB, sd, "Joe BI", - "Managed", salesFactColumns, "Metric_v1"); - - Id loggingFactDaily = - table("log_fact_daily_mv", "log fact daily materialized view", logDB, sd, "Tim ETL", "Managed", - logFactColumns, "Log Data_v1"); - - loadProcess(LOAD_SALES_DAILY_PROCESS, LOAD_SALES_DAILY_PROCESS_DESCRIPTION, "John ETL", - Arrays.asList(salesFact, timeDim), - Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL_v1"); - - view(PRODUCT_DIM_VIEW, reportingDB, Collections.singletonList(productDim), "Dimension_v1", "JdbcAccess_v1"); - - view("customer_dim_view", reportingDB, Collections.singletonList(customerDim), "Dimension_v1", "JdbcAccess_v1"); - - Id salesFactMonthly = - table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI", - "Managed", salesFactColumns, "Metric_v1"); - - loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", Collections.singletonList(salesFactDaily), - Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1"); - - Id loggingFactMonthly = - table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL", - "Managed", logFactColumns, "Log Data_v1"); - - loadProcess("loadLogsMonthly", "hive query for monthly summary", "Tim ETL", Collections.singletonList(loggingFactDaily), - Collections.singletonList(loggingFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1"); - } - - private Id createInstance(Referenceable referenceable) throws Exception { - String typeName = referenceable.getTypeName(); - - String entityJSON = AtlasType.toV1Json(referenceable); - System.out.println("Submitting new entity= " + entityJSON); - List guids = metadataServiceClient.createEntity(entityJSON); - System.out.println("created instance for type " + typeName + ", guid: " + guids); - - // return the Id for created instance with guid - if (guids.size() > 0) { - return new Id(guids.get(guids.size() - 1), referenceable.getId().getVersion(), referenceable.getTypeName()); - } - - return null; - } - - Id database(String name, String description, String owner, String locationUri, String... 
traitNames) - throws AtlasBaseException { - try { - Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set("description", description); - referenceable.set("owner", owner); - referenceable.set("locationUri", locationUri); - referenceable.set("createTime", System.currentTimeMillis()); - - return createInstance(referenceable); - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s database entity creation failed", name)); - } - } - - Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) { - Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE); - referenceable.set("location", location); - referenceable.set("inputFormat", inputFormat); - referenceable.set("outputFormat", outputFormat); - referenceable.set("compressed", compressed); - - return referenceable; - } - - Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws AtlasBaseException { - try { - Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set("dataType", dataType); - referenceable.set("comment", comment); - - return referenceable; - } - catch(Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s, column entity creation failed", name)); - } - } - - Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType, - List columns, String... traitNames) throws AtlasBaseException { - try { - Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - referenceable.set("description", description); - referenceable.set("owner", owner); - referenceable.set("tableType", tableType); - referenceable.set("createTime", System.currentTimeMillis()); - referenceable.set("lastAccessTime", System.currentTimeMillis()); - referenceable.set("retention", System.currentTimeMillis()); - referenceable.set("db", dbId); - referenceable.set("sd", sd); - referenceable.set("columns", columns); - - return createInstance(referenceable); - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s table entity creation failed", name)); - } - } - - Id loadProcess(String name, String description, String user, List inputTables, List outputTables, - String queryText, String queryPlan, String queryId, String queryGraph, String... 
traitNames) - throws AtlasBaseException { - try { - Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames); - // super type attributes - referenceable.set(AtlasClient.NAME, name); - referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - referenceable.set("description", description); - referenceable.set(INPUTS_ATTRIBUTE, inputTables); - referenceable.set(OUTPUTS_ATTRIBUTE, outputTables); - - referenceable.set("user", user); - referenceable.set("startTime", System.currentTimeMillis()); - referenceable.set("endTime", System.currentTimeMillis() + 10000); - - referenceable.set("queryText", queryText); - referenceable.set("queryPlan", queryPlan); - referenceable.set("queryId", queryId); - referenceable.set("queryGraph", queryGraph); - - return createInstance(referenceable); - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s process entity creation failed", name)); - } - } - - Id view(String name, Id dbId, List inputTables, String... traitNames) throws AtlasBaseException { - try { - Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - referenceable.set("db", dbId); - - referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables); - - return createInstance(referenceable); - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s Id creation", name)); - } - } - - private void verifyTypesCreated() throws AtlasBaseException { - try { - List types = metadataServiceClient.listTypes(); - for (String type : TYPES) { - assert types.contains(type); - } - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "view creation failed."); - } - } - - private String[] getDSLQueries() { - return new String[]{"from DB_v1", "DB_v1", "DB_v1 where name=\"Reporting\"", "DB_v1 where DB_v1.name=\"Reporting\"", - "DB_v1 name = \"Reporting\"", "DB_v1 DB_v1.name = \"Reporting\"", - "DB_v1 where name=\"Reporting\" select name, owner", "DB_v1 where DB_v1.name=\"Reporting\" select name, owner", - "DB_v1 has name", "DB_v1 where DB_v1 has name", - // "DB_v1, Table_v1", TODO: Fix "DB, Table", Table, db; Table db works - "DB_v1 is JdbcAccess", - /* - "DB, hive_process has name", - "DB as db1, Table where db1.name = \"Reporting\"", - "DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()}, - */ - "from Table_v1", "Table_v1", "Table_v1 is Dimension_v1", "Column_v1 where Column_v1 isa PII_v1", "View_v1 is Dimension_v1", - /*"Column where Column isa PII select Column.name",*/ - "Column_v1 select Column_v1.name", "Column_v1 select name", "Column_v1 where Column_v1.name=\"customer_id\"", - "from Table_v1 select Table_v1.name", "DB_v1 where (name = \"Reporting\")", - "DB_v1 where (name = \"Reporting\") select name as _col_0, owner as _col_1", "DB_v1 where DB_v1 is JdbcAccess_v1", - "DB_v1 where DB_v1 has name", "DB_v1 Table_v1", "DB_v1 where DB_v1 has name", - "DB_v1 as db1 Table where (db1.name = \"Reporting\")", - "DB_v1 where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", - /* - todo: does not work - "DB where (name = \"Reporting\") and ((createTime + 1) > 0)", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name - as dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") 
select db1.name as - dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner - select db1.name as dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner - select db1.name as dbName, tab.name as tabName", - */ - // trait searches - "Dimension_v1", - /*"Fact", - todo: does not work*/ - "JdbcAccess_v1", "ETL_v1", "Metric_v1", "PII_v1", "`Log Data_v1`", - /* - // Lineage - todo - fix this, its not working - "Table hive_process outputTables", - "Table loop (hive_process outputTables)", - "Table as _loop0 loop (hive_process outputTables) withPath", - "Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as - destTable withPath", - */ - "Table_v1 where name=\"sales_fact\", columns", - "Table_v1 where name=\"sales_fact\", columns as column select column.name, column.dataType, column" - + ".comment", - "from DataSet", "from Process",}; - } - - private void search() throws AtlasBaseException { - try { - for (String dslQuery : getDSLQueries()) { - JsonNode results = metadataServiceClient.search(dslQuery, 10, 0); - if (results != null && results instanceof ArrayNode) { - System.out.println("query [" + dslQuery + "] returned [" + results.size() + "] rows"); - } else { - System.out.println("query [" + dslQuery + "] failed, results:" + results); - } - } - } catch (Exception e) { - throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "one or more dsl queries failed"); - } - } - - private void closeConnection() { - if (metadataServiceClient != null) { - metadataServiceClient.close(); - } - } -} diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java deleted file mode 100755 index 5309d19939..0000000000 --- a/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java +++ /dev/null @@ -1,743 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.examples; - -import com.google.common.annotations.VisibleForTesting; -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasException; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.SearchFilter; -import org.apache.atlas.model.discovery.AtlasSearchResult; -import org.apache.atlas.model.discovery.AtlasSearchResult.AtlasFullTextResult; -import org.apache.atlas.model.discovery.AtlasSearchResult.AttributeSearchResult; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.instance.EntityMutations.EntityOperation; -import org.apache.atlas.model.lineage.AtlasLineageInfo; -import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection; -import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageRelation; -import org.apache.atlas.model.typedef.AtlasBusinessMetadataDef; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasRelationshipDef; -import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.ArrayUtils; - -import javax.ws.rs.core.MultivaluedMap; -import java.util.*; - -import static java.util.Arrays.asList; -import static org.apache.atlas.AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME; -import static org.apache.atlas.model.typedef.AtlasBusinessMetadataDef.ATTR_OPTION_APPLICABLE_ENTITY_TYPES; -import static org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory.AGGREGATION; -import static org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory.COMPOSITION; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality.SET; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE; -import static org.apache.atlas.type.AtlasTypeUtil.createClassTypeDef; -import static org.apache.atlas.type.AtlasTypeUtil.createOptionalAttrDef; -import static org.apache.atlas.type.AtlasTypeUtil.createRelationshipEndDef; -import static org.apache.atlas.type.AtlasTypeUtil.createRelationshipTypeDef; -import static org.apache.atlas.type.AtlasTypeUtil.createRequiredAttrDef; -import static org.apache.atlas.type.AtlasTypeUtil.createTraitTypeDef; -import static org.apache.atlas.type.AtlasTypeUtil.createUniqueRequiredAttrDef; -import static org.apache.atlas.type.AtlasTypeUtil.toAtlasRelatedObjectId; -import static org.apache.atlas.type.AtlasTypeUtil.toAtlasRelatedObjectIds; - -/** - * A driver that sets up sample types and entities using v2 types and entity model for testing purposes. 
- */ -public class QuickStartV2 { - public static final String ATLAS_REST_ADDRESS = "atlas.rest.address"; - - public static final String SALES_DB = "Sales"; - public static final String REPORTING_DB = "Reporting"; - public static final String LOGGING_DB = "Logging"; - - public static final String SALES_FACT_TABLE = "sales_fact"; - public static final String PRODUCT_DIM_TABLE = "product_dim"; - public static final String CUSTOMER_DIM_TABLE = "customer_dim"; - public static final String TIME_DIM_TABLE = "time_dim"; - public static final String SALES_FACT_DAILY_MV_TABLE = "sales_fact_daily_mv"; - public static final String SALES_FACT_MONTHLY_MV_TABLE = "sales_fact_monthly_mv"; - public static final String LOG_FACT_DAILY_MV_TABLE = "log_fact_daily_mv"; - public static final String LOG_FACT_MONTHLY_MV_TABLE = "logging_fact_monthly_mv"; - - public static final String TIME_ID_COLUMN = "time_id"; - public static final String PRODUCT_ID_COLUMN = "product_id"; - public static final String CUSTOMER_ID_COLUMN = "customer_id"; - public static final String APP_ID_COLUMN = "app_id"; - public static final String MACHINE_ID_COLUMN = "machine_id"; - public static final String PRODUCT_NAME_COLUMN = "product_name"; - public static final String BRAND_NAME_COLUMN = "brand_name"; - public static final String NAME_COLUMN = "name"; - public static final String SALES_COLUMN = "sales"; - public static final String LOG_COLUMN = "log"; - public static final String ADDRESS_COLUMN = "address"; - public static final String DAY_OF_YEAR_COLUMN = "dayOfYear"; - public static final String WEEKDAY_COLUMN = "weekDay"; - - public static final String DIMENSION_CLASSIFICATION = "Dimension"; - public static final String FACT_CLASSIFICATION = "Fact"; - public static final String PII_CLASSIFICATION = "PII"; - public static final String METRIC_CLASSIFICATION = "Metric"; - public static final String ETL_CLASSIFICATION = "ETL"; - public static final String JDBC_CLASSIFICATION = "JdbcAccess"; - public static final String LOGDATA_CLASSIFICATION = "Log Data"; - - public static final String LOAD_SALES_DAILY_PROCESS = "loadSalesDaily"; - public static final String LOAD_SALES_DAILY_PROCESS_EXEC1 = "loadSalesDailyExec1"; - public static final String LOAD_SALES_DAILY_PROCESS_EXEC2 = "loadSalesDailyExec2"; - public static final String LOAD_SALES_MONTHLY_PROCESS = "loadSalesMonthly"; - public static final String LOAD_SALES_MONTHLY_PROCESS_EXEC1 = "loadSalesMonthlyExec1"; - public static final String LOAD_SALES_MONTHLY_PROCESS_EXEC2 = "loadSalesMonthlyExec2"; - public static final String LOAD_LOGS_MONTHLY_PROCESS = "loadLogsMonthly"; - public static final String LOAD_LOGS_MONTHLY_PROCESS_EXEC1 = "loadLogsMonthlyExec1"; - public static final String LOAD_LOGS_MONTHLY_PROCESS_EXEC2 = "loadLogsMonthlyExec2"; - - public static final String PRODUCT_DIM_VIEW = "product_dim_view"; - public static final String CUSTOMER_DIM_VIEW = "customer_dim_view"; - - public static final String DATABASE_TYPE = "DB"; - public static final String COLUMN_TYPE = "Column"; - public static final String TABLE_TYPE = "Table"; - public static final String VIEW_TYPE = "View"; - public static final String LOAD_PROCESS_TYPE = "LoadProcess"; - public static final String LOAD_PROCESS_EXECUTION_TYPE = "LoadProcessExecution"; - public static final String STORAGE_DESC_TYPE = "StorageDesc"; - - public static final String TABLE_DATABASE_TYPE = "Table_DB"; - public static final String VIEW_DATABASE_TYPE = "View_DB"; - public static final String VIEW_TABLES_TYPE = "View_Tables"; - public 
static final String TABLE_COLUMNS_TYPE = "Table_Columns"; - public static final String TABLE_STORAGE_DESC_TYPE = "Table_StorageDesc"; - public static final String PROCESS_PROCESS_EXECUTION_DESC_TYPE = "Process_ProcessExecution"; - - public static final String VERSION_1 = "1.0"; - public static final String MANAGED_TABLE = "Managed"; - public static final String EXTERNAL_TABLE = "External"; - public static final String CLUSTER_SUFFIX = "@cl1"; - - public static final String[] TYPES = { DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, LOAD_PROCESS_EXECUTION_TYPE, - VIEW_TYPE, JDBC_CLASSIFICATION, ETL_CLASSIFICATION, METRIC_CLASSIFICATION, - PII_CLASSIFICATION, FACT_CLASSIFICATION, DIMENSION_CLASSIFICATION, LOGDATA_CLASSIFICATION, - TABLE_DATABASE_TYPE, VIEW_DATABASE_TYPE, VIEW_TABLES_TYPE, TABLE_COLUMNS_TYPE, TABLE_STORAGE_DESC_TYPE }; - - public static void main(String[] args) throws Exception { - String[] basicAuthUsernamePassword = null; - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput(); - } - - runQuickstart(args, basicAuthUsernamePassword); - } - - @VisibleForTesting - static void runQuickstart(String[] args, String[] basicAuthUsernamePassword) throws Exception { - String[] urls = getServerUrl(args); - - QuickStartV2 quickStartV2 = null; - try { - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - quickStartV2 = new QuickStartV2(urls, basicAuthUsernamePassword); - } else { - quickStartV2 = new QuickStartV2(urls); - } - - // Shows how to create v2 types in Atlas for your meta model - quickStartV2.createTypes(); - - // Shows how to create v2 entities (instances) for the added types in Atlas - quickStartV2.createEntities(); - - // Shows some search queries using DSL based on types - quickStartV2.search(); - - // Shows some lineage information on entity - quickStartV2.lineage(); - } finally { - if (quickStartV2!= null) { - quickStartV2.closeConnection(); - } - } - - } - - static String[] getServerUrl(String[] args) throws AtlasException { - if (args.length > 0) { - return args[0].split(","); - } - - Configuration configuration = ApplicationProperties.get(); - String[] urls = configuration.getStringArray(ATLAS_REST_ADDRESS); - - if (ArrayUtils.isEmpty(urls)) { - System.out.println("org.apache.atlas.examples.QuickStartV2 ://: like http://localhost:21000>"); - System.exit(-1); - } - - return urls; - } - - private final AtlasClientV2 atlasClientV2; - - QuickStartV2(String[] urls, String[] basicAuthUsernamePassword) { - atlasClientV2 = new AtlasClientV2(urls,basicAuthUsernamePassword); - } - - QuickStartV2(String[] urls) throws AtlasException { - atlasClientV2 = new AtlasClientV2(urls); - } - - - void createTypes() throws Exception { - AtlasTypesDef atlasTypesDef = createTypeDefinitions(); - - System.out.println("\nCreating sample types: "); - - atlasClientV2.createAtlasTypeDefs(atlasTypesDef); - - verifyTypesCreated(); - } - - AtlasTypesDef createTypeDefinitions() { - // Entity-Definitions - AtlasEntityDef dbTypeDef = createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE, VERSION_1, Collections.singleton("DataSet"), - createOptionalAttrDef("locationUri", "string"), - createOptionalAttrDef("createTime", "long")); - - AtlasEntityDef tableTypeDef = createClassTypeDef(TABLE_TYPE, TABLE_TYPE, VERSION_1, Collections.singleton("DataSet"), - new HashMap() {{ put("schemaElementsAttribute", "columns"); }} , - createOptionalAttrDef("createTime", "long"), - 
createOptionalAttrDef("lastAccessTime", "long"), - createOptionalAttrDef("retention", "long"), - createOptionalAttrDef("viewOriginalText", "string"), - createOptionalAttrDef("viewExpandedText", "string"), - createOptionalAttrDef("tableType", "string"), - createOptionalAttrDef("temporary", "boolean")); - - AtlasEntityDef colTypeDef = createClassTypeDef(COLUMN_TYPE, COLUMN_TYPE, VERSION_1, Collections.singleton("DataSet"), - new HashMap() {{ put("schemaAttributes", "[\"name\", \"description\", \"owner\", \"type\", \"comment\", \"position\"]"); }}, - createOptionalAttrDef("dataType", "string"), - createOptionalAttrDef("comment", "string")); - - AtlasEntityDef sdTypeDef = createClassTypeDef(STORAGE_DESC_TYPE, STORAGE_DESC_TYPE, VERSION_1, Collections.singleton("DataSet"), - createOptionalAttrDef("location", "string"), - createOptionalAttrDef("inputFormat", "string"), - createOptionalAttrDef("outputFormat", "string"), - createRequiredAttrDef("compressed", "boolean")); - - AtlasEntityDef processTypeDef = createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, VERSION_1, Collections.singleton("Process"), - createOptionalAttrDef("userName", "string"), - createOptionalAttrDef("startTime", "long"), - createOptionalAttrDef("endTime", "long"), - createRequiredAttrDef("queryText", "string"), - createRequiredAttrDef("queryPlan", "string"), - createRequiredAttrDef("queryId", "string"), - createRequiredAttrDef("queryGraph", "string")); - - AtlasEntityDef processExecutionTypeDef = createClassTypeDef(LOAD_PROCESS_EXECUTION_TYPE, LOAD_PROCESS_EXECUTION_TYPE, VERSION_1, Collections.singleton("ProcessExecution"), - createOptionalAttrDef("userName", "string"), - createOptionalAttrDef("startTime", "long"), - createOptionalAttrDef("endTime", "long"), - createRequiredAttrDef("queryText", "string"), - createRequiredAttrDef("queryPlan", "string"), - createRequiredAttrDef("queryId", "string"), - createRequiredAttrDef("queryGraph", "string")); - - processExecutionTypeDef.setOption(AtlasEntityDef.OPTION_DISPLAY_TEXT_ATTRIBUTE, "queryText"); - - AtlasEntityDef viewTypeDef = createClassTypeDef(VIEW_TYPE, VIEW_TYPE, VERSION_1, Collections.singleton("DataSet")); - - // Relationship-Definitions - AtlasRelationshipDef tableDatabaseTypeDef = createRelationshipTypeDef(TABLE_DATABASE_TYPE, TABLE_DATABASE_TYPE, VERSION_1, AGGREGATION, PropagateTags.NONE, - createRelationshipEndDef(TABLE_TYPE, "db", SINGLE, false), - createRelationshipEndDef(DATABASE_TYPE, "tables", SET, true)); - - AtlasRelationshipDef viewDatabaseTypeDef = createRelationshipTypeDef(VIEW_DATABASE_TYPE, VIEW_DATABASE_TYPE, VERSION_1, AGGREGATION, PropagateTags.NONE, - createRelationshipEndDef(VIEW_TYPE, "db", SINGLE, false), - createRelationshipEndDef(DATABASE_TYPE, "views", SET, true)); - - AtlasRelationshipDef viewTablesTypeDef = createRelationshipTypeDef(VIEW_TABLES_TYPE, VIEW_TABLES_TYPE, VERSION_1, AGGREGATION, PropagateTags.NONE, - createRelationshipEndDef(VIEW_TYPE, "inputTables", SET, true), - createRelationshipEndDef(TABLE_TYPE, "view", SINGLE, false)); - - AtlasRelationshipDef tableColumnsTypeDef = createRelationshipTypeDef(TABLE_COLUMNS_TYPE, TABLE_COLUMNS_TYPE, VERSION_1, COMPOSITION, PropagateTags.NONE, - createRelationshipEndDef(TABLE_TYPE, "columns", SET, true), - createRelationshipEndDef(COLUMN_TYPE, "table", SINGLE, false)); - - AtlasRelationshipDef tableStorageDescTypeDef = createRelationshipTypeDef(TABLE_STORAGE_DESC_TYPE, TABLE_STORAGE_DESC_TYPE, VERSION_1, COMPOSITION, PropagateTags.NONE, - createRelationshipEndDef(TABLE_TYPE, "sd", SINGLE, 
true), - createRelationshipEndDef(STORAGE_DESC_TYPE, "table", SINGLE, false)); - AtlasRelationshipDef processProcessExecutionTypeDef = createRelationshipTypeDef(PROCESS_PROCESS_EXECUTION_DESC_TYPE, PROCESS_PROCESS_EXECUTION_DESC_TYPE, VERSION_1, AGGREGATION, PropagateTags.NONE, - createRelationshipEndDef(LOAD_PROCESS_TYPE, "processExecutions", SET, true), - createRelationshipEndDef(LOAD_PROCESS_EXECUTION_TYPE, "process", SINGLE, false)); - - - // Classification-Definitions - AtlasClassificationDef dimClassifDef = createTraitTypeDef(DIMENSION_CLASSIFICATION, "Dimension Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef factClassifDef = createTraitTypeDef(FACT_CLASSIFICATION, "Fact Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef piiClassifDef = createTraitTypeDef(PII_CLASSIFICATION, "PII Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef metricClassifDef = createTraitTypeDef(METRIC_CLASSIFICATION, "Metric Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef etlClassifDef = createTraitTypeDef(ETL_CLASSIFICATION, "ETL Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef jdbcClassifDef = createTraitTypeDef(JDBC_CLASSIFICATION, "JdbcAccess Classification", VERSION_1, Collections.emptySet()); - AtlasClassificationDef logClassifDef = createTraitTypeDef(LOGDATA_CLASSIFICATION, "LogData Classification", VERSION_1, Collections.emptySet()); - - List entityDefs = asList(dbTypeDef, sdTypeDef, colTypeDef, tableTypeDef, processTypeDef, processExecutionTypeDef, viewTypeDef); - List relationshipDefs = asList(tableDatabaseTypeDef, viewDatabaseTypeDef, viewTablesTypeDef, tableColumnsTypeDef, tableStorageDescTypeDef, processProcessExecutionTypeDef); - List classificationDefs = asList(dimClassifDef, factClassifDef, piiClassifDef, metricClassifDef, etlClassifDef, jdbcClassifDef, logClassifDef); - - // BusinessMetadata definitions - AtlasAttributeDef bmAttrDef1 = new AtlasAttributeDef("attr1", "int"); - AtlasAttributeDef bmAttrDef2 = new AtlasAttributeDef("attr2", "int"); - - bmAttrDef1.setOption(ATTR_OPTION_APPLICABLE_ENTITY_TYPES, AtlasType.toJson(Collections.singleton(TABLE_TYPE))); - bmAttrDef1.setIsOptional(true); - bmAttrDef1.setIsUnique(false); - - bmAttrDef2.setOption(ATTR_OPTION_APPLICABLE_ENTITY_TYPES, AtlasType.toJson(Collections.singleton(TABLE_TYPE))); - bmAttrDef2.setIsOptional(true); - bmAttrDef2.setIsUnique(false); - - AtlasBusinessMetadataDef testBusinessMetadataDef = new AtlasBusinessMetadataDef("test_businessMetadata", "test_description", VERSION_1); - - testBusinessMetadataDef.setAttributeDefs(Arrays.asList(bmAttrDef1, bmAttrDef2)); - - List businessMetadataDefs = asList(testBusinessMetadataDef); - - return new AtlasTypesDef(Collections.emptyList(), Collections.emptyList(), classificationDefs, entityDefs, relationshipDefs, businessMetadataDefs); - } - - void createEntities() throws Exception { - System.out.println("\nCreating sample entities: "); - - // Database entities - AtlasEntity salesDB = createDatabase(SALES_DB, "sales database", "John ETL", "hdfs://host:8000/apps/warehouse/sales"); - AtlasEntity reportingDB = createDatabase(REPORTING_DB, "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting"); - AtlasEntity logDB = createDatabase(LOGGING_DB, "logging database", "Tim ETL", "hdfs://host:8000/apps/warehouse/logging"); - - // Table entities - AtlasEntity salesFact = createTable(SALES_FACT_TABLE, "sales fact table", salesDB, "Joe", 
MANAGED_TABLE, - Arrays.asList(createColumn(SALES_DB, SALES_FACT_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(SALES_DB, SALES_FACT_TABLE, PRODUCT_ID_COLUMN, "int", "product id"), - createColumn(SALES_DB, SALES_FACT_TABLE, CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION), - createColumn(SALES_DB, SALES_FACT_TABLE, SALES_COLUMN, "double", "product id", METRIC_CLASSIFICATION)), - FACT_CLASSIFICATION); - - AtlasEntity productDim = createTable(PRODUCT_DIM_TABLE, "product dimension table", salesDB, "John Doe", MANAGED_TABLE, - Arrays.asList(createColumn(SALES_DB, PRODUCT_DIM_TABLE, PRODUCT_ID_COLUMN, "int", "product id"), - createColumn(SALES_DB, PRODUCT_DIM_TABLE, PRODUCT_NAME_COLUMN, "string", "product name"), - createColumn(SALES_DB, PRODUCT_DIM_TABLE, BRAND_NAME_COLUMN, "int", "brand name")), - DIMENSION_CLASSIFICATION); - - AtlasEntity customerDim = createTable(CUSTOMER_DIM_TABLE, "customer dimension table", salesDB, "fetl", EXTERNAL_TABLE, - Arrays.asList(createColumn(SALES_DB, CUSTOMER_DIM_TABLE, CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION), - createColumn(SALES_DB, CUSTOMER_DIM_TABLE, NAME_COLUMN, "string", "customer name", PII_CLASSIFICATION), - createColumn(SALES_DB, CUSTOMER_DIM_TABLE, ADDRESS_COLUMN, "string", "customer address", PII_CLASSIFICATION)), - DIMENSION_CLASSIFICATION); - - AtlasEntity timeDim = createTable(TIME_DIM_TABLE, "time dimension table", salesDB, "John Doe", EXTERNAL_TABLE, - Arrays.asList(createColumn(SALES_DB, TIME_DIM_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(SALES_DB, TIME_DIM_TABLE, DAY_OF_YEAR_COLUMN, "int", "day Of Year"), - createColumn(SALES_DB, TIME_DIM_TABLE, WEEKDAY_COLUMN, "int", "week Day")), - DIMENSION_CLASSIFICATION); - - AtlasEntity loggingFactDaily = createTable(LOG_FACT_DAILY_MV_TABLE, "log fact daily materialized view", logDB, "Tim ETL", MANAGED_TABLE, - Arrays.asList(createColumn(LOGGING_DB, LOG_FACT_DAILY_MV_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(LOGGING_DB, LOG_FACT_DAILY_MV_TABLE, APP_ID_COLUMN, "int", "app id"), - createColumn(LOGGING_DB, LOG_FACT_DAILY_MV_TABLE, MACHINE_ID_COLUMN, "int", "machine id"), - createColumn(LOGGING_DB, LOG_FACT_DAILY_MV_TABLE, LOG_COLUMN, "string", "log data", LOGDATA_CLASSIFICATION)), - LOGDATA_CLASSIFICATION); - - AtlasEntity loggingFactMonthly = createTable(LOG_FACT_MONTHLY_MV_TABLE, "logging fact monthly materialized view", logDB, "Tim ETL", MANAGED_TABLE, - Arrays.asList(createColumn(LOGGING_DB, LOG_FACT_MONTHLY_MV_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(LOGGING_DB, LOG_FACT_MONTHLY_MV_TABLE, APP_ID_COLUMN, "int", "app id"), - createColumn(LOGGING_DB, LOG_FACT_MONTHLY_MV_TABLE, MACHINE_ID_COLUMN, "int", "machine id"), - createColumn(LOGGING_DB, LOG_FACT_MONTHLY_MV_TABLE, LOG_COLUMN, "string", "log data", LOGDATA_CLASSIFICATION)), - LOGDATA_CLASSIFICATION); - - AtlasEntity salesFactDaily = createTable(SALES_FACT_DAILY_MV_TABLE, "sales fact daily materialized view", reportingDB, "Joe BI", MANAGED_TABLE, - Arrays.asList(createColumn(REPORTING_DB, SALES_FACT_DAILY_MV_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(REPORTING_DB, SALES_FACT_DAILY_MV_TABLE, PRODUCT_ID_COLUMN, "int", "product id"), - createColumn(REPORTING_DB, SALES_FACT_DAILY_MV_TABLE, CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION), - createColumn(REPORTING_DB, SALES_FACT_DAILY_MV_TABLE, SALES_COLUMN, "double", "product id", METRIC_CLASSIFICATION)), - METRIC_CLASSIFICATION); - - AtlasEntity salesFactMonthly = 
createTable(SALES_FACT_MONTHLY_MV_TABLE, "sales fact monthly materialized view", reportingDB, "Jane BI", MANAGED_TABLE, - Arrays.asList(createColumn(REPORTING_DB, SALES_FACT_MONTHLY_MV_TABLE, TIME_ID_COLUMN, "int", "time id"), - createColumn(REPORTING_DB, SALES_FACT_MONTHLY_MV_TABLE, PRODUCT_ID_COLUMN, "int", "product id"), - createColumn(REPORTING_DB, SALES_FACT_MONTHLY_MV_TABLE, CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION), - createColumn(REPORTING_DB, SALES_FACT_MONTHLY_MV_TABLE, SALES_COLUMN, "double", "product id", METRIC_CLASSIFICATION)), - METRIC_CLASSIFICATION); - - // View entities - createView(PRODUCT_DIM_VIEW, reportingDB, asList(productDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION); - createView(CUSTOMER_DIM_VIEW, reportingDB, asList(customerDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION); - - // Process entities - AtlasEntity loadProcess = createProcess(LOAD_SALES_DAILY_PROCESS, "hive query for daily summary", "John ETL", - asList(salesFact, timeDim), - asList(salesFactDaily), - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - - createProcessExecution(loadProcess, LOAD_SALES_DAILY_PROCESS_EXEC1, "hive query execution 1 for daily summary", "John ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - - createProcessExecution(loadProcess, LOAD_SALES_DAILY_PROCESS_EXEC2, "hive query execution 2 for daily summary", "John ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - - AtlasEntity loadProcess2 = createProcess(LOAD_SALES_MONTHLY_PROCESS, "hive query for monthly summary", "John ETL", - asList(salesFactDaily), - asList(salesFactMonthly), - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - createProcessExecution(loadProcess2, LOAD_SALES_MONTHLY_PROCESS_EXEC1, "hive query execution 1 for monthly summary", "John ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - - createProcessExecution(loadProcess2, LOAD_SALES_MONTHLY_PROCESS_EXEC2, "hive query execution 2 for monthly summary", "John ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - - - AtlasEntity loadProcess3 = createProcess(LOAD_LOGS_MONTHLY_PROCESS, "hive query for monthly summary", "Tim ETL", - asList(loggingFactDaily), - asList(loggingFactMonthly), - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - createProcessExecution(loadProcess3, LOAD_LOGS_MONTHLY_PROCESS_EXEC1, "hive query execution 1 for monthly summary", "Tim ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - createProcessExecution(loadProcess3, LOAD_LOGS_MONTHLY_PROCESS_EXEC2, "hive query execution 1 for monthly summary", "Tim ETL", - "create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION); - } - - private AtlasEntity createInstance(AtlasEntity entity) throws Exception { - return createInstance(new AtlasEntityWithExtInfo(entity)); - } - - private AtlasEntity createInstance(AtlasEntityWithExtInfo entityWithExtInfo) throws Exception { - AtlasEntity ret = null; - EntityMutationResponse response = atlasClientV2.createEntity(entityWithExtInfo); - List entities = response.getEntitiesByOperation(EntityOperation.CREATE); - - if (CollectionUtils.isNotEmpty(entities)) { - AtlasEntityWithExtInfo getByGuidResponse = atlasClientV2.getEntityByGuid(entities.get(0).getGuid()); - - ret = getByGuidResponse.getEntity(); - - System.out.println("Created entity of type [" + ret.getTypeName() + "], guid: " + ret.getGuid()); - } - - 
return ret; - } - - AtlasEntity createDatabase(String name, String description, String owner, String locationUri, String... classificationNames) throws Exception { - AtlasEntity entity = new AtlasEntity(DATABASE_TYPE); - - // set attributes - entity.setAttribute("name", name); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + CLUSTER_SUFFIX); - entity.setAttribute("description", description); - entity.setAttribute("owner", owner); - entity.setAttribute("locationUri", locationUri); - entity.setAttribute("createTime", System.currentTimeMillis()); - - // set classifications - entity.setClassifications(toAtlasClassifications(classificationNames)); - - return createInstance(entity); - } - - private List toAtlasClassifications(String[] classificationNames) { - List ret = new ArrayList<>(); - List classifications = asList(classificationNames); - - if (CollectionUtils.isNotEmpty(classifications)) { - for (String classificationName : classifications) { - ret.add(new AtlasClassification(classificationName)); - } - } - - return ret; - } - - AtlasEntity createStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) { - AtlasEntity ret = new AtlasEntity(STORAGE_DESC_TYPE); - - ret.setAttribute("name", "sd:" + location); - ret.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, "sd:" + location + CLUSTER_SUFFIX); - ret.setAttribute("location", location); - ret.setAttribute("inputFormat", inputFormat); - ret.setAttribute("outputFormat", outputFormat); - ret.setAttribute("compressed", compressed); - - return ret; - } - - AtlasEntity createColumn(String databaseName, String tableName, String columnName, String dataType, String comment, String... classificationNames) { - AtlasEntity ret = new AtlasEntity(COLUMN_TYPE); - - // set attributes - ret.setAttribute("name", columnName); - ret.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, databaseName + "." + tableName + "." + columnName + CLUSTER_SUFFIX); - ret.setAttribute("dataType", dataType); - ret.setAttribute("comment", comment); - - // set classifications - ret.setClassifications(toAtlasClassifications(classificationNames)); - - return ret; - } - - AtlasEntity createTable(String name, String description, AtlasEntity database, String owner, String tableType, - List columns, String... 
classificationNames) throws Exception { - AtlasEntity tblEntity = new AtlasEntity(TABLE_TYPE); - - // set attributes - tblEntity.setAttribute("name", name); - tblEntity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + CLUSTER_SUFFIX); - tblEntity.setAttribute("description", description); - tblEntity.setAttribute("owner", owner); - tblEntity.setAttribute("tableType", tableType); - tblEntity.setAttribute("createTime", System.currentTimeMillis()); - tblEntity.setAttribute("lastAccessTime", System.currentTimeMillis()); - tblEntity.setAttribute("retention", System.currentTimeMillis()); - - // set relationship attributes - AtlasEntity storageDesc = createStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true); - storageDesc.setRelationshipAttribute("table", toAtlasRelatedObjectId(tblEntity)); - - tblEntity.setRelationshipAttribute("db", toAtlasRelatedObjectId(database)); - tblEntity.setRelationshipAttribute("sd", toAtlasRelatedObjectId(storageDesc)); - tblEntity.setRelationshipAttribute("columns", toAtlasRelatedObjectIds(columns)); - - // set classifications - tblEntity.setClassifications(toAtlasClassifications(classificationNames)); - - AtlasEntityWithExtInfo entityWithExtInfo = new AtlasEntityWithExtInfo(); - - entityWithExtInfo.setEntity(tblEntity); - entityWithExtInfo.addReferredEntity(storageDesc); - - for (AtlasEntity column : columns) { - column.setRelationshipAttribute("table", toAtlasRelatedObjectId(tblEntity)); - - entityWithExtInfo.addReferredEntity(column); - } - - return createInstance(entityWithExtInfo); - } - - AtlasEntity createProcess(String name, String description, String user, List inputs, List outputs, - String queryText, String queryPlan, String queryId, String queryGraph, String... classificationNames) throws Exception { - - AtlasEntity entity = new AtlasEntity(LOAD_PROCESS_TYPE); - - // set attributes - entity.setAttribute("name", name); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + CLUSTER_SUFFIX); - entity.setAttribute("description", description); - entity.setAttribute("user", user); - entity.setAttribute("startTime", System.currentTimeMillis()); - entity.setAttribute("endTime", System.currentTimeMillis() + 10000); - entity.setAttribute("queryText", queryText); - entity.setAttribute("queryPlan", queryPlan); - entity.setAttribute("queryId", queryId); - entity.setAttribute("queryGraph", queryGraph); - - // set relationship attributes - entity.setRelationshipAttribute("inputs", toAtlasRelatedObjectIds(inputs)); - entity.setRelationshipAttribute("outputs", toAtlasRelatedObjectIds(outputs)); - - // set classifications - entity.setClassifications(toAtlasClassifications(classificationNames)); - - return createInstance(entity); - } - - AtlasEntity createProcessExecution(AtlasEntity hiveProcess, String name, String description, String user, - String queryText, String queryPlan, String queryId, String queryGraph, String... 
classificationNames) throws Exception { - - AtlasEntity entity = new AtlasEntity(LOAD_PROCESS_EXECUTION_TYPE); - Long startTime = System.currentTimeMillis(); - Long endTime = System.currentTimeMillis() + 10000; - // set attributes - entity.setAttribute("name", name); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + CLUSTER_SUFFIX + startTime.toString() + endTime.toString()); - entity.setAttribute("description", description); - entity.setAttribute("user", user); - entity.setAttribute("startTime", startTime); - entity.setAttribute("endTime", endTime); - entity.setAttribute("queryText", queryText); - entity.setAttribute("queryPlan", queryPlan); - entity.setAttribute("queryId", queryId); - entity.setAttribute("queryGraph", queryGraph); - entity.setRelationshipAttribute("process", AtlasTypeUtil.toAtlasRelatedObjectId(hiveProcess)); - - // set classifications - entity.setClassifications(toAtlasClassifications(classificationNames)); - - return createInstance(entity); - } - - AtlasEntity createView(String name, AtlasEntity database, List inputTables, String... classificationNames) throws Exception { - AtlasEntity entity = new AtlasEntity(VIEW_TYPE); - - // set attributes - entity.setAttribute("name", name); - entity.setAttribute(REFERENCEABLE_ATTRIBUTE_NAME, name + CLUSTER_SUFFIX); - - // set relationship attributes - entity.setRelationshipAttribute("db", toAtlasRelatedObjectId(database)); - entity.setRelationshipAttribute("inputTables", toAtlasRelatedObjectIds(inputTables)); - - // set classifications - entity.setClassifications(toAtlasClassifications(classificationNames)); - - return createInstance(entity); - } - - private void verifyTypesCreated() throws Exception { - MultivaluedMap searchParams = new MultivaluedMapImpl(); - - for (String typeName : TYPES) { - searchParams.clear(); - searchParams.add(SearchFilter.PARAM_NAME, typeName); - - SearchFilter searchFilter = new SearchFilter(searchParams); - AtlasTypesDef searchDefs = atlasClientV2.getAllTypeDefs(searchFilter); - - assert (!searchDefs.isEmpty()); - - System.out.println("Created type [" + typeName + "]"); - } - } - - private String[] getDSLQueries() { - return new String[]{ - "from DB", - "DB", - "DB where name=%22Reporting%22", - "DB where name=%22encode_db_name%22", - "Table where name=%2522sales_fact%2522", - "DB where name=\"Reporting\"", - "DB where DB.name=\"Reporting\"", - "DB name = \"Reporting\"", - "DB DB.name = \"Reporting\"", - "DB where name=\"Reporting\" select name, owner", - "DB where DB.name=\"Reporting\" select name, owner", - "DB has name", - "DB where DB has name", -//--TODO: Fix "DB, Table", // Table, db; Table db works - "DB is JdbcAccess", - "from Table", - "Table", - "Table is Dimension", - "Column where Column isa PII", - "View is Dimension", - "Column select Column.name", - "Column select name", - "Column where Column.name=\"customer_id\"", - "from Table select Table.name", - "DB where (name = \"Reporting\")", -//--TODO: Fix "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1", - "DB where DB is JdbcAccess", - "DB where DB has name", -//--TODO: Fix "DB Table", - "DB as db1 Table where (db1.name = \"Reporting\")", -//--TODO: Fix "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", // N - DIMENSION_CLASSIFICATION, - JDBC_CLASSIFICATION, - ETL_CLASSIFICATION, - METRIC_CLASSIFICATION, - PII_CLASSIFICATION, - "`Log Data`", - "Table where name=\"sales_fact\", columns", - "Table where name=\"sales_fact\", columns as column select column.name, 
column.dataType, column.comment", - "from DataSet", - "from Process" }; - } - - private void search() throws Exception { - System.out.println("\nSample DSL Queries: "); - - for (String dslQuery : getDSLQueries()) { - try { - AtlasSearchResult results = atlasClientV2.dslSearchWithParams(dslQuery, 10, 0); - - if (results != null) { - List entitiesResult = results.getEntities(); - List fullTextResults = results.getFullTextResult(); - AttributeSearchResult attribResult = results.getAttributes(); - - if (CollectionUtils.isNotEmpty(entitiesResult)) { - System.out.println("query [" + dslQuery + "] returned [" + entitiesResult.size() + "] rows."); - } else if (CollectionUtils.isNotEmpty(fullTextResults)) { - System.out.println("query [" + dslQuery + "] returned [" + fullTextResults.size() + "] rows."); - } else if (attribResult != null) { - System.out.println("query [" + dslQuery + "] returned [" + attribResult.getValues().size() + "] rows."); - } else { - System.out.println("query [" + dslQuery + "] returned [ 0 ] rows."); - } - } else { - System.out.println("query [" + dslQuery + "] failed, results:" + results); - } - } catch (Exception e) { - System.out.println("query [" + dslQuery + "] execution failed!"); - } - } - } - - private void lineage() throws AtlasServiceException { - System.out.println("\nSample Lineage Info: "); - - AtlasLineageInfo lineageInfo = atlasClientV2.getLineageInfo(getTableId(SALES_FACT_DAILY_MV_TABLE), LineageDirection.BOTH, 0); - Set relations = lineageInfo.getRelations(); - Map guidEntityMap = lineageInfo.getGuidEntityMap(); - - for (LineageRelation relation : relations) { - AtlasEntityHeader fromEntity = guidEntityMap.get(relation.getFromEntityId()); - AtlasEntityHeader toEntity = guidEntityMap.get(relation.getToEntityId()); - - System.out.println(fromEntity.getDisplayText() + "(" + fromEntity.getTypeName() + ") -> " + - toEntity.getDisplayText() + "(" + toEntity.getTypeName() + ")"); - } - } - - private String getTableId(String tableName) throws AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, tableName + CLUSTER_SUFFIX); - AtlasEntity tableEntity = atlasClientV2.getEntityByAttribute(TABLE_TYPE, attributes).getEntity(); - - return tableEntity.getGuid(); - } - - private void closeConnection() { - if (atlasClientV2 != null) { - atlasClientV2.close(); - } - } -} \ No newline at end of file diff --git a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java index 49c504f9f1..a1a9ac968d 100644 --- a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java +++ b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java @@ -21,6 +21,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import kafka.utils.ShutdownableThread; import org.apache.atlas.*; +import org.apache.atlas.annotation.EnableConditional; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.ha.HAConfiguration; import org.apache.atlas.kafka.AtlasKafkaMessage; @@ -71,6 +72,7 @@ import org.apache.kafka.common.TopicPartition; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.DependsOn; import org.springframework.core.annotation.Order; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; @@ -108,6 +110,7 @@ */ @Component 
@Order(5) +@EnableConditional(property = "atlas.notification.hook.enable") @DependsOn(value = {"atlasTypeDefStoreInitializer", "atlasTypeDefGraphStoreV2"}) public class NotificationHookConsumer implements Service, ActiveStateChangeHandler { private static final Logger LOG = LoggerFactory.getLogger(NotificationHookConsumer.class); @@ -652,8 +655,8 @@ void handleMessage(AtlasKafkaMessage kafkaMsg) throws AtlasSer if (auditLog == null) { auditLog = new AuditLog(messageUser, THREADNAME_PREFIX, - AtlasClient.API_V1.CREATE_ENTITY.getMethod(), - AtlasClient.API_V1.CREATE_ENTITY.getNormalizedPath()); + AtlasClientV2.API_V2.CREATE_ENTITY.getMethod(), + AtlasClientV2.API_V2.CREATE_ENTITY.getNormalizedPath()); } createOrUpdate(entities, false, stats, context); diff --git a/webapp/src/main/java/org/apache/atlas/util/AccessAuditLogsIndexCreator.java b/webapp/src/main/java/org/apache/atlas/util/AccessAuditLogsIndexCreator.java index f5786cf7d3..ee026df68b 100644 --- a/webapp/src/main/java/org/apache/atlas/util/AccessAuditLogsIndexCreator.java +++ b/webapp/src/main/java/org/apache/atlas/util/AccessAuditLogsIndexCreator.java @@ -17,7 +17,7 @@ package org.apache.atlas.util; import org.apache.atlas.AtlasConfiguration; -import org.apache.atlas.authorization.credutils.CredentialsProviderUtil; +import org.apache.atlas.audit.utils.CredentialsProviderUtil; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.configuration.Configuration; import org.apache.commons.lang.StringUtils; diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/BrotliCompressionFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/BrotliCompressionFilter.java deleted file mode 100644 index f83b0b595e..0000000000 --- a/webapp/src/main/java/org/apache/atlas/web/filters/BrotliCompressionFilter.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.apache.atlas.web.filters; - -import com.aayushatharva.brotli4j.Brotli4jLoader; -import com.aayushatharva.brotli4j.encoder.Encoder; -import com.aayushatharva.brotli4j.encoder.Encoder.Parameters; - -import javax.servlet.*; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import java.io.IOException; - -public class BrotliCompressionFilter implements Filter { - - @Override - public void init(FilterConfig filterConfig) { - Brotli4jLoader.ensureAvailability(); - } - - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) - throws IOException, ServletException { - - // Ensure request and response are HttpServletRequest and HttpServletResponse - if (request instanceof HttpServletRequest && response instanceof HttpServletResponse) { - HttpServletRequest httpRequest = (HttpServletRequest) request; - HttpServletResponse httpResponse = (HttpServletResponse) response; - - String acceptEncoding = httpRequest.getHeader("Accept-Encoding"); - - // Check if the client supports Brotli compression - if (acceptEncoding != null && acceptEncoding.contains("br")) { - // Wrap the response with a Brotli compression wrapper - BrotliResponseWrapper responseWrapper = new BrotliResponseWrapper(httpResponse); - chain.doFilter(request, responseWrapper); - - // Compress the response content with Brotli - byte[] uncompressedData = responseWrapper.getOutputStreamData(); - Parameters params = new Parameters().setQuality(6); // Set Brotli quality level - byte[] compressedOutput = Encoder.compress(uncompressedData, params); - - // Write Brotli-compressed data to the actual response - 
httpResponse.setHeader("Content-Encoding", "br"); - httpResponse.setContentLength(compressedOutput.length); - httpResponse.getOutputStream().write(compressedOutput); - } else { - // Proceed without compression - chain.doFilter(request, response); - } - } else { - // Proceed without compression if not HTTP - chain.doFilter(request, response); - } - } - - @Override - public void destroy() { - // Optional: Add cleanup logic here if needed - } -} diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/BrotliResponseWrapper.java b/webapp/src/main/java/org/apache/atlas/web/filters/BrotliResponseWrapper.java deleted file mode 100644 index e8cdbb860b..0000000000 --- a/webapp/src/main/java/org/apache/atlas/web/filters/BrotliResponseWrapper.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.apache.atlas.web.filters; - -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpServletResponseWrapper; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.PrintWriter; - -public class BrotliResponseWrapper extends HttpServletResponseWrapper { - - private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - private ServletOutputStream servletOutputStream; - private PrintWriter writer; - - public BrotliResponseWrapper(HttpServletResponse response) { - super(response); - } - - @Override - public ServletOutputStream getOutputStream() throws IOException { - if (servletOutputStream == null) { - servletOutputStream = new ServletOutputStream() { - @Override - public void write(int b) { - outputStream.write(b); - } - - @Override - public boolean isReady() { - return true; - } - - @Override - public void setWriteListener(javax.servlet.WriteListener listener) { - // No-op for this example - } - }; - } - return servletOutputStream; - } - - @Override - public PrintWriter getWriter() throws IOException { - if (writer == null) { - writer = new PrintWriter(outputStream); - } - return writer; - } - - public byte[] getOutputStreamData() { - return outputStream.toByteArray(); - } -} diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java index 71d773a879..0f8d8ed6b3 100755 --- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java @@ -20,7 +20,6 @@ import com.sun.jersey.multipart.FormDataParam; import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.authorize.AtlasAdminAccessRequest; @@ -123,6 +122,7 @@ import static org.apache.atlas.AtlasErrorCode.DEPRECATED_API; import static org.apache.atlas.AtlasErrorCode.DISABLED_API; +import static org.apache.atlas.repository.Constants.STATUS; import static org.apache.atlas.web.filters.AtlasCSRFPreventionFilter.CSRF_TOKEN; @@ -327,7 +327,7 @@ public Response getStatus() { } Map responseData = new HashMap() {{ - put(AtlasClient.STATUS, serviceState.getState().toString()); + put(STATUS, serviceState.getState().toString()); }}; if(serviceState.isInstanceInMigration()) { diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java index ba94ff263c..7d18dcf626 100644 --- a/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java +++ 
b/webapp/src/main/java/org/apache/atlas/web/rest/DiscoveryREST.java @@ -17,7 +17,6 @@ */ package org.apache.atlas.web.rest; -import org.apache.atlas.AtlasClient; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.RequestContext; @@ -68,6 +67,7 @@ import java.util.Set; import java.util.Arrays; +import static org.apache.atlas.repository.Constants.NAME; import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; import static org.apache.atlas.repository.Constants.REQUEST_HEADER_HOST; import static org.apache.atlas.repository.Constants.REQUEST_HEADER_USER_AGENT; @@ -297,7 +297,7 @@ public AtlasSearchResult searchUsingAttribute(@QueryParam("attrName") Str AtlasEntityType entityType = typeRegistry.getEntityTypeByName(typeName); if (entityType != null) { - String[] defaultAttrNames = new String[] { AtlasClient.QUALIFIED_NAME, AtlasClient.NAME }; + String[] defaultAttrNames = new String[] { QUALIFIED_NAME, NAME }; for (String defaultAttrName : defaultAttrNames) { AtlasStructType.AtlasAttribute attribute = entityType.getAttribute(defaultAttrName); @@ -311,14 +311,14 @@ public AtlasSearchResult searchUsingAttribute(@QueryParam("attrName") Str } if (StringUtils.isEmpty(attrName)) { - attrName = AtlasClient.QUALIFIED_NAME; + attrName = QUALIFIED_NAME; } } SearchParameters searchParams = new SearchParameters(); FilterCriteria attrFilter = new FilterCriteria(); - attrFilter.setAttributeName(StringUtils.isEmpty(attrName) ? AtlasClient.QUALIFIED_NAME : attrName); + attrFilter.setAttributeName(StringUtils.isEmpty(attrName) ? QUALIFIED_NAME : attrName); attrFilter.setOperator(SearchParameters.Operator.STARTS_WITH); attrFilter.setAttributeValue(attrValuePrefix); diff --git a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java index 5333d63e7b..b6a17a75fe 100644 --- a/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java +++ b/webapp/src/main/java/org/apache/atlas/web/rest/EntityREST.java @@ -49,6 +49,7 @@ import org.apache.atlas.type.AtlasType; import org.apache.atlas.type.AtlasTypeRegistry; import org.apache.atlas.util.FileUtils; +import org.apache.atlas.util.RepairIndex; import org.apache.atlas.utils.AtlasPerfMetrics; import org.apache.atlas.utils.AtlasPerfTracer; import org.apache.atlas.web.util.Servlets; @@ -58,7 +59,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; -import org.apache.atlas.tools.RepairIndex; +//import org.apache.atlas.tools.RepairIndex; import javax.inject.Inject; import javax.inject.Singleton; diff --git a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java index 4fbfb88b7e..c9d5ece761 100755 --- a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java +++ b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java @@ -67,7 +67,7 @@ public EmbeddedServer(String host, int port, String path) throws IOException { int minThreads = AtlasConfiguration.WEBSERVER_MIN_THREADS.getInt(); int maxThreads = AtlasConfiguration.WEBSERVER_MAX_THREADS.getInt(); long keepAliveTime = AtlasConfiguration.WEBSERVER_KEEPALIVE_SECONDS.getLong(); - ThreadPoolExecutor executor = new ThreadPoolExecutor(maxThreads, maxThreads, keepAliveTime, TimeUnit.SECONDS, queue); + ThreadPoolExecutor executor = new ThreadPoolExecutor(minThreads, maxThreads, keepAliveTime, TimeUnit.SECONDS, queue); 
ExecutorThreadPool pool = new ExecutorThreadPool(executor, minThreads); server = new Server(pool); diff --git a/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java b/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java index 562d9b720d..693f466bc7 100644 --- a/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java +++ b/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java @@ -17,7 +17,6 @@ */ package org.apache.atlas.web.util; -import org.apache.atlas.AtlasClient; import org.apache.atlas.exception.AtlasBaseException; import org.apache.atlas.model.instance.AtlasEntity; import org.apache.atlas.model.instance.AtlasEntityHeader; @@ -36,6 +35,9 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import static org.apache.atlas.repository.Constants.NAME; +import static org.apache.atlas.repository.Constants.QUALIFIED_NAME; + public final class LineageUtils { private LineageUtils() {} @@ -67,15 +69,15 @@ public static Struct toLineageStruct(AtlasLineageInfo lineageInfo, AtlasTypeRegi vertexIdMap.put(Constants.ATTRIBUTE_NAME_STATE, (entityHeader.getStatus() == AtlasEntity.Status.ACTIVE) ? "ACTIVE" : "DELETED"); vertexIdMap.put(Constants.ATTRIBUTE_NAME_TYPENAME, entityHeader.getTypeName()); - Object qualifiedName = entityHeader.getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME); + Object qualifiedName = entityHeader.getAttribute(QUALIFIED_NAME); if (qualifiedName == null) { qualifiedName = entityHeader.getDisplayText(); } Map values = new HashMap<>(); - values.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName); + values.put(QUALIFIED_NAME, qualifiedName); values.put(VERTEX_ID_ATTR_NAME, constructResultStruct(vertexIdMap, true)); - values.put(AtlasClient.NAME, entityHeader.getDisplayText()); + values.put(NAME, entityHeader.getDisplayText()); verticesMap.put(guid, constructResultStruct(values, false)); } } diff --git a/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java b/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java index f27ce1974b..bd5b12c404 100755 --- a/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java +++ b/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java @@ -19,7 +19,6 @@ package org.apache.atlas.web.util; import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.atlas.AtlasClient; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.AtlasErrorCode; import org.apache.atlas.LocalServletRequest; @@ -47,6 +46,8 @@ import java.util.Map; import org.springframework.web.util.UriUtils; +import static org.apache.atlas.repository.Constants.ERROR; + /** * Utility functions for dealing with servlets. 
*/ @@ -156,7 +157,7 @@ public static Response getErrorResponse(Throwable e, Response.Status status) { public static Response getErrorResponse(String message, Response.Status status) { Object errorEntity = escapeJsonString(message); - ObjectNode errorJson = AtlasJson.createV1ObjectNode(AtlasClient.ERROR, errorEntity); + ObjectNode errorJson = AtlasJson.createV1ObjectNode(ERROR, errorEntity); return Response.status(status).entity(errorJson).type(JSON_MEDIA_TYPE).build(); } diff --git a/webapp/src/main/webapp/WEB-INF/web.xml b/webapp/src/main/webapp/WEB-INF/web.xml index 291c871a8c..590901b279 100755 --- a/webapp/src/main/webapp/WEB-INF/web.xml +++ b/webapp/src/main/webapp/WEB-INF/web.xml @@ -114,16 +114,6 @@ /api/atlas/admin/status - - brotliFilter - org.apache.atlas.web.filters.BrotliCompressionFilter - - - - brotliFilter - /* - - org.springframework.web.context.request.RequestContextListener diff --git a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java b/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java deleted file mode 100644 index f66037595f..0000000000 --- a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java +++ /dev/null @@ -1,153 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.examples; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.web.integration.BaseResourceIT; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.List; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.AssertJUnit.assertTrue; - -public class QuickStartIT extends BaseResourceIT { - - @BeforeClass - public void runQuickStart() throws Exception { - super.setUp(); - - QuickStart.runQuickstart(new String[]{}, new String[]{"admin", "admin"}); - } - - @Test - public void testDBIsAdded() throws Exception { - Referenceable db = getDB(QuickStart.SALES_DB); - assertEquals(QuickStart.SALES_DB, db.get("name")); - assertEquals(QuickStart.SALES_DB_DESCRIPTION, db.get("description")); - } - - private Referenceable getDB(String dbName) throws AtlasServiceException { - return atlasClientV1.getEntity(QuickStart.DATABASE_TYPE, "name", dbName); - } - - @Test - public void testTablesAreAdded() throws AtlasServiceException { - Referenceable table = getTable(QuickStart.SALES_FACT_TABLE); - verifySimpleTableAttributes(table); - - verifyDBIsLinkedToTable(table); - - verifyColumnsAreAddedToTable(table); - - verifyTrait(table); - } - - private Referenceable getTable(String tableName) throws AtlasServiceException { - return atlasClientV1.getEntity(QuickStart.TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - } - - private void verifyTrait(Referenceable table) { - assertNotNull(table.getTrait(QuickStart.FACT_TRAIT)); - } - - private void verifyColumnsAreAddedToTable(Referenceable table) { - List columns = (List) table.get(QuickStart.COLUMNS_ATTRIBUTE); - assertEquals(4, columns.size()); - Referenceable column = columns.get(0); - assertEquals(QuickStart.TIME_ID_COLUMN, column.get("name")); - assertEquals("int", column.get("dataType")); - } - - private void verifyDBIsLinkedToTable(Referenceable table) throws AtlasServiceException { - Referenceable db = getDB(QuickStart.SALES_DB); - assertEquals(db.getId(), table.get(QuickStart.DB_ATTRIBUTE)); - } - - private void verifySimpleTableAttributes(Referenceable table) { - assertEquals(QuickStart.SALES_FACT_TABLE, table.get("name")); - assertEquals(QuickStart.SALES_FACT_TABLE_DESCRIPTION, table.get("description")); - } - - @Test - public void testProcessIsAdded() throws AtlasServiceException { - Referenceable loadProcess = atlasClientV1.getEntity(QuickStart.LOAD_PROCESS_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - QuickStart.LOAD_SALES_DAILY_PROCESS); - - assertEquals(QuickStart.LOAD_SALES_DAILY_PROCESS, loadProcess.get(AtlasClient.NAME)); - assertEquals(QuickStart.LOAD_SALES_DAILY_PROCESS_DESCRIPTION, loadProcess.get("description")); - - List inputs = (List)loadProcess.get(QuickStart.INPUTS_ATTRIBUTE); - List outputs = (List)loadProcess.get(QuickStart.OUTPUTS_ATTRIBUTE); - assertEquals(2, inputs.size()); - String salesFactTableId = getTableId(QuickStart.SALES_FACT_TABLE); - String timeDimTableId = getTableId(QuickStart.TIME_DIM_TABLE); - String salesFactDailyMVId = getTableId(QuickStart.SALES_FACT_DAILY_MV_TABLE); - - assertEquals(salesFactTableId, inputs.get(0)._getId()); - 
assertEquals(timeDimTableId, inputs.get(1)._getId()); - assertEquals(salesFactDailyMVId, outputs.get(0)._getId()); - } - - private String getTableId(String tableName) throws AtlasServiceException { - return getTable(tableName).getId()._getId(); - } - - @Test - public void testLineageIsMaintained() throws AtlasServiceException { - String salesFactTableId = getTableId(QuickStart.SALES_FACT_TABLE); - String timeDimTableId = getTableId(QuickStart.TIME_DIM_TABLE); - String salesFactDailyMVId = getTableId(QuickStart.SALES_FACT_DAILY_MV_TABLE); - - ObjectNode inputGraph = atlasClientV1.getInputGraphForEntity(salesFactDailyMVId); - JsonNode vertices = inputGraph.get("values").get("vertices"); - JsonNode edges = inputGraph.get("values").get("edges"); - - assertTrue(vertices.has(salesFactTableId)); - assertTrue(vertices.has(timeDimTableId)); - assertTrue(vertices.has(salesFactDailyMVId)); - - assertTrue(edges.has(salesFactDailyMVId)); - ArrayNode inputs = (ArrayNode)edges.get((edges.get(salesFactDailyMVId)).get(0).asText()); - String i1 = inputs.get(0).asText(); - String i2 = inputs.get(1).asText(); - assertTrue(salesFactTableId.equals(i1) || salesFactTableId.equals(i2)); - assertTrue(timeDimTableId.equals(i1) || timeDimTableId.equals(i2)); - } - - @Test - public void testViewIsAdded() throws AtlasServiceException { - - Referenceable view = atlasClientV1.getEntity(QuickStart.VIEW_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, QuickStart.PRODUCT_DIM_VIEW); - - assertEquals(QuickStart.PRODUCT_DIM_VIEW, view.get(AtlasClient.NAME)); - - Id productDimId = getTable(QuickStart.PRODUCT_DIM_TABLE).getId(); - Id inputTableId = ((List) view.get(QuickStart.INPUT_TABLES_ATTRIBUTE)).get(0); - assertEquals(productDimId, inputTableId); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java b/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java deleted file mode 100644 index c65f6d3d4b..0000000000 --- a/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java +++ /dev/null @@ -1,228 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.examples; - -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo; -import org.apache.atlas.model.lineage.AtlasLineageInfo; -import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection; -import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageRelation; -import org.apache.atlas.web.integration.BaseResourceIT; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -import static org.apache.atlas.AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME; -import static org.apache.atlas.examples.QuickStartV2.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.AssertJUnit.assertTrue; - -public class QuickStartV2IT extends BaseResourceIT { - - @BeforeClass - public void runQuickStart() throws Exception { - super.setUp(); - - QuickStartV2.runQuickstart(new String[]{}, new String[]{"admin", "admin"}); - } - - @Test - public void testDBIsAdded() throws Exception { - AtlasEntity db = getDB(SALES_DB); - Map dbAttributes = db.getAttributes(); - - assertEquals(SALES_DB, dbAttributes.get("name")); - assertEquals("sales database", dbAttributes.get("description")); - } - - private AtlasEntity getDB(String dbName) throws AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, dbName + CLUSTER_SUFFIX); - AtlasEntity dbEntity = atlasClientV2.getEntityByAttribute(QuickStartV2.DATABASE_TYPE, attributes).getEntity(); - - return dbEntity; - } - - @Test - public void testTablesAreAdded() throws AtlasServiceException { - AtlasEntity table = getTable(SALES_FACT_TABLE); - - verifySimpleTableAttributes(table); - - verifyDBIsLinkedToTable(table); - - verifyColumnsAreAddedToTable(table); - - verifyTrait(table); - } - - @Test - public void testTablesAreAdded2() throws AtlasServiceException { - List tableNames = Arrays.asList(SALES_FACT_TABLE, PRODUCT_DIM_TABLE, CUSTOMER_DIM_TABLE, - TIME_DIM_TABLE, SALES_FACT_DAILY_MV_TABLE, SALES_FACT_MONTHLY_MV_TABLE, - LOG_FACT_DAILY_MV_TABLE, LOG_FACT_MONTHLY_MV_TABLE); - - AtlasEntitiesWithExtInfo entities = getTables(tableNames); - - assertNotNull(entities); - assertNotNull(entities.getEntities()); - assertEquals(entities.getEntities().size(), tableNames.size()); - } - - private AtlasEntity getTable(String tableName) throws AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, tableName + CLUSTER_SUFFIX); - AtlasEntity tableEntity = atlasClientV2.getEntityByAttribute(QuickStartV2.TABLE_TYPE, attributes).getEntity(); - - return tableEntity; - } - - private AtlasEntitiesWithExtInfo getTables(List tableNames) throws AtlasServiceException { - List> attributesList = new ArrayList<>(); - - for (String tableName : tableNames) { - attributesList.add(Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, tableName + CLUSTER_SUFFIX)); - } - - return atlasClientV2.getEntitiesByAttribute(QuickStartV2.TABLE_TYPE, attributesList); - } - - private AtlasEntity getProcess(String processName) throws 
AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, processName + CLUSTER_SUFFIX); - AtlasEntity processEntity = atlasClientV2.getEntityByAttribute(LOAD_PROCESS_TYPE, attributes).getEntity(); - - return processEntity; - } - - - private void verifyTrait(AtlasEntity table) throws AtlasServiceException { - AtlasClassifications classfications = atlasClientV2.getClassifications(table.getGuid()); - List traits = classfications.getList(); - - assertNotNull(traits.get(0).getTypeName()); - } - - private void verifyColumnsAreAddedToTable(AtlasEntity table) { - Map tableAttributes = table.getRelationshipAttributes(); - List columns = (List) tableAttributes.get("columns"); - - assertEquals(4, columns.size()); - - for (Map colMap : columns) { - String colGuid = (String) colMap.get("guid"); - - assertNotNull(UUID.fromString(colGuid)); - } - } - - private void verifyDBIsLinkedToTable(AtlasEntity table) throws AtlasServiceException { - AtlasEntity db = getDB(SALES_DB); - Map tableAttributes = table.getRelationshipAttributes(); - Map dbFromTable = (Map) tableAttributes.get("db"); - - assertEquals(db.getGuid(), dbFromTable.get("guid")); - } - - private void verifySimpleTableAttributes(AtlasEntity table) { - Map tableAttributes = table.getAttributes(); - - assertEquals(SALES_FACT_TABLE, tableAttributes.get("name")); - assertEquals("sales fact table", tableAttributes.get("description")); - } - - @Test - public void testProcessIsAdded() throws AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, LOAD_SALES_DAILY_PROCESS + CLUSTER_SUFFIX); - AtlasEntity loadProcess = atlasClientV2.getEntityByAttribute(LOAD_PROCESS_TYPE, attributes).getEntity(); - Map loadProcessAttribs = loadProcess.getAttributes(); - - assertEquals(LOAD_SALES_DAILY_PROCESS, loadProcessAttribs.get(NAME)); - assertEquals("hive query for daily summary", loadProcessAttribs.get("description")); - - List inputs = (List) loadProcessAttribs.get("inputs"); - List outputs = (List) loadProcessAttribs.get("outputs"); - - assertEquals(2, inputs.size()); - - String salesFactTableId = getTableId(SALES_FACT_TABLE); - String timeDimTableId = getTableId(TIME_DIM_TABLE); - String salesFactDailyMVId = getTableId(SALES_FACT_DAILY_MV_TABLE); - - assertEquals(salesFactTableId, ((Map) inputs.get(0)).get("guid")); - assertEquals(timeDimTableId, ((Map) inputs.get(1)).get("guid")); - assertEquals(salesFactDailyMVId, ((Map) outputs.get(0)).get("guid")); - } - - private String getTableId(String tableName) throws AtlasServiceException { - return getTable(tableName).getGuid(); - } - - private String getProcessId(String processName) throws AtlasServiceException { - return getProcess(processName).getGuid(); - } - - @Test - public void testLineageIsMaintained() throws AtlasServiceException { - String salesFactTableId = getTableId(SALES_FACT_TABLE); - String timeDimTableId = getTableId(TIME_DIM_TABLE); - String salesFactDailyMVId = getTableId(SALES_FACT_DAILY_MV_TABLE); - String salesFactMonthlyMvId = getTableId(SALES_FACT_MONTHLY_MV_TABLE); - String salesDailyProcessId = getProcessId(LOAD_SALES_DAILY_PROCESS); - String salesMonthlyProcessId = getProcessId(LOAD_SALES_MONTHLY_PROCESS); - - AtlasLineageInfo inputLineage = atlasClientV2.getLineageInfo(salesFactDailyMVId, LineageDirection.BOTH, 0); - List relations = new ArrayList<>(inputLineage.getRelations()); - Map entityMap = inputLineage.getGuidEntityMap(); - - assertEquals(relations.size(), 5); - assertEquals(entityMap.size(), 6); - - 
assertTrue(entityMap.containsKey(salesFactTableId)); - assertTrue(entityMap.containsKey(timeDimTableId)); - assertTrue(entityMap.containsKey(salesFactDailyMVId)); - assertTrue(entityMap.containsKey(salesDailyProcessId)); - assertTrue(entityMap.containsKey(salesFactMonthlyMvId)); - assertTrue(entityMap.containsKey(salesMonthlyProcessId)); - } - - @Test - public void testViewIsAdded() throws AtlasServiceException { - Map attributes = Collections.singletonMap(REFERENCEABLE_ATTRIBUTE_NAME, PRODUCT_DIM_VIEW + CLUSTER_SUFFIX); - AtlasEntity view = atlasClientV2.getEntityByAttribute(VIEW_TYPE, attributes).getEntity(); - Map viewAttributes = view.getAttributes(); - Map viewRelationshipAttributes = view.getRelationshipAttributes(); - - assertEquals(PRODUCT_DIM_VIEW, viewAttributes.get(NAME)); - - String productDimId = getTable(PRODUCT_DIM_TABLE).getGuid(); - List inputTables = (List) viewRelationshipAttributes.get("inputTables"); - Map inputTablesMap = (Map) inputTables.get(0); - - assertEquals(productDimId, inputTablesMap.get("guid")); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java b/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java deleted file mode 100644 index aaa51bda9c..0000000000 --- a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java +++ /dev/null @@ -1,201 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.notification; - -import org.apache.atlas.AtlasClient; -import org.apache.atlas.kafka.NotificationProvider; -import org.apache.atlas.notification.NotificationInterface.NotificationType; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.instance.Struct; -import org.apache.atlas.v1.model.notification.EntityNotificationV1; -import org.apache.atlas.v1.model.notification.EntityNotificationV1.OperationType; -import org.apache.atlas.v1.model.typedef.*; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.atlas.web.integration.BaseResourceIT; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; - -import java.util.*; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertTrue; - -/** - * Entity Notification Integration Tests. - */ -public class EntityNotificationIT extends BaseResourceIT { - private final String DATABASE_NAME = "db" + randomString(); - private final String TABLE_NAME = "table" + randomString(); - private Id tableId; - private Id dbId; - private String traitName; - private NotificationConsumer notificationConsumer; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - initNotificationService(); - - createTypeDefinitionsV1(); - - Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(DATABASE_NAME); - - dbId = createInstance(HiveDBInstance); - - notificationConsumer = notificationInterface.createConsumers(NotificationType.ENTITIES, 1).get(0); - } - - @AfterClass - public void teardown() throws Exception { - cleanUpNotificationService(); - } - - public void testCreateEntity() throws Exception { - Referenceable tableInstance = createHiveTableInstanceBuiltIn(DATABASE_NAME, TABLE_NAME, dbId); - - tableId = createInstance(tableInstance); - - final String guid = tableId._getId(); - - waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid)); - } - - public void testUpdateEntity() throws Exception { - final String property = "description"; - final String newValue = "New description!"; - - final String guid = tableId._getId(); - - atlasClientV1.updateEntityAttribute(guid, property, newValue); - - waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE_BUILTIN, guid)); - } - - public void testDeleteEntity() throws Exception { - final String tableName = "table-" + randomString(); - final String dbName = "db-" + randomString(); - final Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - final Id dbId = createInstance(HiveDBInstance); - final Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - final Id tableId = createInstance(tableInstance); - final String guid = tableId._getId(); - - waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid)); - - final 
String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME); - - atlasClientV1.deleteEntity(HIVE_TABLE_TYPE_BUILTIN, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - - waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid)); - } - - public void testAddTrait() throws Exception { - String superSuperTraitName = "SuperTrait" + randomString(); - String superTraitName = "SuperTrait" + randomString(); - - traitName = "Trait" + randomString(); - - createTrait(superSuperTraitName); - createTrait(superTraitName, superSuperTraitName); - createTrait(traitName, superTraitName); - - Struct traitInstance = new Struct(traitName); - String traitInstanceJSON = AtlasType.toV1Json(traitInstance); - - LOG.debug("Trait instance = {}", traitInstanceJSON); - - final String guid = tableId._getId(); - - atlasClientV1.addTrait(guid, traitInstance); - - EntityNotificationV1 entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid)); - - Referenceable entity = entityNotification.getEntity(); - - assertTrue(entity.getTraitNames().contains(traitName)); - - List allTraits = entityNotification.getAllTraits(); - List allTraitNames = new LinkedList<>(); - - for (Struct struct : allTraits) { - allTraitNames.add(struct.getTypeName()); - } - - assertTrue(allTraitNames.contains(traitName)); - assertTrue(allTraitNames.contains(superTraitName)); - assertTrue(allTraitNames.contains(superSuperTraitName)); - - String anotherTraitName = "Trait" + randomString(); - - createTrait(anotherTraitName, superTraitName); - - traitInstance = new Struct(anotherTraitName); - traitInstanceJSON = AtlasType.toV1Json(traitInstance); - - LOG.debug("Trait instance = {}", traitInstanceJSON); - - atlasClientV1.addTrait(guid, traitInstance); - - entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid)); - - allTraits = entityNotification.getAllTraits(); - allTraitNames = new LinkedList<>(); - - for (Struct struct : allTraits) { - allTraitNames.add(struct.getTypeName()); - } - - assertTrue(allTraitNames.contains(traitName)); - assertTrue(allTraitNames.contains(anotherTraitName)); - // verify that the super type shows up twice in all traits - assertEquals(2, Collections.frequency(allTraitNames, superTraitName)); - } - - public void testDeleteTrait() throws Exception { - final String guid = tableId._getId(); - - atlasClientV1.deleteTrait(guid, traitName); - - EntityNotificationV1 entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME, - newNotificationPredicate(EntityNotificationV1.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid)); - - assertFalse(entityNotification.getEntity().getTraitNames().contains(traitName)); - } - - - // ----- helper methods --------------------------------------------------- - - private void createTrait(String traitName, String ... 
superTraitNames) throws Exception { - TraitTypeDefinition traitDef = TypesUtil.createTraitTypeDef(traitName, null, new HashSet<>(Arrays.asList(superTraitNames))); - TypesDef typesDef = new TypesDef(Collections.emptyList(), - Collections.emptyList(), - Collections.singletonList(traitDef), - Collections.emptyList()); - String traitDefinitionJSON = AtlasType.toV1Json(typesDef); - - LOG.debug("Trait definition = {}", traitDefinitionJSON); - - createType(traitDefinitionJSON); - } - -} diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java deleted file mode 100644 index 6e24f52984..0000000000 --- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java +++ /dev/null @@ -1,262 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.notification; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.model.notification.HookNotification; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest; -import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityPartialUpdateRequest; -import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest; -import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest; -import org.apache.atlas.web.integration.BaseResourceIT; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.List; - -import static java.lang.Thread.sleep; -import static org.testng.Assert.assertEquals; - -public class NotificationHookConsumerIT extends BaseResourceIT { - private static final String TEST_USER = "testuser"; - - public static final String NAME = "name"; - public static final String DESCRIPTION = "description"; - public static final String QUALIFIED_NAME = "qualifiedName"; - public static final String CLUSTER_NAME = "clusterName"; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - initNotificationService(); - - createTypeDefinitionsV1(); - } - - @AfterClass - public void teardown() throws Exception { - cleanUpNotificationService(); - } - - private void sendHookMessage(HookNotification message) throws NotificationException, InterruptedException { - notificationInterface.send(NotificationInterface.NotificationType.HOOK, message); - - sleep(1000); - } - - @Test - public void testMessageHandleFailureConsumerContinues() throws Exception { - //send invalid message - update with invalid type - sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, randomString(), null, null, new Referenceable(randomString()))); - - //send valid message - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, dbName); - entity.set(CLUSTER_NAME, randomString()); - - sendHookMessage(new EntityCreateRequest(TEST_USER, entity)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override - public boolean evaluate() throws Exception { - ArrayNode results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_BUILTIN, entity.get(NAME))); - - return results.size() == 1; - } - }); - } - - @Test - public void testCreateEntity() throws Exception { - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - final String clusterName = randomString(); - final String qualifiedName = dbName + "@" + clusterName; - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, qualifiedName); - entity.set(CLUSTER_NAME, clusterName); - - sendHookMessage(new EntityCreateRequest(TEST_USER, entity)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override 
- public boolean evaluate() throws Exception { - ArrayNode results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, entity.get(QUALIFIED_NAME))); - - return results.size() == 1; - } - }); - - //Assert that user passed in hook message is used in audit - Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME)); - List events = atlasClientV1.getEntityAuditEvents(instance.getId()._getId(), (short) 1); - - assertEquals(events.size(), 1); - assertEquals(events.get(0).getUser(), TEST_USER); - } - - @Test - public void testUpdateEntityPartial() throws Exception { - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - final String clusterName = randomString(); - final String qualifiedName = dbName + "@" + clusterName; - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, qualifiedName); - entity.set(CLUSTER_NAME, clusterName); - - atlasClientV1.createEntity(entity); - - final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN); - - newEntity.set("owner", randomString()); - - sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME), newEntity)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override - public boolean evaluate() throws Exception { - Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName); - - return (localEntity.get("owner") != null && localEntity.get("owner").equals(newEntity.get("owner"))); - } - }); - - //Its partial update and un-set fields are not updated - Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME)); - - assertEquals(actualEntity.get(DESCRIPTION), entity.get(DESCRIPTION)); - } - - @Test - public void testUpdatePartialUpdatingQualifiedName() throws Exception { - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - final String clusterName = randomString(); - final String qualifiedName = dbName + "@" + clusterName; - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, qualifiedName); - entity.set(CLUSTER_NAME, clusterName); - - atlasClientV1.createEntity(entity); - - final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String newName = "db" + randomString(); - final String newQualifiedName = newName + "@" + clusterName; - - newEntity.set(QUALIFIED_NAME, newQualifiedName); - - sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName, newEntity)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override - public boolean evaluate() throws Exception { - ArrayNode results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newQualifiedName)); - - return results.size() == 1; - } - }); - - //no entity with the old qualified name - ArrayNode results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, qualifiedName)); - - assertEquals(results.size(), 0); - } - - @Test - public void testDeleteByQualifiedName() throws Exception { - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - final String clusterName = randomString(); - final 
String qualifiedName = dbName + "@" + clusterName; - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, qualifiedName); - entity.set(CLUSTER_NAME, clusterName); - - final String dbId = atlasClientV1.createEntity(entity).get(0); - - sendHookMessage(new EntityDeleteRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override - public boolean evaluate() throws Exception { - Referenceable getEntity = atlasClientV1.getEntity(dbId); - - return getEntity.getId().getState() == Id.EntityState.DELETED; - } - }); - } - - @Test - public void testUpdateEntityFullUpdate() throws Exception { - final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - final String dbName = "db" + randomString(); - final String clusterName = randomString(); - final String qualifiedName = dbName + "@" + clusterName; - - entity.set(NAME, dbName); - entity.set(DESCRIPTION, randomString()); - entity.set(QUALIFIED_NAME, qualifiedName); - entity.set(CLUSTER_NAME, clusterName); - - atlasClientV1.createEntity(entity); - - final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN); - - newEntity.set(NAME, dbName); - newEntity.set(DESCRIPTION, randomString()); - newEntity.set("owner", randomString()); - newEntity.set(QUALIFIED_NAME, qualifiedName); - newEntity.set(CLUSTER_NAME, clusterName); - - //updating unique attribute - sendHookMessage(new EntityUpdateRequest(TEST_USER, newEntity)); - - waitFor(MAX_WAIT_TIME, new Predicate() { - @Override - public boolean evaluate() throws Exception { - ArrayNode results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newEntity.get(QUALIFIED_NAME))); - - return results.size() == 1; - } - }); - - Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName); - - assertEquals(actualEntity.get(DESCRIPTION), newEntity.get(DESCRIPTION)); - assertEquals(actualEntity.get("owner"), newEntity.get("owner")); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java index fdfc2560de..7f8368da57 100644 --- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java +++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java @@ -18,7 +18,6 @@ package org.apache.atlas.notification; -import org.apache.atlas.AtlasClient; import org.apache.atlas.AtlasException; import org.apache.atlas.AtlasServiceException; import org.apache.atlas.exception.AtlasBaseException; @@ -49,6 +48,7 @@ import java.util.List; import java.util.Properties; +import static org.apache.atlas.repository.Constants.DATA_SET_SUPER_TYPE; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; @@ -207,7 +207,7 @@ void consumeOneMessage(NotificationConsumer consumer, } Referenceable createEntity() { - final Referenceable entity = new Referenceable(AtlasClient.DATA_SET_SUPER_TYPE); + final Referenceable entity = new Referenceable(DATA_SET_SUPER_TYPE); entity.set(NAME, "db" + randomString()); entity.set(DESCRIPTION, randomString()); @@ -217,7 +217,7 @@ Referenceable createEntity() { } AtlasEntity createV2Entity() { - final AtlasEntity entity = new AtlasEntity(AtlasClient.DATA_SET_SUPER_TYPE); + final AtlasEntity entity = new 
AtlasEntity(DATA_SET_SUPER_TYPE); entity.setAttribute(NAME, "db" + randomString()); entity.setAttribute(DESCRIPTION, randomString()); diff --git a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java index 88c702893e..e7cfe40de6 100644 --- a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java +++ b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java @@ -28,12 +28,12 @@ import static org.apache.atlas.repository.Constants.CLASSIFICATION_NAMES_KEY; import static org.apache.atlas.repository.Constants.CUSTOM_ATTRIBUTES_PROPERTY_KEY; import static org.apache.atlas.repository.Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY; +import static org.apache.atlas.repository.Constants.NAME; import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY; import static org.apache.atlas.repository.Constants.TIMESTAMP_PROPERTY_KEY; import static org.apache.atlas.repository.Constants.TYPE_NAME_PROPERTY_KEY; import static org.apache.atlas.utils.TestLoadModelUtils.createTypesAsNeeded; -import org.apache.atlas.AtlasClient; import org.apache.atlas.RequestContext; import org.apache.atlas.TestModules; import org.apache.atlas.TestUtilsV2; @@ -814,7 +814,7 @@ private void verifyAttributes(List retrievedEntities) throws Except if ( retrievedColumnEntity != null) { LOG.info("verifying entity of type {} ", columns.get(0).getTypeName()); - Assert.assertEquals(columns.get(0).getAttribute(AtlasClient.NAME), retrievedColumnEntity.getAttribute(AtlasClient.NAME)); + Assert.assertEquals(columns.get(0).getAttribute(NAME), retrievedColumnEntity.getAttribute(NAME)); Assert.assertEquals(columns.get(0).getAttribute("type"), retrievedColumnEntity.getAttribute("type")); } @@ -822,7 +822,7 @@ private void verifyAttributes(List retrievedEntities) throws Except LOG.info("verifying entity of type {} ", tableEntity.getTypeName()); //String - Assert.assertEquals(tableEntity.getAttribute(AtlasClient.NAME), retrievedTableEntity.getAttribute(AtlasClient.NAME)); + Assert.assertEquals(tableEntity.getAttribute(NAME), retrievedTableEntity.getAttribute(NAME)); //Map Assert.assertEquals(tableEntity.getAttribute("parametersMap"), retrievedTableEntity.getAttribute("parametersMap")); //enum diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java deleted file mode 100755 index 718844d40c..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.atlas.AtlasClient; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -/** - * Integration test for Admin jersey resource. - */ -public class AdminJerseyResourceIT extends BaseResourceIT { - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - } - - @Test - public void testGetVersion() throws Exception { - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.VERSION, null, (String[]) null); - Assert.assertNotNull(response); - - PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties"); - - Assert.assertEquals(response.get("Version").asText(), buildConfiguration.getString("build.version")); - Assert.assertEquals(response.get("Name").asText(), buildConfiguration.getString("project.name")); - Assert.assertEquals(response.get("Description").asText(), buildConfiguration.getString("project.description")); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java deleted file mode 100755 index 6af234feae..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java +++ /dev/null @@ -1,774 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.base.Preconditions; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.kafka.AtlasKafkaMessage; -import org.apache.atlas.kafka.KafkaNotification; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasStruct; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.instance.EntityMutations; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.model.typedef.AtlasBusinessMetadataDef; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasEnumDef; -import org.apache.atlas.model.typedef.AtlasRelationshipDef; -import org.apache.atlas.model.typedef.AtlasStructDef; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.notification.NotificationConsumer; -import org.apache.atlas.notification.NotificationInterface; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.ParamChecker; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.instance.Struct; -import org.apache.atlas.v1.model.notification.EntityNotificationV1; -import org.apache.atlas.v1.model.typedef.AttributeDefinition; -import org.apache.atlas.v1.model.typedef.ClassTypeDefinition; -import org.apache.atlas.v1.model.typedef.EnumTypeDefinition; -import org.apache.atlas.v1.model.typedef.EnumTypeDefinition.EnumValue; -import org.apache.atlas.v1.model.typedef.Multiplicity; -import org.apache.atlas.v1.model.typedef.StructTypeDefinition; -import org.apache.atlas.v1.model.typedef.TraitTypeDefinition; -import org.apache.atlas.v1.model.typedef.TypesDef; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.lang.RandomStringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF; -import static org.apache.atlas.type.AtlasTypeUtil.createBusinessMetadataDef; -import static org.apache.atlas.type.AtlasTypeUtil.createOptionalAttrDef; -import static org.testng.Assert.assertNotNull; 
-import static org.testng.Assert.assertTrue; - -/** - * Base class for integration tests. - * Sets up the web resource and has helper methods to created type and entity. - */ -public abstract class BaseResourceIT { - public static final Logger LOG = LoggerFactory.getLogger(BaseResourceIT.class); - - public static final String ATLAS_REST_ADDRESS = "atlas.rest.address"; - public static final String NAME = "name"; - public static final String QUALIFIED_NAME = "qualifiedName"; - public static final String CLUSTER_NAME = "clusterName"; - public static final String DESCRIPTION = "description"; - public static final String PII_TAG = "pii_Tag"; - public static final String PHI_TAG = "phi_Tag"; - public static final String PCI_TAG = "pci_Tag"; - public static final String SOX_TAG = "sox_Tag"; - public static final String SEC_TAG = "sec_Tag"; - public static final String FINANCE_TAG = "finance_Tag"; - public static final String CLASSIFICATION = "classification"; - public static final String ATLAS_LINEAGE_ON_DEMAND_ENABLED = "atlas.lineage.on.demand.enabled"; - - protected static final int MAX_WAIT_TIME = 60000; - - // All service clients - protected AtlasClient atlasClientV1; - protected AtlasClientV2 atlasClientV2; - protected String[] atlasUrls; - protected boolean isLineageOnDemandEnabled; - - - protected NotificationInterface notificationInterface = null; - protected KafkaNotification kafkaNotification = null; - - @BeforeClass - public void setUp() throws Exception { - //set high timeouts so that tests do not fail due to read timeouts while you - //are stepping through the code in a debugger - ApplicationProperties.get().setProperty("atlas.client.readTimeoutMSecs", "100000000"); - ApplicationProperties.get().setProperty("atlas.client.connectTimeoutMSecs", "100000000"); - - - Configuration configuration = ApplicationProperties.get(); - - isLineageOnDemandEnabled = configuration.getBoolean(ATLAS_LINEAGE_ON_DEMAND_ENABLED); - if (!isLineageOnDemandEnabled) { - ApplicationProperties.get().setProperty(ATLAS_LINEAGE_ON_DEMAND_ENABLED, true); - } - - atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS); - - if (atlasUrls == null || atlasUrls.length == 0) { - atlasUrls = new String[] { "http://localhost:21000/" }; - } - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - atlasClientV1 = new AtlasClient(atlasUrls, new String[]{"admin", "admin"}); - atlasClientV2 = new AtlasClientV2(atlasUrls, new String[]{"admin", "admin"}); - } else { - atlasClientV1 = new AtlasClient(atlasUrls); - atlasClientV2 = new AtlasClientV2(atlasUrls); - } - } - - protected void batchCreateTypes(AtlasTypesDef typesDef) throws AtlasServiceException { - AtlasTypesDef toCreate = new AtlasTypesDef(); - - for (AtlasEnumDef enumDef : typesDef.getEnumDefs()) { - if (atlasClientV2.typeWithNameExists(enumDef.getName())) { - LOG.warn("Type with name {} already exists. Skipping", enumDef.getName()); - } else { - toCreate.getEnumDefs().add(enumDef); - } - } - - for (AtlasStructDef structDef : typesDef.getStructDefs()) { - if (atlasClientV2.typeWithNameExists(structDef.getName())) { - LOG.warn("Type with name {} already exists. Skipping", structDef.getName()); - } else { - toCreate.getStructDefs().add(structDef); - } - } - - for (AtlasEntityDef entityDef : typesDef.getEntityDefs()) { - if (atlasClientV2.typeWithNameExists(entityDef.getName())) { - LOG.warn("Type with name {} already exists. 
Skipping", entityDef.getName()); - } else { - toCreate.getEntityDefs().add(entityDef); - } - } - - for (AtlasClassificationDef classificationDef : typesDef.getClassificationDefs()) { - if (atlasClientV2.typeWithNameExists(classificationDef.getName())) { - LOG.warn("Type with name {} already exists. Skipping", classificationDef.getName()); - } else { - toCreate.getClassificationDefs().add(classificationDef); - } - } - - for (AtlasRelationshipDef relationshipDef : typesDef.getRelationshipDefs()) { - if (atlasClientV2.typeWithNameExists(relationshipDef.getName())) { - LOG.warn("Type with name {} already exists. Skipping", relationshipDef.getName()); - } else { - toCreate.getRelationshipDefs().add(relationshipDef); - } - } - - for (AtlasBusinessMetadataDef businessMetadataDef : typesDef.getBusinessMetadataDefs()) { - if (atlasClientV2.typeWithNameExists(businessMetadataDef.getName())) { - LOG.warn("Type with name {} already exists. Skipping", businessMetadataDef.getName()); - } else { - toCreate.getBusinessMetadataDefs().add(businessMetadataDef); - } - } - - atlasClientV2.createAtlasTypeDefs(toCreate); - } - - protected void createType(AtlasTypesDef typesDef) throws AtlasServiceException { - // Since the bulk create bails out on a single failure, this has to be done as a workaround - batchCreateTypes(typesDef); - } - - protected List createType(TypesDef typesDef) throws Exception { - List enumTypes = new ArrayList<>(); - List structTypes = new ArrayList<>(); - List traitTypes = new ArrayList<>(); - List classTypes = new ArrayList<>(); - - for (EnumTypeDefinition enumTypeDefinition : typesDef.getEnumTypes()) { - if (atlasClientV2.typeWithNameExists(enumTypeDefinition.getName())) { - LOG.warn("Type with name {} already exists. Skipping", enumTypeDefinition.getName()); - } else { - enumTypes.add(enumTypeDefinition); - } - } - - for (StructTypeDefinition structTypeDefinition : typesDef.getStructTypes()) { - if (atlasClientV2.typeWithNameExists(structTypeDefinition.getTypeName())) { - LOG.warn("Type with name {} already exists. Skipping", structTypeDefinition.getTypeName()); - } else { - structTypes.add(structTypeDefinition); - } - } - - for (TraitTypeDefinition hierarchicalTypeDefinition : typesDef.getTraitTypes()) { - if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.getTypeName())) { - LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.getTypeName()); - } else { - traitTypes.add(hierarchicalTypeDefinition); - } - } - - for (ClassTypeDefinition hierarchicalTypeDefinition : typesDef.getClassTypes()) { - if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.getTypeName())) { - LOG.warn("Type with name {} already exists. 
Skipping", hierarchicalTypeDefinition.getTypeName()); - } else { - classTypes.add(hierarchicalTypeDefinition); - } - } - - TypesDef toCreate = new TypesDef(enumTypes, structTypes, traitTypes, classTypes); - - return atlasClientV1.createType(toCreate); - } - - protected List createType(String typesAsJSON) throws Exception { - return createType(AtlasType.fromV1Json(typesAsJSON, TypesDef.class)); - } - - protected Id createInstance(Referenceable referenceable) throws Exception { - String typeName = referenceable.getTypeName(); - - System.out.println("creating instance of type " + typeName); - - List guids = atlasClientV1.createEntity(referenceable); - - System.out.println("created instance for type " + typeName + ", guid: " + guids); - - // return the reference to created instance with guid - if (guids.size() > 0) { - return new Id(guids.get(guids.size() - 1), 0, referenceable.getTypeName()); - } - - return null; - } - - protected TypesDef getTypesDef(List enums, - List structs, - List traits, - List classes){ - enums = (enums != null) ? enums : Collections.emptyList(); - structs = (structs != null) ? structs : Collections.emptyList(); - traits = (traits != null) ? traits : Collections.emptyList(); - classes = (classes != null) ? classes : Collections.emptyList(); - - return new TypesDef(enums, structs, traits, classes); - } - - protected AtlasEntityHeader modifyEntity(AtlasEntity atlasEntity, boolean update) { - EntityMutationResponse entity = null; - - try { - if (!update) { - entity = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(atlasEntity)); - - assertNotNull(entity); - assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); - assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size() > 0); - - return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0); - } else { - entity = atlasClientV2.updateEntity(new AtlasEntityWithExtInfo(atlasEntity)); - - assertNotNull(entity); - assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); - assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0); - - return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0); - } - - } catch (AtlasServiceException e) { - LOG.error("Entity {} failed", update ? 
"update" : "creation", entity); - } - - return null; - } - - protected AtlasEntityHeader createEntity(AtlasEntity atlasEntity) { - return modifyEntity(atlasEntity, false); - } - - protected AtlasEntityHeader updateEntity(AtlasEntity atlasEntity) { - return modifyEntity(atlasEntity, true); - } - - protected static final String DATABASE_TYPE_V2 = "hive_db_v2"; - protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2"; - protected static final String COLUMN_TYPE_V2 = "hive_column_v2"; - protected static final String HIVE_PROCESS_TYPE_V2 = "hive_process_v2"; - - protected static final String DATABASE_TYPE = "hive_db_v1"; - protected static final String HIVE_TABLE_TYPE = "hive_table_v1"; - protected static final String COLUMN_TYPE = "hive_column_v1"; - protected static final String HIVE_PROCESS_TYPE = "hive_process_v1"; - - protected static final String DATABASE_TYPE_BUILTIN = "hive_db"; - protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table"; - protected static final String COLUMN_TYPE_BUILTIN = "hive_column"; - protected static final String HIVE_PROCESS_TYPE_BUILTIN = "hive_process"; - - protected void createTypeDefinitionsV1() throws Exception { - ClassTypeDefinition dbClsDef = TypesUtil - .createClassTypeDef(DATABASE_TYPE, null, null, - TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("locationUri", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - new AttributeDefinition("tables", AtlasBaseTypeDef.getArrayTypeName(HIVE_TABLE_TYPE), - Multiplicity.OPTIONAL, false, "db") - ); - - ClassTypeDefinition columnClsDef = TypesUtil - .createClassTypeDef(COLUMN_TYPE, null, null, attrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("dataType", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("comment", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType", null, - Arrays.asList(new AttributeDefinition[]{TypesUtil.createRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef("serde", AtlasBaseTypeDef.ATLAS_TYPE_STRING)})); - - EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),}; - - EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", null, null, Arrays.asList(values)); - - ClassTypeDefinition tblClsDef = TypesUtil - .createClassTypeDef(HIVE_TABLE_TYPE, null, Collections.singleton("DataSet"), - attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("lastAccessTime", AtlasBaseTypeDef.ATLAS_TYPE_DATE), - attrDef("temporary", AtlasBaseTypeDef.ATLAS_TYPE_BOOLEAN), - new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.OPTIONAL, true, "tables"), - new AttributeDefinition("columns", AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE), - Multiplicity.OPTIONAL, true, null), - new AttributeDefinition("tableType", "tableType", Multiplicity.OPTIONAL, false, null), - new AttributeDefinition("serde1", "serdeType", Multiplicity.OPTIONAL, false, null), - new AttributeDefinition("serde2", "serdeType", Multiplicity.OPTIONAL, false, null)); - - ClassTypeDefinition loadProcessClsDef = TypesUtil - .createClassTypeDef(HIVE_PROCESS_TYPE, null, Collections.singleton("Process"), - attrDef("userName", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - attrDef("startTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - 
attrDef("endTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), - attrDef("queryText", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryPlan", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryId", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED), - attrDef("queryGraph", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED)); - - TraitTypeDefinition classificationTrait = TypesUtil - .createTraitTypeDef("classification", null, Collections.emptySet(), - TypesUtil.createRequiredAttrDef("tag", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - TraitTypeDefinition piiTrait = TypesUtil.createTraitTypeDef(PII_TAG, null, Collections.emptySet()); - TraitTypeDefinition phiTrait = TypesUtil.createTraitTypeDef(PHI_TAG, null, Collections.emptySet()); - TraitTypeDefinition pciTrait = TypesUtil.createTraitTypeDef(PCI_TAG, null, Collections.emptySet()); - TraitTypeDefinition soxTrait = TypesUtil.createTraitTypeDef(SOX_TAG, null, Collections.emptySet()); - TraitTypeDefinition secTrait = TypesUtil.createTraitTypeDef(SEC_TAG, null, Collections.emptySet()); - TraitTypeDefinition financeTrait = TypesUtil.createTraitTypeDef(FINANCE_TAG, null, Collections.emptySet()); - TraitTypeDefinition factTrait = TypesUtil.createTraitTypeDef("Fact" + randomString(), null, Collections.emptySet()); - TraitTypeDefinition etlTrait = TypesUtil.createTraitTypeDef("ETL" + randomString(), null, Collections.emptySet()); - TraitTypeDefinition dimensionTrait = TypesUtil.createTraitTypeDef("Dimension" + randomString(), null, Collections.emptySet()); - TraitTypeDefinition metricTrait = TypesUtil.createTraitTypeDef("Metric" + randomString(), null, Collections.emptySet()); - - createType(getTypesDef(Collections.singletonList(enumTypeDefinition), - Collections.singletonList(structTypeDefinition), - Arrays.asList(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait, factTrait, etlTrait, dimensionTrait, metricTrait), - Arrays.asList(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef))); - } - - protected void createTypeDefinitionsV2() throws Exception { - AtlasConstraintDef isCompositeSourceConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_OWNED_REF); - AtlasConstraintDef isCompositeTargetConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_INVERSE_REF, Collections.singletonMap(CONSTRAINT_PARAM_ATTRIBUTE, "randomTable")); - - AtlasEntityDef dbClsTypeDef = AtlasTypeUtil.createClassTypeDef( - DATABASE_TYPE_V2, - null, - AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"), - AtlasTypeUtil.createRequiredAttrDef(DESCRIPTION, "string"), - AtlasTypeUtil.createOptionalAttrDef("locationUri", "string"), - AtlasTypeUtil.createOptionalAttrDef("owner", "string"), - AtlasTypeUtil.createOptionalAttrDef("createTime", "long"), - - //there is a serializ - new AtlasAttributeDef("randomTable", - AtlasBaseTypeDef.getArrayTypeName(HIVE_TABLE_TYPE_V2), - true, - Cardinality.SET, - 0, -1, false, true, false, Collections.singletonList(isCompositeSourceConstraint)) - ); - - AtlasEntityDef columnClsDef = AtlasTypeUtil - .createClassTypeDef(COLUMN_TYPE_V2, null, - AtlasTypeUtil.createOptionalAttrDef(NAME, "string"), - AtlasTypeUtil.createOptionalAttrDef("dataType", "string"), - AtlasTypeUtil.createOptionalAttrDef("comment", "string")); - - AtlasStructDef structTypeDef = AtlasTypeUtil.createStructTypeDef("serdeType", - AtlasTypeUtil.createRequiredAttrDef(NAME, "string"), - AtlasTypeUtil.createRequiredAttrDef("serde", "string") - ); - - AtlasEnumDef enumDef = new 
AtlasEnumDef("tableType", DESCRIPTION, Arrays.asList( - new AtlasEnumDef.AtlasEnumElementDef("MANAGED", null, 1), - new AtlasEnumDef.AtlasEnumElementDef("EXTERNAL", null, 2) - )); - - AtlasEntityDef tblClsDef = AtlasTypeUtil - .createClassTypeDef(HIVE_TABLE_TYPE_V2, - Collections.singleton("DataSet"), - AtlasTypeUtil.createOptionalAttrDef("createTime", "long"), - AtlasTypeUtil.createOptionalAttrDef("lastAccessTime", "date"), - AtlasTypeUtil.createOptionalAttrDef("temporary", "boolean"), - new AtlasAttributeDef("db", - DATABASE_TYPE_V2, - true, - Cardinality.SINGLE, - 0, 1, false, true, false, Collections.singletonList(isCompositeTargetConstraint)), - - //some tests don't set the columns field or set it to null... - AtlasTypeUtil.createOptionalAttrDef("columns", AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE_V2)), - AtlasTypeUtil.createOptionalAttrDef("tableType", "tableType"), - AtlasTypeUtil.createOptionalAttrDef("serde1", "serdeType"), - AtlasTypeUtil.createOptionalAttrDef("serde2", "serdeType")); - - AtlasEntityDef loadProcessClsDef = AtlasTypeUtil - .createClassTypeDef(HIVE_PROCESS_TYPE_V2, - Collections.singleton("Process"), - AtlasTypeUtil.createOptionalAttrDef("userName", "string"), - AtlasTypeUtil.createOptionalAttrDef("startTime", "long"), - AtlasTypeUtil.createOptionalAttrDef("endTime", "long"), - AtlasTypeUtil.createRequiredAttrDef("queryText", "string"), - AtlasTypeUtil.createRequiredAttrDef("queryPlan", "string"), - AtlasTypeUtil.createRequiredAttrDef("queryId", "string"), - AtlasTypeUtil.createRequiredAttrDef("queryGraph", "string")); - - AtlasClassificationDef classificationTrait = AtlasTypeUtil - .createTraitTypeDef("classification", Collections.emptySet(), - AtlasTypeUtil.createRequiredAttrDef("tag", "string")); - AtlasClassificationDef piiTrait = AtlasTypeUtil.createTraitTypeDef(PII_TAG, Collections.emptySet()); - AtlasClassificationDef phiTrait = AtlasTypeUtil.createTraitTypeDef(PHI_TAG, Collections.emptySet()); - AtlasClassificationDef pciTrait = AtlasTypeUtil.createTraitTypeDef(PCI_TAG, Collections.emptySet()); - AtlasClassificationDef soxTrait = AtlasTypeUtil.createTraitTypeDef(SOX_TAG, Collections.emptySet()); - AtlasClassificationDef secTrait = AtlasTypeUtil.createTraitTypeDef(SEC_TAG, Collections.emptySet()); - AtlasClassificationDef financeTrait = AtlasTypeUtil.createTraitTypeDef(FINANCE_TAG, Collections.emptySet()); - - //bussinessmetadata - String _description = "_description"; - Map options = new HashMap<>(); - options.put("maxStrLength", "20"); - AtlasBusinessMetadataDef bmNoApplicableTypes = createBusinessMetadataDef("bmNoApplicableTypes", _description, "1.0", - createOptionalAttrDef("attr0", "string", options, _description)); - - - AtlasBusinessMetadataDef bmNoAttributes = createBusinessMetadataDef("bmNoAttributes", _description, "1.0", null); - - options.put("applicableEntityTypes", "[\"" + DATABASE_TYPE_V2 + "\",\"" + HIVE_TABLE_TYPE_V2 + "\"]"); - - AtlasBusinessMetadataDef bmWithAllTypes = createBusinessMetadataDef("bmWithAllTypes", _description, "1.0", - createOptionalAttrDef("attr1", AtlasBusinessMetadataDef.ATLAS_TYPE_BOOLEAN, options, _description), - createOptionalAttrDef("attr2", AtlasBusinessMetadataDef.ATLAS_TYPE_BYTE, options, _description), - createOptionalAttrDef("attr8", AtlasBusinessMetadataDef.ATLAS_TYPE_STRING, options, _description)); - - AtlasBusinessMetadataDef bmWithAllTypesMV = createBusinessMetadataDef("bmWithAllTypesMV", _description, "1.0", - createOptionalAttrDef("attr11", "array", options, _description), - 
createOptionalAttrDef("attr18", "array", options, _description)); - - AtlasTypesDef typesDef = new AtlasTypesDef(Collections.singletonList(enumDef), - Collections.singletonList(structTypeDef), - Arrays.asList(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait), - Arrays.asList(dbClsTypeDef, columnClsDef, tblClsDef, loadProcessClsDef), - new ArrayList<>(), - Arrays.asList(bmNoApplicableTypes, bmNoAttributes, bmWithAllTypes, bmWithAllTypesMV)); - batchCreateTypes(typesDef); - } - - AttributeDefinition attrDef(String name, String dT) { - return attrDef(name, dT, Multiplicity.OPTIONAL, false, null); - } - - AttributeDefinition attrDef(String name, String dT, Multiplicity m) { - return attrDef(name, dT, m, false, null); - } - - AttributeDefinition attrDef(String name, String dT, Multiplicity m, boolean isComposite, String reverseAttributeName) { - Preconditions.checkNotNull(name); - Preconditions.checkNotNull(dT); - - return new AttributeDefinition(name, dT, m, isComposite, reverseAttributeName); - } - - protected String randomString() { - //names cannot start with a digit - return RandomStringUtils.randomAlphabetic(1) + RandomStringUtils.randomAlphanumeric(9); - } - - protected Referenceable createHiveTableInstanceBuiltIn(String dbName, String tableName, Id dbId) throws Exception { - Map values = new HashMap<>(); - values.put(NAME, dbName); - values.put(DESCRIPTION, "foo database"); - values.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - values.put("owner", "user1"); - values.put(CLUSTER_NAME, "cl1"); - values.put("parameters", Collections.EMPTY_MAP); - values.put("location", "/tmp"); - - Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values); - Referenceable tableInstance = new Referenceable(HIVE_TABLE_TYPE_BUILTIN, CLASSIFICATION, PII_TAG, PHI_TAG, PCI_TAG, SOX_TAG, SEC_TAG, FINANCE_TAG); - tableInstance.set(NAME, tableName); - tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - tableInstance.set("db", databaseInstance); - tableInstance.set(DESCRIPTION, "bar table"); - tableInstance.set("lastAccessTime", "2014-07-11T08:00:00.000Z"); - tableInstance.set("type", "managed"); - tableInstance.set("level", 2); - tableInstance.set("tableType", 1); // enum - tableInstance.set("compressed", false); - - Struct traitInstance = (Struct) tableInstance.getTrait("classification"); - traitInstance.set("tag", "foundation_etl"); - - Struct serde1Instance = new Struct("serdeType"); - serde1Instance.set(NAME, "serde1"); - serde1Instance.set("serde", "serde1"); - tableInstance.set("serde1", serde1Instance); - - Struct serde2Instance = new Struct("serdeType"); - serde2Instance.set(NAME, "serde2"); - serde2Instance.set("serde", "serde2"); - tableInstance.set("serde2", serde2Instance); - - List traits = tableInstance.getTraitNames(); - Assert.assertEquals(traits.size(), 7); - - return tableInstance; - } - - protected AtlasEntity createHiveTableInstanceV2(AtlasEntity databaseInstance, String tableName) throws Exception { - AtlasEntity tableInstance = new AtlasEntity(HIVE_TABLE_TYPE_V2); - tableInstance.setClassifications( - Arrays.asList(new AtlasClassification(CLASSIFICATION), - new AtlasClassification(PII_TAG), - new AtlasClassification(PHI_TAG), - new AtlasClassification(PCI_TAG), - new AtlasClassification(SOX_TAG), - new AtlasClassification(SEC_TAG), - new AtlasClassification(FINANCE_TAG)) - ); - - tableInstance.setAttribute(NAME, tableName); - 
tableInstance.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - tableInstance.setAttribute("db", AtlasTypeUtil.getAtlasObjectId(databaseInstance)); - tableInstance.setAttribute(DESCRIPTION, "bar table"); - tableInstance.setAttribute("lastAccessTime", "2014-07-11T08:00:00.000Z"); - tableInstance.setAttribute("type", "managed"); - tableInstance.setAttribute("level", 2); - tableInstance.setAttribute("tableType", "MANAGED"); // enum - tableInstance.setAttribute("compressed", false); - - AtlasClassification classification = tableInstance.getClassifications().get(0); - classification.setAttribute("tag", "foundation_etl"); - - AtlasStruct serde1Instance = new AtlasStruct("serdeType"); - serde1Instance.setAttribute(NAME, "serde1"); - serde1Instance.setAttribute("serde", "serde1"); - tableInstance.setAttribute("serde1", serde1Instance); - - AtlasStruct serde2Instance = new AtlasStruct("serdeType"); - serde2Instance.setAttribute(NAME, "serde2"); - serde2Instance.setAttribute("serde", "serde2"); - tableInstance.setAttribute("serde2", serde2Instance); - - List traits = tableInstance.getClassifications(); - Assert.assertEquals(traits.size(), 7); - - return tableInstance; - } - - protected Referenceable createHiveDBInstanceBuiltIn(String dbName) { - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN); - - databaseInstance.set(NAME, dbName); - databaseInstance.set(QUALIFIED_NAME, dbName); - databaseInstance.set(CLUSTER_NAME, randomString()); - databaseInstance.set(DESCRIPTION, "foo database"); - - return databaseInstance; - } - - protected Referenceable createHiveDBInstanceV1(String dbName) { - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE); - - databaseInstance.set(NAME, dbName); - databaseInstance.set(DESCRIPTION, "foo database"); - databaseInstance.set(CLUSTER_NAME, "fooCluster"); - - return databaseInstance; - } - - protected AtlasEntity createHiveDBInstanceV2(String dbName) { - AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE_V2); - - atlasEntity.setAttribute(NAME, dbName); - atlasEntity.setAttribute(DESCRIPTION, "foo database"); - atlasEntity.setAttribute(CLUSTER_NAME, "fooCluster"); - atlasEntity.setAttribute("owner", "user1"); - atlasEntity.setAttribute("locationUri", "/tmp"); - atlasEntity.setAttribute("createTime",1000); - - return atlasEntity; - } - - protected AtlasEntity createEntity(String typeName, String name) { - AtlasEntity atlasEntity = new AtlasEntity(typeName); - - atlasEntity.setAttribute("name", name); - atlasEntity.setAttribute("qualifiedName", name); - atlasEntity.setAttribute("clusterName", randomString()); - - return atlasEntity; - } - - public interface Predicate { - - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. - */ - boolean evaluate() throws Exception; - } - - public interface NotificationPredicate { - - /** - * Perform a predicate evaluation. - * - * @return the boolean result of the evaluation. - * @throws Exception thrown if the predicate evaluation could not evaluate. - */ - boolean evaluate(EntityNotificationV1 notification) throws Exception; - } - - /** - * Wait for a condition, expressed via a {@link Predicate} to become true. - * - * @param timeout maximum time in milliseconds to wait for the predicate to become true. - * @param predicate predicate waiting on. 
- */ - protected void waitFor(int timeout, Predicate predicate) throws Exception { - ParamChecker.notNull(predicate, "predicate"); - - long mustEnd = System.currentTimeMillis() + timeout; - boolean eval; - - while (!(eval = predicate.evaluate()) && System.currentTimeMillis() < mustEnd) { - LOG.info("Waiting up to {} msec", mustEnd - System.currentTimeMillis()); - - Thread.sleep(100); - } - - if (!eval) { - throw new Exception("Waiting timed out after " + timeout + " msec"); - } - } - - protected EntityNotificationV1 waitForNotification(final NotificationConsumer consumer, int maxWait, - final NotificationPredicate predicate) throws Exception { - final TypesUtil.Pair pair = TypesUtil.Pair.of(null, null); - final long maxCurrentTime = System.currentTimeMillis() + maxWait; - - waitFor(maxWait, new Predicate() { - @Override - public boolean evaluate() throws Exception { - try { - while (System.currentTimeMillis() < maxCurrentTime) { - List> messageList = consumer.receive(); - - if(messageList.size() > 0) { - EntityNotificationV1 notification = messageList.get(0).getMessage(); - - if (predicate.evaluate(notification)) { - pair.left = notification; - - return true; - } - } else { - LOG.info( System.currentTimeMillis()+ " messageList no records" +maxCurrentTime ); - } - } - } catch(Exception e) { - LOG.error(" waitForNotification", e); - //ignore - } - - return false; - } - }); - - return pair.left; - } - - protected NotificationPredicate newNotificationPredicate(final EntityNotificationV1.OperationType operationType, - final String typeName, final String guid) { - return new NotificationPredicate() { - @Override - public boolean evaluate(EntityNotificationV1 notification) throws Exception { - return notification != null && - notification.getOperationType() == operationType && - notification.getEntity().getTypeName().equals(typeName) && - notification.getEntity().getId()._getId().equals(guid); - } - }; - } - - protected ArrayNode searchByDSL(String dslQuery) throws AtlasServiceException { - return atlasClientV1.searchByDSL(dslQuery, 10, 0); - } - - protected void initNotificationService() throws Exception { - Configuration applicationProperties = ApplicationProperties.get(); - - applicationProperties.setProperty("atlas.kafka.data", "target/" + RandomStringUtils.randomAlphanumeric(5)); - - kafkaNotification = new KafkaNotification(applicationProperties); - notificationInterface = kafkaNotification; - - kafkaNotification.start(); - Thread.sleep(2000); - } - - protected void cleanUpNotificationService() { - if (kafkaNotification != null) { - kafkaNotification.close(); - kafkaNotification.stop(); - } - - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java deleted file mode 100644 index 1d8dfd93d8..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java +++ /dev/null @@ -1,330 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.web.integration; - - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.discovery.AtlasQuickSearchResult; -import org.apache.atlas.model.discovery.AtlasSearchResult; -import org.apache.atlas.model.discovery.QuickSearchParameters; -import org.apache.atlas.model.discovery.SearchParameters; -import org.apache.atlas.model.impexp.AtlasImportRequest; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.profile.AtlasUserSavedSearch; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.TestResourceFileUtils; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import java.io.IOException; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; - -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; -import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.fail; - -public class BasicSearchIT extends BaseResourceIT { - private AtlasUserSavedSearch userSavedSearch; - - @BeforeClass - @Override - public void setUp() throws Exception { - super.setUp(); - - String smallDatasetFileName = "hive-db-50-tables.zip"; - atlasClientV2.importData(new AtlasImportRequest(), TestResourceFileUtils.getTestFilePath(smallDatasetFileName)); - - // Create a entity with name/qualified name having special characters - - // Create a test tag - if (!atlasClientV2.typeWithNameExists("fooTag")) { - AtlasClassificationDef testClassificationDef = AtlasTypeUtil.createTraitTypeDef("fooTag", "Test tag", "1.0", Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(testClassificationDef); - atlasClientV2.createAtlasTypeDefs(typesDef); - } - - try { - atlasClientV2.getEntityByAttribute("hdfs_path", new HashMap() {{ - put("qualifiedName", URLEncoder.encode("test$1test+ - && || ! ( ) { } [ ] ^ < > ; : \" % * ` ~", "UTF-8")); - }}); - } catch (AtlasServiceException e) { - AtlasEntity hdfsEntity = new AtlasEntity("hdfs_path"); - hdfsEntity.setGuid("-1"); - hdfsEntity.setAttribute("description", "1test+ - && || ! ( ) { } [ ] ^ < > ; : \" % * ` ~"); - hdfsEntity.setAttribute("name", "1test+ - && || ! ( ) { } [ ] ^ < > ; : \" % * ` ~"); - hdfsEntity.setAttribute("owner", "test"); - hdfsEntity.setAttribute("qualifiedName", "test$1test+ - && || ! 
( ) { } [ ] ^ < > ; : \" % * ` ~"); - hdfsEntity.setAttribute("path", "/test/foo"); - - hdfsEntity.setClassifications(new ArrayList()); - hdfsEntity.getClassifications().add(new AtlasClassification("fooTag")); - - EntityMutationResponse entityMutationResponse = atlasClientV2.createEntity(new AtlasEntity.AtlasEntityWithExtInfo(hdfsEntity)); - if (entityMutationResponse.getCreatedEntities() != null) { - assertEquals(entityMutationResponse.getCreatedEntities().size(), 1); - } else if (entityMutationResponse.getUpdatedEntities() != null) { - assertEquals(entityMutationResponse.getUpdatedEntities().size(), 1); - } else { - fail("Entity should've been created or updated"); - } - } - - // Add a 5s mandatory sleep for allowing index to catch up - try { - Thread.sleep(5000); - } catch (InterruptedException e) { - LOG.error("Sleep was interrupted. The search results might be inconsistent."); - } - } - - @DataProvider - public Object[][] basicSearchJSONNames() { - return new String[][]{ - {"search-parameters/entity-filters"}, - {"search-parameters/tag-filters"}, - {"search-parameters/combination-filters"} - }; - } - - @DataProvider - public Object[][] attributeSearchJSONNames() { - return new String[][]{ - {"search-parameters/attribute-filters"} - }; - } - - @Test(dataProvider = "basicSearchJSONNames") - public void testDiscoveryWithSearchParameters(String jsonFile) { - try { - BasicSearchParametersWithExpectation[] testExpectations = - TestResourceFileUtils.readObjectFromJson(jsonFile, BasicSearchParametersWithExpectation[].class); - assertNotNull(testExpectations); - - for (BasicSearchParametersWithExpectation testExpectation : testExpectations) { - LOG.info("TestDescription :{}", testExpectation.testDescription); - LOG.info("SearchParameters :{}", testExpectation.searchParameters); - - AtlasSearchResult searchResult = atlasClientV2.facetedSearch(testExpectation.searchParameters); - if (testExpectation.expectedCount > 0) { - assertNotNull(searchResult.getEntities()); - assertEquals(searchResult.getEntities().size(), testExpectation.expectedCount); - } - - if (testExpectation.searchParameters.getSortBy() != null && !testExpectation.searchParameters.getSortBy().isEmpty()) { - assertNotNull(searchResult.getEntities()); - assertEquals(searchResult.getEntities().get(0).getAttribute("name"), - "testtable_3"); - } - } - } catch (IOException | AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dataProvider = "attributeSearchJSONNames") - public void testAttributeSearch(String jsonFile) { - try { - BasicSearchParametersWithExpectation[] testExpectations = - TestResourceFileUtils.readObjectFromJson(jsonFile, BasicSearchParametersWithExpectation[].class); - assertNotNull(testExpectations); - - for (BasicSearchParametersWithExpectation testExpectation : testExpectations) { - LOG.info("TestDescription :{}", testExpectation.testDescription); - LOG.info("SearchParameters :{}", testExpectation.searchParameters); - SearchParameters parameters = testExpectation.getSearchParameters(); - - if (parameters.getEntityFilters() == null || parameters.getEntityFilters().getAttributeName() == null) { - continue; - } - SearchParameters.FilterCriteria filterCriteria = parameters.getEntityFilters(); - AtlasSearchResult searchResult = atlasClientV2.attributeSearch(parameters.getTypeName(), filterCriteria.getAttributeName(), filterCriteria.getAttributeValue(), parameters.getLimit(), parameters.getOffset()); - if (testExpectation.expectedCount > 0) { - assertNotNull(searchResult.getEntities()); - 
assertEquals(searchResult.getEntities().size(), testExpectation.expectedCount); - } - - if (testExpectation.searchParameters.getSortBy() != null && !testExpectation.searchParameters.getSortBy().isEmpty()) { - assertNotNull(searchResult.getEntities()); - assertEquals(searchResult.getEntities().get(0).getAttribute("name"), - "testtable_1"); - } - } - } catch (IOException | AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dataProvider = "attributeSearchJSONNames") - public void testSavedSearch(String jsonFile) { - try { - BasicSearchParametersWithExpectation[] testExpectations = - TestResourceFileUtils.readObjectFromJson(jsonFile, BasicSearchParametersWithExpectation[].class); - assertNotNull(testExpectations); - - for (BasicSearchParametersWithExpectation testExpectation : testExpectations) { - LOG.info("TestDescription :{}", testExpectation.testDescription); - LOG.info("SearchParameters :{}", testExpectation.searchParameters); - SearchParameters parameters = testExpectation.getSearchParameters(); - - AtlasUserSavedSearch savedSearch = new AtlasUserSavedSearch(); - savedSearch.setSearchType(AtlasUserSavedSearch.SavedSearchType.BASIC); - savedSearch.setName("basic_test"); - savedSearch.setGuid(""); - savedSearch.setSearchParameters(parameters); - savedSearch.setOwnerName("admin"); - - userSavedSearch = atlasClientV2.addSavedSearch(savedSearch); - assertNotNull(userSavedSearch); - List list = atlasClientV2.getSavedSearches("admin"); - assertNotNull(list); - } - } catch (IOException | AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dependsOnMethods = "testSavedSearch") - public void testExecuteSavedSearchByName() { - try { - AtlasSearchResult searchResult = atlasClientV2.executeSavedSearch("admin", "basic_test"); - assertNotNull(searchResult); - } catch (AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dependsOnMethods = "testSavedSearch") - public void tesUpdateSavedSearch() { - try { - userSavedSearch.setSearchType(AtlasUserSavedSearch.SavedSearchType.ADVANCED); - userSavedSearch = atlasClientV2.updateSavedSearch(userSavedSearch); - assertNotNull(userSavedSearch); - assertEquals(userSavedSearch.getSearchType(), AtlasUserSavedSearch.SavedSearchType.ADVANCED); - } catch (AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dependsOnMethods = "tesUpdateSavedSearch") - public void testExecuteSavedSearchByGuid() { - try { - AtlasSearchResult searchResult = atlasClientV2.executeSavedSearch(userSavedSearch.getGuid()); - assertNotNull(searchResult); - } catch (AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test(dependsOnMethods = "testExecuteSavedSearchByGuid") - public void testDeleteSavedSearch() { - AtlasUserSavedSearch searchAfterDelete = null; - try { - atlasClientV2.deleteSavedSearch(userSavedSearch.getGuid()); - searchAfterDelete = atlasClientV2.getSavedSearch("admin", "basic_test"); - } catch (AtlasServiceException e) { - assertNull(searchAfterDelete); - } - } - - @Test - public void testGetQuickSearch() { - try { - AtlasQuickSearchResult result = atlasClientV2.quickSearch("test", "hdfs_path", false, 2, 0); - assertNotNull(result); - List list = result.getSearchResults().getEntities(); - assertEquals(list.size(), 1); - } catch (AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @Test - public void testPostQuickSearch() { - try { - QuickSearchParameters quickSearchParameters = new QuickSearchParameters(); - quickSearchParameters.setQuery("test"); - 
quickSearchParameters.setTypeName("hdfs_path"); - AtlasQuickSearchResult result = atlasClientV2.quickSearch(quickSearchParameters); - List list = result.getSearchResults().getEntities(); - assertEquals(list.size(), 1); - } catch (AtlasServiceException e) { - fail(e.getMessage()); - } - } - - @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) - @JsonIgnoreProperties(ignoreUnknown = true) - private static class BasicSearchParametersWithExpectation { - private String testDescription; - private SearchParameters searchParameters; - private int expectedCount; - - public BasicSearchParametersWithExpectation() { - } - - public BasicSearchParametersWithExpectation(final String testDescription, final SearchParameters searchParameters, final int expectedCount) { - this.testDescription = testDescription; - this.searchParameters = searchParameters; - this.expectedCount = expectedCount; - } - - public SearchParameters getSearchParameters() { - return searchParameters; - } - - public void setSearchParameters(final SearchParameters searchParameters) { - this.searchParameters = searchParameters; - } - - public int getExpectedCount() { - return expectedCount; - } - - public void setExpectedCount(final int expectedCount) { - this.expectedCount = expectedCount; - } - - public String getTestDescription() { - return testDescription; - } - - public void setTestDescription(final String testDescription) { - this.testDescription = testDescription; - } - } -} \ No newline at end of file diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java deleted file mode 100644 index 2406f12be9..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java +++ /dev/null @@ -1,289 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.instance.Struct; -import org.apache.atlas.v1.model.typedef.TraitTypeDefinition; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.testng.Assert.assertEquals; - -/** - * Hive Lineage Integration Tests. - */ -public class DataSetLineageJerseyResourceIT extends BaseResourceIT { - - private String salesFactTable; - private String salesMonthlyTable; - private String salesDBName; - private static String FACT = "Fact_Tag"; - private static String ETL = "ETL_Tag"; - private static String DIMENSION = "Dimension_Tag"; - private static String METRIC = "Metric_Tag"; - private static String PII = "pii_Tag"; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - createTypeDefinitionsV1(); - setupInstances(); - } - - @Test - public void testInputsGraph() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId(); - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LINEAGE_INPUTS_GRAPH, null, tableId, "/inputs/graph"); - - Assert.assertNotNull(response); - System.out.println("inputs graph = " + response); - - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - JsonNode results = response.get(AtlasClient.RESULTS); - Assert.assertNotNull(results); - - Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class); - Map vertices = (Map) resultsInstance.get("vertices"); - Assert.assertEquals(vertices.size(), 4); - - Map edges = (Map) resultsInstance.get("edges"); - Assert.assertEquals(edges.size(), 4); - } - - @Test - public void testInputsGraphForEntity() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - salesMonthlyTable).getId()._getId(); - ObjectNode results = atlasClientV1.getInputGraphForEntity(tableId); - Assert.assertNotNull(results); - - Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class); - resultsInstance.normalize(); - - Map vertices = (Map) resultsInstance.get("vertices"); - Assert.assertEquals(vertices.size(), 4); - - Object verticesObject = vertices.get(tableId); - Struct vertex = null; - - if (verticesObject instanceof Map) { - vertex = new Struct ((Map)verticesObject); - } else if (verticesObject instanceof Struct) { - vertex = (Struct)verticesObject; - } - - assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.ACTIVE.name()); - - Map edges = (Map) resultsInstance.get("edges"); - Assert.assertEquals(edges.size(), 4); - } - - @Test - public void testOutputsGraph() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId(); - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LINEAGE_INPUTS_GRAPH, null, tableId, 
"/outputs/graph"); - - Assert.assertNotNull(response); - System.out.println("outputs graph= " + response); - - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - JsonNode results = response.get(AtlasClient.RESULTS); - Assert.assertNotNull(results); - - Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class); - Map vertices = (Map) resultsInstance.get("vertices"); - Assert.assertEquals(vertices.size(), 3); - - Map edges = (Map) resultsInstance.get("edges"); - Assert.assertEquals(edges.size(), 4); - } - - @Test - public void testOutputsGraphForEntity() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId(); - ObjectNode results = atlasClientV1.getOutputGraphForEntity(tableId); - Assert.assertNotNull(results); - - Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class); - Map vertices = (Map) resultsInstance.get("vertices"); - Assert.assertEquals(vertices.size(), 3); - - Object verticesObject = vertices.get(tableId); - Struct vertex = null; - - if (verticesObject instanceof Map) { - vertex = new Struct ((Map)verticesObject); - } else if (verticesObject instanceof Struct) { - vertex = (Struct)verticesObject; - } - - assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.ACTIVE.name()); - - Map edges = (Map) resultsInstance.get("edges"); - Assert.assertEquals(edges.size(), 4); - } - - @Test - public void testSchema() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId(); - ObjectNode response = atlasClientV1.getSchemaForEntity(tableId); - } - - @Test - public void testSchemaForEntity() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId(); - ObjectNode results = atlasClientV1.getSchemaForEntity(tableId); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testSchemaForInvalidTable() throws Exception { - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.NAME_LINEAGE_SCHEMA, null, "blah", "schema"); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testSchemaForDB() throws Exception { - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.NAME_LINEAGE_SCHEMA, null, salesDBName, "schema"); - } - - private void setupInstances() throws Exception { - TraitTypeDefinition factTrait = TypesUtil.createTraitTypeDef(FACT, null, Collections.emptySet()); - TraitTypeDefinition etlTrait = TypesUtil.createTraitTypeDef(ETL, null, Collections.emptySet()); - TraitTypeDefinition dimensionTrait = TypesUtil.createTraitTypeDef(DIMENSION, null, Collections.emptySet()); - TraitTypeDefinition metricTrait = TypesUtil.createTraitTypeDef(METRIC, null, Collections.emptySet()); - - createType(getTypesDef(null, null, Arrays.asList(factTrait, etlTrait, dimensionTrait, metricTrait), null)); - - salesDBName = "Sales" + randomString(); - Id salesDB = database(salesDBName, "Sales Database", "John ETL", - "hdfs://host:8000/apps/warehouse/sales"); - - List salesFactColumns = Arrays.asList(column("time_id", "int", "time id"), column("product_id", "int", "product id"), - column("customer_id", "int", "customer id", PII), - column("sales", "double", "product id", METRIC)); - - salesFactTable = "sales_fact" + randomString(); - Id salesFact = 
table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns, FACT); - - List timeDimColumns = Arrays.asList(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"), - column("weekDay", "int", "week Day")); - - Id timeDim = - table("time_dim" + randomString(), "time dimension table", salesDB, "John Doe", "EXTERNAL", - timeDimColumns, DIMENSION); - - Id reportingDB = - database("Reporting" + randomString(), "reporting database", "Jane BI", - "hdfs://host:8000/apps/warehouse/reporting"); - - Id salesFactDaily = - table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB, - "Joe BI", "MANAGED", salesFactColumns, METRIC); - - loadProcess("loadSalesDaily" + randomString(), "John ETL", Arrays.asList(salesFact, timeDim), - Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph", ETL); - - salesMonthlyTable = "sales_fact_monthly_mv" + randomString(); - Id salesFactMonthly = - table(salesMonthlyTable, "sales fact monthly materialized view", reportingDB, "Jane BI", - "MANAGED", salesFactColumns, METRIC); - - loadProcess("loadSalesMonthly" + randomString(), "John ETL", Collections.singletonList(salesFactDaily), - Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph", ETL); - } - - Id database(String name, String description, String owner, String locationUri, String... traitNames) - throws Exception { - Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames); - referenceable.set(NAME, name); - referenceable.set(QUALIFIED_NAME, name); - referenceable.set(CLUSTER_NAME, locationUri + name); - referenceable.set("description", description); - referenceable.set("owner", owner); - referenceable.set("locationUri", locationUri); - referenceable.set("createTime", System.currentTimeMillis()); - - return createInstance(referenceable); - } - - Referenceable column(String name, String type, String comment, String... traitNames) throws Exception { - Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames); - referenceable.set(NAME, name); - referenceable.set(QUALIFIED_NAME, name); - referenceable.set("type", type); - referenceable.set("comment", comment); - - return referenceable; - } - - Id table(String name, String description, Id dbId, String owner, String tableType, List columns, - String... traitNames) throws Exception { - Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - referenceable.set("description", description); - referenceable.set("owner", owner); - referenceable.set("tableType", tableType); - referenceable.set("createTime", System.currentTimeMillis()); - referenceable.set("lastAccessTime", System.currentTimeMillis()); - referenceable.set("retention", System.currentTimeMillis()); - - referenceable.set("db", dbId); - referenceable.set("columns", columns); - - return createInstance(referenceable); - } - - Id loadProcess(String name, String user, List inputTables, List outputTables, String queryText, - String queryPlan, String queryId, String queryGraph, String... 
traitNames) throws Exception { - Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames); - referenceable.set("name", name); - referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name); - referenceable.set("userName", user); - referenceable.set("startTime", System.currentTimeMillis()); - referenceable.set("endTime", System.currentTimeMillis() + 10000); - - referenceable.set("inputs", inputTables); - referenceable.set("outputs", outputTables); - - referenceable.set("operationType", "testOperation"); - referenceable.set("queryText", queryText); - referenceable.set("queryPlan", queryPlan); - referenceable.set("queryId", queryId); - referenceable.set("queryGraph", queryGraph); - - return createInstance(referenceable); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/DebugMetricsIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/DebugMetricsIT.java deleted file mode 100644 index 3c614d480c..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/DebugMetricsIT.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.core.type.TypeReference; -import org.apache.atlas.AtlasBaseClient; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.utils.AtlasJson; -import org.apache.atlas.web.model.DebugMetrics; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.ws.rs.HttpMethod; -import javax.ws.rs.core.Response; -import java.util.HashMap; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.fail; - -public class DebugMetricsIT extends BaseResourceIT { - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - } - - @Test - public void checkMetricCountIncrement() { - - // Get the metrics - AtlasBaseClient.API metricsAPI = new AtlasBaseClient.API(AtlasBaseClient.BASE_URI + "admin/debug/metrics", HttpMethod.GET, Response.Status.OK); - try { - String metricsJson = atlasClientV1.callAPI(metricsAPI, String.class, null); - HashMap currentMetrics = AtlasJson.fromJson(metricsJson, new TypeReference>() {}); - DebugMetrics currentCreateOrUpdateDTO = currentMetrics.get("EntityREST_createOrUpdate"); - long currentCreateOrUpdateCount = 0; - if(currentCreateOrUpdateDTO != null) { - currentCreateOrUpdateCount = currentCreateOrUpdateDTO.getNumops(); - } - - // hit the api - AtlasEntity atlasEntity = createEntity(DATABASE_TYPE_BUILTIN, randomString()); - atlasClientV2.createEntity(new AtlasEntity.AtlasEntityWithExtInfo(atlasEntity)); - - atlasEntity = createEntity(DATABASE_TYPE_BUILTIN, randomString()); - atlasClientV2.createEntity(new AtlasEntity.AtlasEntityWithExtInfo(atlasEntity)); - - // get the metrics again - Thread.sleep(30000); // The metrics take some time to update - metricsJson = atlasClientV1.callAPI(metricsAPI, String.class, null); - HashMap newMetrics = AtlasJson.fromJson(metricsJson, new TypeReference>() {}); - DebugMetrics newCreateOrUpdateDTO = newMetrics.get("EntityREST_createOrUpdate"); - - // check if the metric count has increased - long newCreateOrUpdateCount = 0; - if(newCreateOrUpdateDTO != null) { - newCreateOrUpdateCount = newCreateOrUpdateDTO.getNumops(); - } - assertEquals(newCreateOrUpdateCount, (currentCreateOrUpdateCount + 2), "Count didn't increase after making API call"); - } catch (Exception e) { - fail("Caught exception while running the test: " + e.getMessage(), e); - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java deleted file mode 100755 index 8ece444f8c..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java +++ /dev/null @@ -1,1158 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.model.legacy.EntityResult; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.apache.atlas.v1.model.instance.Struct; -import org.apache.atlas.v1.model.typedef.*; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.commons.lang.StringUtils; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.*; - -import static com.sun.jersey.api.client.ClientResponse.Status.BAD_REQUEST; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - - -/** - * Integration tests for Entity Jersey Resource. - */ -public class EntityJerseyResourceIT extends BaseResourceIT { - - private static final Logger LOG = LoggerFactory.getLogger(EntityJerseyResourceIT.class); - - private static final String TRAITS = "traits"; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - createTypeDefinitionsV1(); - - } - - @Test - public void testCreateNestedEntities() throws Exception { - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE); - databaseInstance.set("name", "db_"+ randomString()); - databaseInstance.set("description", "foo database"); - - int nTables = 5; - int colsPerTable=3; - List tables = new ArrayList<>(); - List allColumns = new ArrayList<>(); - - for(int i = 0; i < nTables; i++) { - String tableName = "db1-table-" + i + randomString(); - - Referenceable tableInstance = new Referenceable(HIVE_TABLE_TYPE); - tableInstance.set("name", tableName); - tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - tableInstance.set("db", databaseInstance); - tableInstance.set("description", tableName + " table"); - tables.add(tableInstance); - - List columns = new ArrayList<>(); - for(int j = 0; j < colsPerTable; j++) { - Referenceable columnInstance = new Referenceable(COLUMN_TYPE); - columnInstance.set("name", tableName + "-col-" + j + randomString()); - columnInstance.set("dataType", "String"); - columnInstance.set("comment", "column " + j + " for table " + i); - allColumns.add(columnInstance); - columns.add(columnInstance); - } - tableInstance.set("columns", columns); - } - - //Create the tables. 
The database and columns should be created automatically, since - //the tables reference them. - List entityGUIDs = atlasClientV1.createEntity(tables); - assertNotNull(entityGUIDs); - assertEquals(entityGUIDs.size(), nTables * (colsPerTable + 1) + 1); - } - - - @Test - public void testSubmitEntity() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable referenceable = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(referenceable); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - } - - @Test - public void testRequestUser() throws Exception { - Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - entity.set("name", dbName); - entity.set(QUALIFIED_NAME, dbName); - entity.set("clusterName", randomString()); - entity.set("description", randomString()); - entity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - entity.set("owner", "user1"); - entity.set("clusterName", "cl1"); - entity.set("parameters", Collections.EMPTY_MAP); - entity.set("location", "/tmp"); - - - String user = "admin"; - AtlasClient localClient = null; - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - localClient = new AtlasClient(atlasUrls, new String[]{"admin", "admin"}); - } else { - localClient = new AtlasClient(atlasUrls); - } - String entityId = localClient.createEntity(entity).get(0); - - List events = atlasClientV1.getEntityAuditEvents(entityId, (short) 10); - assertEquals(events.size(), 1); - assertEquals(events.get(0).getUser(), user); - } - - @Test - //API should accept single entity (or jsonarray of entities) - public void testSubmitSingleEntity() throws Exception { - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - databaseInstance.set("name", dbName); - databaseInstance.set(QUALIFIED_NAME, dbName); - databaseInstance.set("clusterName", randomString()); - databaseInstance.set("description", randomString()); - databaseInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - databaseInstance.set("owner", "user1"); - databaseInstance.set("clusterName", "cl1"); - databaseInstance.set("parameters", Collections.EMPTY_MAP); - databaseInstance.set("location", "/tmp"); - - ObjectNode response = atlasClientV1.callAPIWithBody(AtlasClient.API_V1.CREATE_ENTITY, AtlasType.toV1Json(databaseInstance)); - assertNotNull(response); - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - EntityResult entityResult = EntityResult.fromString(response.toString()); - assertEquals(entityResult.getCreatedEntities().size(), 1); - assertNotNull(entityResult.getCreatedEntities().get(0)); - } - - @Test - public void testEntityDeduping() throws Exception { - final String dbName = "db" + randomString(); - Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbIdReference = createInstance(HiveDBInstance); - final String dbId = dbIdReference._getId(); - - assertEntityAudit(dbId, EntityAuditEvent.EntityAuditAction.ENTITY_CREATE); - -// Disabling DSL tests until v2 DSL implementation is ready - -// JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName)); -// 
assertEquals(results.length(), 1); -// -// //create entity again shouldn't create another instance with same unique attribute value -// List entityResults = atlasClientV1.createEntity(HiveDBInstance); -// assertEquals(entityResults.size(), 0); -// -// results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName)); -// assertEquals(results.length(), 1); -// -// //Test the same across references -// Referenceable table = new Referenceable(HIVE_TABLE_TYPE_BUILTIN); -// final String tableName = randomString(); -// Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbIdReference); -// atlasClientV1.createEntity(tableInstance); -// results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName)); -// assertEquals(results.length(), 1); - } - - private void assertEntityAudit(String dbid, EntityAuditEvent.EntityAuditAction auditAction) throws Exception { - List events = atlasClientV1.getEntityAuditEvents(dbid, (short) 100); - - for (EntityAuditEvent event : events) { - if (event.getAction() == auditAction) { - return; - } - } - fail("Expected audit event with action = " + auditAction); - } - - @Test - public void testEntityDefinitionAcrossTypeUpdate() throws Exception { - //create type - ClassTypeDefinition typeDefinition = TypesUtil - .createClassTypeDef(randomString(), null, Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(typeDefinition)); - - atlasClientV1.createType(AtlasType.toV1Json(typesDef)); - - //create entity for the type - Referenceable instance = new Referenceable(typeDefinition.getTypeName()); - instance.set("name", randomString()); - String guid = atlasClientV1.createEntity(instance).get(0); - - //update type - add attribute - typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.getTypeName(), null, Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createOptionalAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - TypesDef typeDef = new TypesDef(Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), - Arrays.asList(typeDefinition)); - atlasClientV1.updateType(typeDef); - - //Get definition after type update - new attributes should be null - Referenceable entity = atlasClientV1.getEntity(guid); - Assert.assertNull(entity.get("description")); - Assert.assertEquals(entity.get("name"), instance.get("name")); - } - - @DataProvider - public Object[][] invalidAttrValues() { - return new Object[][]{{null}, {""}}; - } - - @Test(dataProvider = "invalidAttrValues") - public void testEntityInvalidValue(String value) throws Exception { - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN); - databaseInstance.set("name", randomString()); - databaseInstance.set("description", value); - - try { - createInstance(databaseInstance); - Assert.fail("Expected AtlasServiceException"); - } catch (AtlasServiceException e) { - Assert.assertEquals(e.getStatus(), BAD_REQUEST); - } - } - - @Test - public void testGetEntityByAttribute() throws Exception { - Referenceable db1 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - String qualifiedName = dbName + "@cl1"; - db1.set(NAME, dbName); - db1.set(DESCRIPTION, randomString()); - 
db1.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName); - db1.set("owner", "user1"); - db1.set(CLUSTER_NAME, "cl1"); - db1.set("parameters", Collections.EMPTY_MAP); - db1.set("location", "/tmp"); - createInstance(db1); - - //get entity by attribute - Referenceable referenceable = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName); - Assert.assertEquals(referenceable.getTypeName(), DATABASE_TYPE_BUILTIN); - Assert.assertEquals(referenceable.get(QUALIFIED_NAME), dbName + "@" + "cl1"); - } - - @Test - public void testSubmitEntityWithBadDateFormat() { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = null; - try { - dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - hiveTableInstance.set("lastAccessTime", "2014-07-11"); - createInstance(hiveTableInstance); - } catch (AtlasServiceException e) { - // Should catch the exception - assertEquals(e.getStatus().getStatusCode(), BAD_REQUEST.getStatusCode()); - } catch (Exception e) { - // ignore - } - - } - - @Test - public void testAddProperty() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable referenceable = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(referenceable); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - //add property - String description = "bar table - new desc"; - addProperty(guid, "description", description); - - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.GET_ENTITY, null, guid); - Assert.assertNotNull(response); - - referenceable.set("description", description); - - //invalid property for the type - try { - addProperty(guid, "invalid_property", "bar table"); - Assert.fail("Expected AtlasServiceException"); - } catch (AtlasServiceException e) { - Assert.assertEquals(e.getStatus().getStatusCode(), Response.Status.BAD_REQUEST.getStatusCode()); - } - - String currentTime = String.valueOf(new DateTime()); - - // updating date attribute as string not supported in v2 - // addProperty(guid, "createTime", currentTime); - - response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.GET_ENTITY, null, guid); - Assert.assertNotNull(response); - - referenceable.set("createTime", currentTime); - } - - @Test(expectedExceptions = IllegalArgumentException.class) - public void testAddNullProperty() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - //add property - addProperty(guid, null, "foo bar"); - Assert.fail(); - } - - @Test(enabled = false) - public void testAddNullPropertyValue() throws Exception { - String dbName = 
"db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - //add property - try { - addProperty(guid, "description", null); - Assert.fail("Expected AtlasServiceException"); - } catch(AtlasServiceException e) { - Assert.assertEquals(e.getStatus().getStatusCode(), Response.Status.BAD_REQUEST.getStatusCode()); - } - } - - @Test - public void testAddReferenceProperty() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - //Create new db instance - dbName = "db" + randomString(); - Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN); - databaseInstance.set(NAME, dbName); - databaseInstance.set(QUALIFIED_NAME, dbName); - databaseInstance.set(CLUSTER_NAME, randomString()); - databaseInstance.set("description", "new database"); - databaseInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - databaseInstance.set("owner", "user1"); - databaseInstance.set(CLUSTER_NAME, "cl1"); - databaseInstance.set("parameters", Collections.EMPTY_MAP); - databaseInstance.set("location", "/tmp"); - - Id dbInstance = createInstance(databaseInstance); - String newDBId = dbInstance._getId(); - - //Add reference property - EntityResult entityResult = atlasClientV1.updateEntityAttribute(guid, "db", newDBId); - assertEquals(entityResult.getUpdateEntities().size(), 2); - assertEquals(entityResult.getUpdateEntities().get(0), newDBId); - assertEquals(entityResult.getUpdateEntities().get(1), guid); - } - - @Test - public void testGetEntityDefinition() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - Referenceable entity = atlasClientV1.getEntity(guid); - Assert.assertNotNull(entity); - } - - private void addProperty(String guid, String property, String value) throws AtlasServiceException { - EntityResult entityResult = atlasClientV1.updateEntityAttribute(guid, property, value); - assertEquals(entityResult.getUpdateEntities().size(), 1); - assertEquals(entityResult.getUpdateEntities().get(0), guid); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testGetInvalidEntityDefinition() throws Exception { - - ObjectNode response = 
atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.GET_ENTITY, null, "blah"); - - Assert.assertNotNull(response); - - Assert.assertNotNull(response.get(AtlasClient.ERROR)); - } - - @Test - public void testGetEntityList() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - List entities = atlasClientV1.listEntities(HIVE_TABLE_TYPE_BUILTIN); - Assert.assertNotNull(entities); - Assert.assertTrue(entities.contains(guid)); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testGetEntityListForBadEntityType() throws Exception { - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add("type", "blah"); - - ObjectNode response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API_V1.GET_ENTITY, queryParams); - assertNotNull(response); - Assert.assertNotNull(response.get(AtlasClient.ERROR)); - } - - - @Test - public void testGetEntityListForNoInstances() throws Exception { - String typeName = addNewType(); - - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add("type", typeName); - - ObjectNode response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API_V1.GET_ENTITY, queryParams); - assertNotNull(response); - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - final ArrayNode list = (ArrayNode) response.get(AtlasClient.RESULTS); - Assert.assertEquals(list.size(), 0); - } - - private String addNewType() throws Exception { - String typeName = "test" + randomString(); - ClassTypeDefinition testTypeDefinition = TypesUtil - .createClassTypeDef(typeName, null, Collections.emptySet(), - TypesUtil.createRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(testTypeDefinition)); - - createType(AtlasType.toV1Json(typesDef)); - return typeName; - } - - @Test - public void testGetTraitNames() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - List traits = atlasClientV1.listTraits(guid); - assertNotNull(traits); - Assert.assertEquals(traits.size(), 7); - } - - @Test - public void testAddTrait() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = 
createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = - TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(typesDef); - - Struct traitInstance = new Struct(traitName); - - atlasClientV1.addTrait(guid, traitInstance); - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_ADD); - } - - @Test - public void testGetTraitDefinitionForEntity() throws Exception{ - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - String traitDefinitionAsJSON = AtlasType.toV1Json(typesDef); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - createType(AtlasType.toV1Json(typesDef)); - - Struct traitInstance = new Struct(traitName); - atlasClientV1.addTrait(guid, traitInstance); - Struct traitDef = atlasClientV1.getTraitDefinition(guid, traitName); - Assert.assertEquals(traitDef.getTypeName(), traitName); - - List allTraitDefs = atlasClientV1.listTraitDefinitions(guid); - System.out.println(allTraitDefs.toString()); - Assert.assertEquals(allTraitDefs.size(), 8); - } - - @Test - public void testAddExistingTrait() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = - TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(typesDef); - - Struct traitInstance = new Struct(traitName); - atlasClientV1.addTrait(guid, traitInstance); - - try { - atlasClientV1.addTrait(guid, traitInstance); - 
fail("Duplicate trait addition should've failed"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus(), BAD_REQUEST); - } - } - - @Test - public void testAddTraitWithAttribute() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - final String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = TypesUtil - .createTraitTypeDef(traitName, null, Collections.emptySet(), - TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(typesDef); - - Struct traitInstance = new Struct(traitName); - traitInstance.set("type", "SSN"); - atlasClientV1.addTrait(guid, traitInstance); - - // verify the response - Referenceable entity = atlasClientV1.getEntity(guid); - Assert.assertNotNull(entity); - Assert.assertEquals(entity.getId()._getId(), guid); - - assertNotNull(entity.getTrait(traitName)); - assertEquals(entity.getTrait(traitName).get("type"), traitInstance.get("type")); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testAddTraitWithNoRegistration() throws Exception { - final String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = - TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - - Struct traitInstance = new Struct(traitName); - String traitInstanceAsJSON = AtlasType.toV1Json(traitInstance); - LOG.debug("traitInstanceAsJSON = {}", traitInstanceAsJSON); - - atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.CREATE_ENTITY, traitInstanceAsJSON, "random", TRAITS); - } - - @Test - public void testDeleteTrait() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = - TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait); - LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(typesDef); - - Struct traitInstance = new Struct(traitName); - - 
atlasClientV1.addTrait(guid, traitInstance); - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_ADD); - - atlasClientV1.deleteTrait(guid, traitName); - - try { - atlasClientV1.getTraitDefinition(guid, traitName); - fail("Deleted trait definition shouldn't exist"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus(), ClientResponse.Status.NOT_FOUND); - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_DELETE); - } - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testDeleteTraitNonExistent() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - final String traitName = "blah_trait"; - atlasClientV1.deleteTrait(guid, traitName); - fail("trait=" + traitName + " should be defined in type system before it can be deleted"); - } - - @Test - public void testDeleteExistentTraitNonExistentForEntity() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id id = createInstance(hiveTableInstance); - - final String guid = id._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - final String traitName = "PII_Trait" + randomString(); - TraitTypeDefinition piiTrait = TypesUtil - .createTraitTypeDef(traitName, null, Collections.emptySet(), - TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(AtlasType.toV1Json(typesDef)); - - try { - atlasClientV1.deleteTrait(guid, traitName); - fail("Call should've failed for deletion of invalid trait"); - } catch (AtlasServiceException e) { - assertNotNull(e); - assertNotNull(e.getStatus()); - assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST); - } - } - - private String random() { - return RandomStringUtils.random(10); - } - - private String randomUTF8() throws Exception { - String ret = random(); - - if (!StandardCharsets.UTF_8.equals(Charset.defaultCharset())) { - ret = new String(ret.getBytes(), StandardCharsets.UTF_8.name()); - } - - return ret; - } - - @Test - public void testUTF8() throws Exception { - String attrName = randomUTF8(); - String attrValue = randomUTF8(); - String classType = randomString(); //Type names cannot be arbitrary UTF8 characters. 
See org.apache.atlas.type.AtlasTypeUtil#validateType() - ClassTypeDefinition classTypeDefinition = TypesUtil.createClassTypeDef(classType, null, Collections.emptySet(), TypesUtil.createUniqueRequiredAttrDef(attrName, AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDefinition)); - - createType(typesDef); - - Referenceable entityToCreate = new Referenceable(classType, Collections.singletonMap(attrName, attrValue)); - Id guid = createInstance(entityToCreate); - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.GET_ENTITY, null, guid._getId()); - Object objResponse = response.get(AtlasClient.DEFINITION); - String jsonResponse = AtlasType.toJson(objResponse); - Referenceable createdEntity = AtlasType.fromV1Json(jsonResponse, Referenceable.class); - Object entityAttrValue = createdEntity.get(attrName); - - Assert.assertEquals(entityAttrValue, attrValue, - "attrName=" + attrName + "; attrValue=" + attrValue + "; entityToCreate=" + entityToCreate + "; entityId=" + guid + "; getEntityResponse_Obj=" + objResponse + "; getEntityResponse_Json=" + jsonResponse + "; getEntityResponse_Entity=" + createdEntity); - } - - - @Test - public void testPartialUpdateByGuid() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id tableId = createInstance(hiveTableInstance); - - final String guid = tableId._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String colName = "col1"+randomString(); - final List columns = new ArrayList<>(); - Map values = new HashMap<>(); - values.put(NAME, colName); - values.put("comment", "col1 comment"); - values.put(QUALIFIED_NAME, "default.table.col1@"+colName); - values.put("comment", "col1 comment"); - values.put("type", "string"); - values.put("owner", "user1"); - values.put("position", 0); - values.put("description", "col1"); - values.put("table", tableId); //table is a required reference, can't be null - values.put("userDescription", null); - values.put("displayName", null); - - Referenceable ref = new Referenceable(BaseResourceIT.COLUMN_TYPE_BUILTIN, values); - columns.add(ref); - Referenceable tableUpdated = new Referenceable(BaseResourceIT.HIVE_TABLE_TYPE_BUILTIN, new HashMap() {{ - put("columns", columns); - }}); - - LOG.debug("Updating entity= {}", tableUpdated); - EntityResult entityResult = atlasClientV1.updateEntity(guid, tableUpdated); - assertEquals(entityResult.getUpdateEntities().size(), 1); - assertEquals(entityResult.getUpdateEntities().get(0), guid); - - Referenceable entity = atlasClientV1.getEntity(guid); - List refs = (List) entity.get("columns"); - - Referenceable column = refs.get(0); - - assertEquals(columns.get(0).getValues(), column.getValues()); - assertEquals(columns.get(0).getTypeName(), column.getTypeName()); - assertEquals(columns.get(0).getTraits(), column.getTraits()); - assertEquals(columns.get(0).getTraitNames(), column.getTraitNames()); - } - - @Test - public void testPartialUpdateByUniqueAttributes() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - 
Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id tableId = createInstance(hiveTableInstance); - - final String guid = tableId._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - String colName = "col1"+randomString(); - final List columns = new ArrayList<>(); - Map values = new HashMap<>(); - values.put(NAME, colName); - values.put("comment", "col1 comment"); - values.put(QUALIFIED_NAME, "default.table.col1@"+colName); - values.put("comment", "col1 comment"); - values.put("type", "string"); - values.put("owner", "user1"); - values.put("position", 0); - values.put("description", "col1"); - values.put("table", tableId); //table is a required reference, can't be null - values.put("userDescription", null); - values.put("displayName", null); - - Referenceable ref = new Referenceable(BaseResourceIT.COLUMN_TYPE_BUILTIN, values); - columns.add(ref); - - //Update by unique attribute - values.put("type", "int"); - ref = new Referenceable(BaseResourceIT.COLUMN_TYPE_BUILTIN, values); - columns.set(0, ref); - Referenceable tableUpdated = new Referenceable(BaseResourceIT.HIVE_TABLE_TYPE_BUILTIN, new HashMap() {{ - put("columns", columns); - }}); - - LOG.debug("Updating entity= {}", tableUpdated); - EntityResult entityResult = atlasClientV1.updateEntity(BaseResourceIT.HIVE_TABLE_TYPE_BUILTIN, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, - (String) hiveTableInstance.get(QUALIFIED_NAME), tableUpdated); - assertEquals(entityResult.getUpdateEntities().size(), 1); - assertEquals(entityResult.getUpdateEntities().get(0), guid); - - Referenceable entity = atlasClientV1.getEntity(guid); - List refs = (List) entity.get("columns"); - - Assert.assertTrue(refs.get(0).getValuesMap().equals(values)); - Assert.assertEquals(refs.get(0).get("type"), "int"); - } - - @Test - public void testCompleteUpdate() throws Exception { - String dbName = "db" + randomString(); - String tableName = "table" + randomString(); - Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName); - Id dbId = createInstance(hiveDBInstance); - Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId); - Id tableId = createInstance(hiveTableInstance); - - final String guid = tableId._getId(); - try { - Assert.assertNotNull(UUID.fromString(guid)); - } catch (IllegalArgumentException e) { - Assert.fail("Response is not a guid, " + guid); - } - - final List columns = new ArrayList<>(); - Map values1 = new HashMap<>(); - values1.put(NAME, "col3"); - values1.put(QUALIFIED_NAME, "default.table.col3@cl1"); - values1.put("comment", "col3 comment"); - values1.put("type", "string"); - values1.put("owner", "user1"); - values1.put("position", 0); - values1.put("description", "col3"); - values1.put("table", tableId); - values1.put("userDescription", null); - values1.put("displayName", null); - - Map values2 = new HashMap<>(); - values2.put(NAME, "col4"); - values2.put(QUALIFIED_NAME, "default.table.col4@cl1"); - values2.put("comment", "col4 comment"); - values2.put("type", "string"); - values2.put("owner", "user2"); - values2.put("position", 1); - values2.put("description", "col4"); - values2.put("table", tableId); - values2.put("userDescription", null); - values2.put("displayName", null); - - Referenceable ref1 = new 
Referenceable(BaseResourceIT.COLUMN_TYPE_BUILTIN, values1); - Referenceable ref2 = new Referenceable(BaseResourceIT.COLUMN_TYPE_BUILTIN, values2); - columns.add(ref1); - columns.add(ref2); - hiveTableInstance.set("columns", columns); - LOG.debug("Replacing entity= {}", hiveTableInstance); - - EntityResult updateEntity = atlasClientV1.updateEntities(hiveTableInstance); - - assertNotNull(updateEntity.getUpdateEntities()); - - hiveTableInstance = atlasClientV1.getEntity(guid); - List refs = (List) hiveTableInstance.get("columns"); - Assert.assertEquals(refs.size(), 2); - - Referenceable col3 = getReferenceable(refs, "col3"); - Referenceable col4 = getReferenceable(refs, "col4"); - - Assert.assertEquals(col3.getValuesMap(), values1); - Assert.assertEquals(col4.getValuesMap(), values2); - } - - @Test - public void testDeleteEntitiesViaRestApi() throws Exception { - // Create 2 database entities - Referenceable db1 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - db1.set(NAME, dbName); - db1.set(DESCRIPTION, randomString()); - db1.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - db1.set("owner", "user1"); - db1.set(CLUSTER_NAME, "cl1"); - db1.set("parameters", Collections.EMPTY_MAP); - db1.set("location", "/tmp"); - Id db1Id = createInstance(db1); - - Referenceable db2 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName2 = randomString(); - db2.set(NAME, dbName2); - db2.set(QUALIFIED_NAME, dbName2); - db2.set(CLUSTER_NAME, randomString()); - db2.set(DESCRIPTION, randomString()); - db2.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName2); - db2.set("owner", "user2"); - db2.set(CLUSTER_NAME, "cl1"); - db2.set("parameters", Collections.EMPTY_MAP); - db2.set("location", "/tmp"); - Id db2Id = createInstance(db2); - - // Delete the database entities - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add(AtlasClient.GUID.toLowerCase(), db1Id._getId()); - queryParams.add(AtlasClient.GUID.toLowerCase(), db2Id._getId()); - - ObjectNode response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API_V1.DELETE_ENTITIES, queryParams); - List deletedGuidsList = EntityResult.fromString(response.toString()).getDeletedEntities(); - Assert.assertTrue(deletedGuidsList.contains(db1Id._getId())); - Assert.assertTrue(deletedGuidsList.contains(db2Id._getId())); - - // Verify entities were deleted from the repository. 
- for (String guid : deletedGuidsList) { - Referenceable entity = atlasClientV1.getEntity(guid); - assertEquals(entity.getId().getState(), Id.EntityState.DELETED); - } - } - - @Test - public void testDeleteEntitiesViaClientApi() throws Exception { - // Create 2 database entities - Referenceable db1 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - db1.set("name", dbName); - db1.set("description", randomString()); - db1.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName); - db1.set("owner", "user1"); - db1.set(CLUSTER_NAME, "cl1"); - db1.set("parameters", Collections.EMPTY_MAP); - db1.set("location", "/tmp"); - Id db1Id = createInstance(db1); - Referenceable db2 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName2 = randomString(); - db2.set("name", dbName2); - db2.set(QUALIFIED_NAME, dbName2); - db2.set(CLUSTER_NAME, randomString()); - db2.set("description", randomString()); - db2.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName2); - db2.set("owner", "user2"); - db2.set("clusterName", "cl1"); - db2.set("parameters", Collections.EMPTY_MAP); - db2.set("location", "/tmp"); - Id db2Id = createInstance(db2); - - // Delete the database entities - List deletedGuidsList = atlasClientV1.deleteEntities(db1Id._getId(), db2Id._getId()).getDeletedEntities(); - - // Verify that deleteEntities() response has database entity guids - Assert.assertEquals(deletedGuidsList.size(), 2); - Assert.assertTrue(deletedGuidsList.contains(db1Id._getId())); - Assert.assertTrue(deletedGuidsList.contains(db2Id._getId())); - - // Verify entities were deleted from the repository. - for (String guid : deletedGuidsList) { - Referenceable entity = atlasClientV1.getEntity(guid); - assertEquals(entity.getId().getState(), Id.EntityState.DELETED); - } - } - - @Test - public void testDeleteEntityByUniqAttribute() throws Exception { - // Create database entity - Referenceable db1 = new Referenceable(DATABASE_TYPE_BUILTIN); - String dbName = randomString(); - String qualifiedName = dbName + "@cl1"; - db1.set(NAME, dbName); - db1.set(QUALIFIED_NAME, qualifiedName); - db1.set(CLUSTER_NAME, randomString()); - db1.set(DESCRIPTION, randomString()); - db1.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName); - db1.set("owner", "user1"); - db1.set(CLUSTER_NAME, "cl1"); - db1.set("parameters", Collections.EMPTY_MAP); - db1.set("location", "/tmp"); - Id db1Id = createInstance(db1); - - // Delete the database entity - List deletedGuidsList = atlasClientV1.deleteEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, qualifiedName).getDeletedEntities(); - - // Verify that deleteEntities() response has database entity guids - Assert.assertEquals(deletedGuidsList.size(), 1); - Assert.assertTrue(deletedGuidsList.contains(db1Id._getId())); - - // Verify entities were deleted from the repository. 
- for (String guid : deletedGuidsList) { - Referenceable entity = atlasClientV1.getEntity(guid); - assertEquals(entity.getId().getState(), Id.EntityState.DELETED); - } - } - - private Referenceable getReferenceable(List refs, String name) { - Referenceable ret = null; - - for (Referenceable ref : refs) { - Map values = ref.getValuesMap(); - String entityName = (String) values.get("name"); - - if (StringUtils.equalsIgnoreCase(name, entityName)) { - ret = ref; - break; - } - } - - return ret; - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java deleted file mode 100644 index 693cb4b1bb..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java +++ /dev/null @@ -1,183 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.AtlasBaseClient; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.lineage.AtlasLineageInfo; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import java.util.*; - -/** - * Entity Lineage v2 Integration Tests. 
- */ -public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceIT { - private static final String BASE_URI = "api/atlas/v2/lineage"; - private static final AtlasBaseClient.API LINEAGE_V2_API = new AtlasBaseClient.API(BASE_URI, "GET", Response.Status.OK); - private static final String INPUT_DIRECTION = "INPUT"; - private static final String OUTPUT_DIRECTION = "OUTPUT"; - private static final String BOTH_DIRECTION = "BOTH"; - private static final String DIRECTION_PARAM = "direction"; - private static final String DEPTH_PARAM = "depth"; - - private String salesFactTable; - private String salesMonthlyTable; - private String salesDBName; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - createTypeDefinitionsV1(); - setupInstances(); - } - - @Test - public void testInputLineageInfo() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId(); - - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add(DIRECTION_PARAM, INPUT_DIRECTION); - queryParams.add(DEPTH_PARAM, "5"); - ObjectNode response = atlasClientV1.callAPI(LINEAGE_V2_API, ObjectNode.class, queryParams, tableId); - Assert.assertNotNull(response); - System.out.println("input lineage info = " + response - ); - - AtlasLineageInfo inputLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class); - - Map entities = inputLineageInfo.getGuidEntityMap(); - Assert.assertNotNull(entities); - - Set relations = inputLineageInfo.getRelations(); - Assert.assertNotNull(relations); - - Assert.assertEquals(entities.size(), 6); - Assert.assertEquals(relations.size(), 5); - Assert.assertEquals(inputLineageInfo.getLineageDirection(), AtlasLineageInfo.LineageDirection.INPUT); - Assert.assertEquals(inputLineageInfo.getLineageDepth(), 5); - Assert.assertEquals(inputLineageInfo.getBaseEntityGuid(), tableId); - } - - @Test - public void testOutputLineageInfo() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId(); - - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add(DIRECTION_PARAM, OUTPUT_DIRECTION); - queryParams.add(DEPTH_PARAM, "5"); - ObjectNode response = atlasClientV1.callAPI(LINEAGE_V2_API, ObjectNode.class, queryParams, tableId); - - Assert.assertNotNull(response); - System.out.println("output lineage info = " + response); - - AtlasLineageInfo outputLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class); - - Map entities = outputLineageInfo.getGuidEntityMap(); - Assert.assertNotNull(entities); - - Set relations = outputLineageInfo.getRelations(); - Assert.assertNotNull(relations); - - Assert.assertEquals(entities.size(), 5); - Assert.assertEquals(relations.size(), 4); - Assert.assertEquals(outputLineageInfo.getLineageDirection(), AtlasLineageInfo.LineageDirection.OUTPUT); - Assert.assertEquals(outputLineageInfo.getLineageDepth(), 5); - Assert.assertEquals(outputLineageInfo.getBaseEntityGuid(), tableId); - } - - @Test - public void testLineageInfo() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, - AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId(); - - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add(DIRECTION_PARAM, BOTH_DIRECTION); - queryParams.add(DEPTH_PARAM, "5"); - ObjectNode response = atlasClientV1.callAPI(LINEAGE_V2_API, 
ObjectNode.class, queryParams, tableId); - - Assert.assertNotNull(response); - System.out.println("both lineage info = " + response); - - AtlasLineageInfo bothLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class); - - Map entities = bothLineageInfo.getGuidEntityMap(); - Assert.assertNotNull(entities); - - Set relations = bothLineageInfo.getRelations(); - Assert.assertNotNull(relations); - - Assert.assertEquals(entities.size(), 6); - Assert.assertEquals(relations.size(), 5); - Assert.assertEquals(bothLineageInfo.getLineageDirection(), AtlasLineageInfo.LineageDirection.BOTH); - Assert.assertEquals(bothLineageInfo.getLineageDepth(), 5); - Assert.assertEquals(bothLineageInfo.getBaseEntityGuid(), tableId); - } - - private void setupInstances() throws Exception { - salesDBName = "Sales" + randomString(); - Id salesDB = database(salesDBName, "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales"); - - List salesFactColumns = Arrays.asList(column("time_id", "int", "time id"), column("product_id", "int", "product id"), - column("customer_id", "int", "customer id"), - column("sales", "double", "product id")); - - salesFactTable = "sales_fact" + randomString(); - Id salesFact = table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns); - - List timeDimColumns = Arrays.asList(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"), - column("weekDay", "int", "week Day")); - - Id timeDim = - table("time_dim" + randomString(), "time dimension table", salesDB, "John Doe", "EXTERNAL", - timeDimColumns); - - Id reportingDB = - database("Reporting" + randomString(), "reporting database", "Jane BI", - "hdfs://host:8000/apps/warehouse/reporting"); - - Id salesFactDaily = - table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB, - "Joe BI", "MANAGED", salesFactColumns); - - loadProcess("loadSalesDaily" + randomString(), "John ETL", Arrays.asList(salesFact, timeDim), - Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph"); - - salesMonthlyTable = "sales_fact_monthly_mv" + randomString(); - Id salesFactMonthly = - table(salesMonthlyTable, "sales fact monthly materialized view", reportingDB, "Jane BI", - "MANAGED", salesFactColumns); - - loadProcess("loadSalesMonthly" + randomString(), "John ETL", Collections.singletonList(salesFactDaily), - Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph"); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java deleted file mode 100755 index 00f0aab1a2..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java +++ /dev/null @@ -1,1202 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.collect.Lists; -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.EntityAuditEvent; -import org.apache.atlas.bulkimport.BulkImportResponse; -import org.apache.atlas.model.TimeBoundary; -import org.apache.atlas.model.audit.AtlasAuditEntry; -import org.apache.atlas.model.audit.AuditSearchParameters; -import org.apache.atlas.model.audit.EntityAuditEventV2; -import org.apache.atlas.model.instance.AtlasClassification; -import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasObjectId; -import org.apache.atlas.model.instance.ClassificationAssociateRequest; -import org.apache.atlas.model.instance.EntityMutationResponse; -import org.apache.atlas.model.instance.EntityMutations; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.TestResourceFileUtils; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.RandomStringUtils; -import org.apache.hadoop.util.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -/** - * Integration tests for Entity Jersey Resource. 
- */ -public class EntityV2JerseyResourceIT extends BaseResourceIT { - - private static final Logger LOG = LoggerFactory.getLogger(EntityV2JerseyResourceIT.class); - - private static final String ENTITY_NOTIFICATION_VERSION_PROPERTY = "atlas.notification.entity.version"; - - private final String DATABASE_NAME = "db" + randomString(); - private final String TABLE_NAME = "table" + randomString(); - private String traitName; - private String createdDBName; - private String createdTableQualifiedName; - - private AtlasEntity dbEntity; - private AtlasEntity tableEntity; - private AtlasClassificationDef piiTrait; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - createTypeDefinitionsV2(); - - } - - @Test - public void testSubmitEntity() throws Exception { - TypesUtil.Pair dbAndTable = createDBAndTable(); - assertNotNull(dbAndTable); - assertNotNull(dbAndTable.left); - assertNotNull(dbAndTable.right); - // Writing created table data to a file for import test. - createImportFile(); - } - - @Test - public void testCreateNestedEntities() throws Exception { - AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo(); - - AtlasEntity databaseInstance = new AtlasEntity(DATABASE_TYPE_V2, "name", "db1"); - databaseInstance.setAttribute("name", "db1"); - databaseInstance.setAttribute("description", "foo database"); - databaseInstance.setAttribute("owner", "user1"); - databaseInstance.setAttribute("locationUri", "/tmp"); - databaseInstance.setAttribute("createTime",1000); - entities.addEntity(databaseInstance); - - int nTables = 5; - int colsPerTable=3; - - for(int i = 0; i < nTables; i++) { - String tableName = "db1-table-" + i; - - AtlasEntity tableInstance = new AtlasEntity(HIVE_TABLE_TYPE_V2, "name", tableName); - tableInstance.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - tableInstance.setAttribute("db", AtlasTypeUtil.getAtlasObjectId(databaseInstance)); - tableInstance.setAttribute("description", tableName + " table"); - entities.addEntity(tableInstance); - - List columns = new ArrayList<>(); - for(int j = 0; j < colsPerTable; j++) { - AtlasEntity columnInstance = new AtlasEntity(COLUMN_TYPE_V2); - columnInstance.setAttribute("name", tableName + "-col-" + j); - columnInstance.setAttribute("dataType", "String"); - columnInstance.setAttribute("comment", "column " + j + " for table " + i); - - columns.add(AtlasTypeUtil.getAtlasObjectId(columnInstance)); - - entities.addReferredEntity(columnInstance); - } - tableInstance.setAttribute("columns", columns); - } - - //Create the tables. The database and columns should be created automatically, since - //the tables reference them. 
- - EntityMutationResponse response = atlasClientV2.createEntities(entities); - Assert.assertNotNull(response); - - Map guidsCreated = response.getGuidAssignments(); - assertEquals(guidsCreated.size(), nTables * colsPerTable + nTables + 1); - assertNotNull(guidsCreated.get(databaseInstance.getGuid())); - - for(AtlasEntity r : entities.getEntities()) { - assertNotNull(guidsCreated.get(r.getGuid())); - } - - for(AtlasEntity r : entities.getReferredEntities().values()) { - assertNotNull(guidsCreated.get(r.getGuid())); - } - } - - @Test - public void testRequestUser() throws Exception { - AtlasEntity hiveDBInstanceV2 = createHiveDB(randomString()); - List events = atlasClientV1.getEntityAuditEvents(hiveDBInstanceV2.getGuid(), (short) 10); - assertEquals(events.size(), 1); - assertEquals(events.get(0).getUser(), "admin"); - } - - @Test - public void testEntityDeduping() throws Exception { - ArrayNode results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); - assertEquals(results.size(), 1); - - final AtlasEntity hiveDBInstanceV2 = createHiveDB(); - - results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); - assertEquals(results.size(), 1); - - //Test the same across references - final String tableName = randomString(); - AtlasEntity hiveTableInstanceV2 = createHiveTableInstanceV2(hiveDBInstanceV2, tableName); - hiveTableInstanceV2.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); - - EntityMutationResponse entity = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(hiveTableInstanceV2)); - assertNotNull(entity); - assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); - results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); - assertEquals(results.size(), 1); - } - - @Test - public void testEntityDefinitionAcrossTypeUpdate() throws Exception { - //create type - AtlasEntityDef entityDef = AtlasTypeUtil - .createClassTypeDef(randomString(), - Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string") - ); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getEntityDefs().add(entityDef); - - AtlasTypesDef created = atlasClientV2.createAtlasTypeDefs(typesDef); - assertNotNull(created); - assertNotNull(created.getEntityDefs()); - assertEquals(created.getEntityDefs().size(), 1); - - //create entity for the type - AtlasEntity instance = new AtlasEntity(entityDef.getName()); - instance.setAttribute("name", randomString()); - EntityMutationResponse mutationResponse = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(instance)); - assertNotNull(mutationResponse); - assertNotNull(mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); - assertEquals(mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size(),1 ); - String guid = mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0).getGuid(); - - //update type - add attribute - entityDef = AtlasTypeUtil.createClassTypeDef(entityDef.getName(), Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), - AtlasTypeUtil.createOptionalAttrDef("description", "string")); - - typesDef = new AtlasTypesDef(); - typesDef.getEntityDefs().add(entityDef); - - AtlasTypesDef updated = atlasClientV2.updateAtlasTypeDefs(typesDef); - assertNotNull(updated); - assertNotNull(updated.getEntityDefs()); - assertEquals(updated.getEntityDefs().size(), 1); - - //Get 
definition after type update - new attributes should be null - AtlasEntity entityByGuid = getEntityByGuid(guid); - assertNull(entityByGuid.getAttribute("description")); - assertEquals(entityByGuid.getAttribute("name"), instance.getAttribute("name")); - } - - @Test - public void testEntityInvalidValue() throws Exception { - AtlasEntity databaseInstance = new AtlasEntity(DATABASE_TYPE_V2); - String dbName = randomString(); - String nullString = null; - String emptyString = ""; - databaseInstance.setAttribute("name", dbName); - databaseInstance.setAttribute("description", nullString); - AtlasEntityHeader created = createEntity(databaseInstance); - - // null valid value for required attr - description - assertNull(created); - - databaseInstance.setAttribute("description", emptyString); - created = createEntity(databaseInstance); - - // empty string valid value for required attr - assertNotNull(created); - - databaseInstance.setGuid(created.getGuid()); - databaseInstance.setAttribute("owner", nullString); - databaseInstance.setAttribute("locationUri", emptyString); - - created = updateEntity(databaseInstance); - - // null/empty string valid value for optional attr - assertNotNull(created); - } - - @Test - public void testGetEntityByAttribute() throws Exception { - AtlasEntity hiveDB = createHiveDB(); - String qualifiedName = (String) hiveDB.getAttribute(NAME); - //get entity by attribute - - AtlasEntity byAttribute = atlasClientV2.getEntityByAttribute(DATABASE_TYPE_V2, toMap(NAME, qualifiedName)).getEntity(); - assertEquals(byAttribute.getTypeName(), DATABASE_TYPE_V2); - assertEquals(byAttribute.getAttribute(NAME), qualifiedName); - } - - @Test - public void testGetEntitiesByAttribute() throws Exception { - AtlasEntity hiveDB1 = createHiveDB(); - AtlasEntity hiveDB2 = createHiveDB(); - - String qualifiedNameDB1 = (String) hiveDB1.getAttribute(NAME); - String qualifiedNameDB2 = (String) hiveDB2.getAttribute(NAME); - List> list = new ArrayList<>(); - list.add(toMap(NAME, qualifiedNameDB1)); - list.add(toMap(NAME, qualifiedNameDB2)); - - AtlasEntity.AtlasEntitiesWithExtInfo info = atlasClientV2.getEntitiesByAttribute(DATABASE_TYPE_V2, list); - List entityList = info.getEntities(); - assertEquals(entityList.size(), 2); - assertEquals(entityList.get(0).getTypeName(), DATABASE_TYPE_V2); - assertEquals(entityList.get(1).getTypeName(), DATABASE_TYPE_V2); - } - - @Test - public void testSubmitEntityWithBadDateFormat() throws Exception { - AtlasEntity hiveDBEntity = createHiveDBInstanceV2("db" + randomString()); - AtlasEntityHeader hiveDBHeader = createEntity(hiveDBEntity); - hiveDBEntity.setGuid(hiveDBHeader.getGuid()); - - AtlasEntity tableInstance = createHiveTableInstanceV2(hiveDBEntity, "table" + randomString()); - //Dates with an invalid format are simply nulled out. This does not produce - //an error. See AtlasBuiltInTypes.AtlasDateType.getNormalizedValue(). 
- tableInstance.setAttribute("lastAccessTime", 1107201407); - AtlasEntityHeader tableEntityHeader = createEntity(tableInstance); - assertNotNull(tableEntityHeader); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testAddProperty() throws Exception { - //add property - String description = "bar table - new desc"; - addProperty(createHiveTable().getGuid(), "description", description); - - AtlasEntity entityByGuid = getEntityByGuid(createHiveTable().getGuid()); - Assert.assertNotNull(entityByGuid); - - entityByGuid.setAttribute("description", description); - - // TODO: This behavior should've been consistent across APIs -// //invalid property for the type -// try { -// addProperty(table.getGuid(), "invalid_property", "bar table"); -// Assert.fail("Expected AtlasServiceException"); -// } catch (AtlasServiceException e) { -// assertNotNull(e.getStatus()); -// assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST); -// } - - //non-string property, update - Object currentTime = new Date(System.currentTimeMillis()); - addProperty(createHiveTable().getGuid(), "createTime", currentTime); - - entityByGuid = getEntityByGuid(createHiveTable().getGuid()); - Assert.assertNotNull(entityByGuid); - } - - @Test - public void testAddNullPropertyValue() throws Exception { - // FIXME: Behavior has changed between v1 and v2 - //add property -// try { - addProperty(createHiveTable().getGuid(), "description", null); -// Assert.fail("Expected AtlasServiceException"); -// } catch(AtlasServiceException e) { -// Assert.assertEquals(e.getStatus().getStatusCode(), Response.Status.BAD_REQUEST.getStatusCode()); -// } - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testGetInvalidEntityDefinition() throws Exception { - getEntityByGuid("blah"); - } - - @Test(dependsOnMethods = "testSubmitEntity", enabled = false) - public void testGetEntityList() throws Exception { - // TODO: Can only be done when there's a search API exposed from entity REST - } - - @Test(enabled = false) - public void testGetEntityListForBadEntityType() throws Exception { - // FIXME: Complete test when search interface is in place - } - - @Test(enabled = false) - public void testGetEntityListForNoInstances() throws Exception { - // FIXME: Complete test when search interface is in place - /* - String typeName = ""; - - ClientResponse clientResponse = - service.path(ENTITIES).queryParam("type", typeName).accept(Servlets.JSON_MEDIA_TYPE) - .type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class); - Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); - - String responseAsString = clientResponse.getEntity(String.class); - Assert.assertNotNull(responseAsString); - - JSONObject response = new JSONObject(responseAsString); - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - final JSONArray list = response.getJSONArray(AtlasClient.RESULTS); - Assert.assertEquals(list.length(), 0); - */ - } - - private String addNewType() throws Exception { - String typeName = "test" + randomString(); - AtlasEntityDef classTypeDef = AtlasTypeUtil - .createClassTypeDef(typeName, Collections.emptySet(), - AtlasTypeUtil.createRequiredAttrDef("name", "string"), - AtlasTypeUtil.createRequiredAttrDef("description", "string")); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getEntityDefs().add(classTypeDef); - createType(typesDef); - return typeName; - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testGetTraitNames() throws Exception { - 
AtlasClassifications classifications = atlasClientV2.getClassifications(createHiveTable().getGuid()); - assertNotNull(classifications); - assertTrue(classifications.getList().size() > 0); - assertEquals(classifications.getList().size(), 9); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testCommonAttributes() throws Exception{ - AtlasEntity entity = getEntityByGuid(createHiveTable().getGuid()); - Assert.assertNotNull(entity.getStatus()); - Assert.assertNotNull(entity.getVersion()); - Assert.assertNotNull(entity.getCreatedBy()); - Assert.assertNotNull(entity.getCreateTime()); - Assert.assertNotNull(entity.getUpdatedBy()); - Assert.assertNotNull(entity.getUpdateTime()); - } - - @Test(dependsOnMethods = "testGetTraitNames") - public void testAddTrait() throws Exception { - traitName = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = - AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - atlasClientV2.addClassifications(createHiveTable().getGuid(), Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - - assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD); - AtlasClassifications classifications = atlasClientV2.getEntityClassifications(createHiveTable().getGuid(), piiTrait.getName()); - assertNotNull(classifications); - } - - @Test(dependsOnMethods = "testAddTrait") - public void testAddLabels() throws Exception { - Set set = new HashSet<>(); - set.add("lable"); - atlasClientV2.addLabels(createHiveTable().getGuid(), set); - AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid(), false, true); - assertNotNull(info); - assertNotNull(info.getEntity().getLabels()); - assertEquals(info.getEntity().getLabels().size(), 1); - } - - @Test(dependsOnMethods = "testAddLabels") - public void testSetLabels() throws Exception { - Set setNet = new HashSet<>(); - setNet.add("labelNext"); - atlasClientV2.setLabels(createHiveTable().getGuid(), setNet); - AtlasEntityWithExtInfo infoForSet = atlasClientV2.getEntityByGuid(createHiveTable().getGuid(), false, true); - assertNotNull(infoForSet); - assertNotNull(infoForSet.getEntity().getLabels()); - assertEquals(infoForSet.getEntity().getLabels().size(), 1); - } - - @Test(dependsOnMethods = "testSetLabels") - public void testDeleteLabels() throws Exception { - Set set = new HashSet<>(); - set.add("testNext"); - atlasClientV2.removeLabels(createHiveTable().getGuid(), set); - AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid(), false, true); - assertNotNull(info); - assertNotNull(info.getEntity().getLabels()); - assertEquals(info.getEntity().getLabels().size(), 1); - } - - @Test(dependsOnMethods = "testGetTraitNames") - public void testAddTraitWithValidityPeriod() throws Exception { - traitName = "PII_Trait" + randomString(); - - AtlasClassificationDef piiTrait = AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(piiTrait), Collections.emptyList()); - - createType(typesDef); - - String tableGuid = createHiveTable().getGuid(); - AtlasClassification classification = new AtlasClassification(piiTrait.getName()); - TimeBoundary validityPeriod = new TimeBoundary("2018/03/01 00:00:00", "2018/04/01 00:00:00", "GMT"); - - 
classification.setEntityGuid(tableGuid); - classification.addValityPeriod(validityPeriod); - classification.setPropagate(true); - classification.setRemovePropagationsOnEntityDelete(true); - - atlasClientV2.addClassifications(tableGuid, Collections.singletonList(classification)); - - assertEntityAudit(tableGuid, EntityAuditEvent.EntityAuditAction.TAG_ADD); - - AtlasClassifications classifications = atlasClientV2.getClassifications(tableGuid); - - assertNotNull(classifications); - assertNotNull(classifications.getList()); - assertTrue(classifications.getList().size() > 1); - - boolean foundClassification = false; - for (AtlasClassification entityClassification : classifications.getList()) { - if (StringUtils.equalsIgnoreCase(entityClassification.getTypeName(), piiTrait.getName())) { - foundClassification = true; - - assertEquals(entityClassification.getTypeName(), piiTrait.getName()); - assertNotNull(entityClassification.getValidityPeriods()); - assertEquals(entityClassification.getValidityPeriods().size(), 1); - assertEquals(entityClassification.getValidityPeriods().get(0), validityPeriod); - assertEquals(entityClassification, classification); - - break; - } - } - - assertTrue(foundClassification, "classification '" + piiTrait.getName() + "' is missing for entity '" + tableGuid + "'"); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testGetTraitDefinitionForEntity() throws Exception{ - traitName = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = - AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - AtlasClassificationDef classificationByName = atlasClientV2.getClassificationDefByName(traitName); - assertNotNull(classificationByName); - - AtlasEntity hiveTable = createHiveTable(); - assertEquals(hiveTable.getClassifications().size(), 7); - - AtlasClassification piiClassification = new AtlasClassification(piiTrait.getName()); - - atlasClientV2.addClassifications(hiveTable.getGuid(), Lists.newArrayList(piiClassification)); - - AtlasClassifications classifications = atlasClientV2.getClassifications(hiveTable.getGuid()); - assertNotNull(classifications); - assertTrue(classifications.getList().size() > 0); - assertEquals(classifications.getList().size(), 9); - } - - @Test(dependsOnMethods = "testGetTraitNames") - public void testAddTraitWithAttribute() throws Exception { - final String traitName = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = AtlasTypeUtil - .createTraitTypeDef(traitName, Collections.emptySet(), - AtlasTypeUtil.createRequiredAttrDef("type", "string")); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - AtlasClassification traitInstance = new AtlasClassification(traitName); - traitInstance.setAttribute("type", "SSN"); - - final String guid = createHiveTable().getGuid(); - atlasClientV2.addClassifications(guid, Collections.singletonList(traitInstance)); - - // verify the response - AtlasEntity withAssociationByGuid = atlasClientV2.getEntityByGuid(guid).getEntity(); - assertNotNull(withAssociationByGuid); - assertFalse(withAssociationByGuid.getClassifications().isEmpty()); - - boolean found = false; - for (AtlasClassification atlasClassification : withAssociationByGuid.getClassifications()) { - String attribute = (String)atlasClassification.getAttribute("type"); - if (attribute != null && attribute.equals("SSN")) 
{ - found = true; - break; - } - } - assertTrue(found); - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testAddTraitWithNoRegistration() throws Exception { - final String traitName = "PII_Trait" + randomString(); - AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - - AtlasClassification traitInstance = new AtlasClassification(traitName); - - atlasClientV2.addClassifications("random", Collections.singletonList(traitInstance)); - } - - @Test(dependsOnMethods = "testAddTrait") - public void testDeleteTrait() throws Exception { - final String guid = createHiveTable().getGuid(); - - try { - atlasClientV2.deleteClassification(guid, traitName); - } catch (AtlasServiceException ex) { - fail("Deletion should've succeeded"); - } - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_DELETE); - } - - @Test - public void testDeleteTraitNonExistent() throws Exception { - final String traitName = "blah_trait"; - - try { - atlasClientV2.deleteClassification("random", traitName); - fail("Deletion for bogus names shouldn't have succeeded"); - } catch (AtlasServiceException ex) { - assertNotNull(ex.getStatus()); -// assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); - assertEquals(ex.getStatus(), ClientResponse.Status.BAD_REQUEST); - // Should it be a 400 or 404 - } - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testDeleteExistentTraitNonExistentForEntity() throws Exception { - - final String guid = createHiveTable().getGuid(); - final String traitName = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = AtlasTypeUtil - .createTraitTypeDef(traitName, Collections.emptySet(), - AtlasTypeUtil.createRequiredAttrDef("type", "string")); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - try { - atlasClientV2.deleteClassification(guid, traitName); - fail("Deletion should've failed for non-existent trait association"); - } catch (AtlasServiceException ex) { - Assert.assertNotNull(ex.getStatus()); - assertEquals(ex.getStatus(), ClientResponse.Status.BAD_REQUEST); - } - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testGetEntityHeaderByGuid() throws Exception { - AtlasEntityHeader header = atlasClientV2.getEntityHeaderByGuid(createHiveTable().getGuid()); - assertNotNull(header); - assertEquals(header.getGuid(), createHiveTable().getGuid()); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testGetEntityHeaderByAttribute() throws Exception { - AtlasEntity hiveDB = createHiveDB(); - String qualifiedName = (String) hiveDB.getAttribute(NAME); - AtlasEntityHeader header = atlasClientV2.getEntityHeaderByAttribute(DATABASE_TYPE_V2, toMap(NAME, qualifiedName)); - assertNotNull(header); - assertEquals(header.getTypeName(), DATABASE_TYPE_V2); - assertEquals(header.getAttribute(NAME), qualifiedName); - } - - @Test - public void testUTF8() throws Exception { - String classType = randomString(); - String attrName = randomUTF8(); - String attrValue = randomUTF8(); - - AtlasEntityDef classTypeDef = AtlasTypeUtil - .createClassTypeDef(classType, Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef(attrName, "string")); - AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); - atlasTypesDef.getEntityDefs().add(classTypeDef); - createType(atlasTypesDef); - - AtlasEntity instance = new AtlasEntity(classType); - instance.setAttribute(attrName, attrValue); - AtlasEntityHeader entity = createEntity(instance); - 
assertNotNull(entity); - assertNotNull(entity.getGuid()); - - AtlasEntity entityByGuid = getEntityByGuid(entity.getGuid()); - assertEquals(entityByGuid.getAttribute(attrName), attrValue); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testPartialUpdate() throws Exception { - final List columns = new ArrayList<>(); - Map values = new HashMap<>(); - values.put("name", "col1"); - values.put(NAME, "qualifiedName.col1"); - values.put("type", "string"); - values.put("comment", "col1 comment"); - - AtlasEntity colEntity = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values); - columns.add(colEntity); - AtlasEntity hiveTable = createHiveTable(); - AtlasEntity tableUpdated = hiveTable; - - hiveTable.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); - - AtlasEntityWithExtInfo entityInfo = new AtlasEntityWithExtInfo(tableUpdated); - entityInfo.addReferredEntity(colEntity); - - LOG.debug("Full Update entity= " + tableUpdated); - EntityMutationResponse updateResult = atlasClientV2.updateEntity(entityInfo); - assertNotNull(updateResult); - assertNotNull(updateResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); - assertTrue(updateResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0); - - String guid = hiveTable.getGuid(); - AtlasEntity entityByGuid1 = getEntityByGuid(guid); - assertNotNull(entityByGuid1); - entityByGuid1.getAttribute("columns"); - - values.put("type", "int"); - colEntity = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values); - columns.clear(); - columns.add(colEntity); - - tableUpdated = new AtlasEntity(HIVE_TABLE_TYPE_V2, "name", entityByGuid1.getAttribute("name")); - tableUpdated.setGuid(entityByGuid1.getGuid()); - tableUpdated.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); - - // tableUpdated = hiveTable; - // tableUpdated.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); - - LOG.debug("Partial Update entity by unique attributes= " + tableUpdated); - Map uniqAttributes = new HashMap<>(); - uniqAttributes.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, (String) hiveTable.getAttribute("name")); - - entityInfo = new AtlasEntityWithExtInfo(tableUpdated); - entityInfo.addReferredEntity(colEntity); - - EntityMutationResponse updateResponse = atlasClientV2.updateEntityByAttribute(BaseResourceIT.HIVE_TABLE_TYPE_V2, uniqAttributes, entityInfo); - - assertNotNull(updateResponse); - assertNotNull(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE)); - assertTrue(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE).size() > 0); - - AtlasEntity entityByGuid2 = getEntityByGuid(guid); - assertNotNull(entityByGuid2); - } - - private AtlasEntity getEntityByGuid(String guid) throws AtlasServiceException { - return atlasClientV2.getEntityByGuid(guid).getEntity(); - } - - @Test(dependsOnMethods = "testSubmitEntity") - public void testCompleteUpdate() throws Exception { - final List columns = new ArrayList<>(); - Map values1 = new HashMap<>(); - values1.put("name", "col3"); - values1.put(NAME, "qualifiedName.col3"); - values1.put("type", "string"); - values1.put("comment", "col3 comment"); - - Map values2 = new HashMap<>(); - values2.put("name", "col4"); - values2.put(NAME, "qualifiedName.col4"); - values2.put("type", "string"); - values2.put("comment", "col4 comment"); - - AtlasEntity colEntity1 = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values1); - AtlasEntity colEntity2 = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, 
values2); - columns.add(colEntity1); - columns.add(colEntity2); - AtlasEntity hiveTable = createHiveTable(); - hiveTable.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); - - AtlasEntityWithExtInfo entityInfo = new AtlasEntityWithExtInfo(hiveTable); - entityInfo.addReferredEntity(colEntity1); - entityInfo.addReferredEntity(colEntity2); - - EntityMutationResponse updateEntityResult = atlasClientV2.updateEntity(entityInfo); - assertNotNull(updateEntityResult); - assertNotNull(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); - assertNotNull(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); - //2 columns are being created, and 1 hiveTable is being updated - assertEquals(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size(), 1); - assertEquals(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size(), 2); - - AtlasEntity entityByGuid = getEntityByGuid(hiveTable.getGuid()); - List refs = (List) entityByGuid.getAttribute("columns"); - assertEquals(refs.size(), 2); - } - - @Test - public void testDeleteEntities() throws Exception { - // Create 2 database entities - AtlasEntityHeader entity1Header = createRandomDatabaseEntity(); - AtlasEntityHeader entity2Header = createRandomDatabaseEntity(); - - // Delete the database entities - EntityMutationResponse deleteResponse = atlasClientV2.deleteEntitiesByGuids(Arrays.asList(entity1Header.getGuid(), entity2Header.getGuid())); - - // Verify that deleteEntities() response has database entity guids - assertNotNull(deleteResponse); - assertNotNull(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE)); - assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).size(), 2); - - // Verify entities were deleted from the repository. 
- } - - @Test - public void testPurgeEntities() throws Exception { - // Create 2 database entities - AtlasEntityHeader entity1Header = createRandomDatabaseEntity(); - AtlasEntityHeader entity2Header = createRandomDatabaseEntity(); - - ApplicationProperties.get().setProperty(ENTITY_NOTIFICATION_VERSION_PROPERTY, "v2"); - - // Delete the database entities - EntityMutationResponse deleteResponse = atlasClientV2.deleteEntitiesByGuids(Arrays.asList(entity1Header.getGuid(), entity2Header.getGuid())); - - // Verify that deleteEntities() response has database entity guids - assertNotNull(deleteResponse); - assertNotNull(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE)); - assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).size(), 2); - - //Wait for delete operation - Thread.sleep(5000); - - // Purge the database entities - Set guids = Stream.of(entity1Header.getGuid(), entity2Header.getGuid()).collect(Collectors.toSet()); - EntityMutationResponse purgeResponse = atlasClientV2.purgeEntitiesByGuids(guids); - - // Verify that purgeEntities() response has database entity guids - assertNotNull(purgeResponse); - assertNotNull(purgeResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PURGE)); - assertEquals(purgeResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PURGE).size(), 2); - - AuditSearchParameters auditSearchParameters = TestResourceFileUtils.readObjectFromJson("audit-search-parameter-purge", - AuditSearchParameters.class); - List res = atlasClientV2.getAtlasAuditByOperation(auditSearchParameters); - // Verify that the audit entry is set - assertNotNull(res); - } - - @Test - public void testPurgeEntitiesWithoutDelete() throws Exception { - // Create 2 database entities - AtlasEntityHeader entity1Header = createRandomDatabaseEntity(); - AtlasEntityHeader entity2Header = createRandomDatabaseEntity(); - - // Purge the database entities without delete - Set guids = Stream.of(entity1Header.getGuid(), entity2Header.getGuid()).collect(Collectors.toSet()); - EntityMutationResponse purgeResponse = atlasClientV2.purgeEntitiesByGuids(guids); - - // Verify that purgeEntities() response has database entity guids - assertNotNull(purgeResponse); - assertNull(purgeResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PURGE)); - } - - @Test - public void testDeleteEntityByUniqAttribute() throws Exception { - // Create database entity - AtlasEntity hiveDB = createHiveDB(DATABASE_NAME + randomUTF8()); - - // Delete the database entity - EntityMutationResponse deleteResponse = atlasClientV2.deleteEntityByAttribute(DATABASE_TYPE_V2, toMap(NAME, (String) hiveDB.getAttribute(NAME))); - - // Verify that deleteEntities() response has database entity guids - assertNotNull(deleteResponse); - assertNotNull(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE)); - assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).size(), 1); - - // Verify entities were deleted from the repository. 
- } - - @Test - public void testPartialUpdateEntityByGuid() throws Exception { - EntityMutationResponse updateResponse = atlasClientV2.partialUpdateEntityByGuid(createHiveTable().getGuid(), Collections.singletonMap("key1", "value1"), "description"); - assertNotNull(updateResponse); - assertNotNull(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE)); - assertTrue(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE).size() > 0); - } - - @Test(dependsOnMethods = "testGetTraitNames") - public void testAddClassificationsByUniqueAttribute() throws Exception { - traitName = "PII_Trait" + randomString(); - piiTrait = - AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - createdTableQualifiedName = (String) createHiveTable().getAttribute(QUALIFIED_NAME); - atlasClientV2.addClassifications(createHiveTable().getTypeName(), toMap(QUALIFIED_NAME, createdTableQualifiedName), Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD); - } - - @Test(dependsOnMethods = "testAddClassificationsByUniqueAttribute") - public void testUpdateClassifications() throws Exception { - atlasClientV2.updateClassifications(createHiveTable().getGuid(), Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_UPDATE); - } - - @Test(dependsOnMethods = "testUpdateClassifications") - public void testUpdateClassificationsByUniqueAttribute() throws Exception { - createdTableQualifiedName = (String) createHiveTable().getAttribute(QUALIFIED_NAME); - atlasClientV2.updateClassifications(createHiveTable().getTypeName(), toMap(QUALIFIED_NAME, createdTableQualifiedName), Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD); - } - - @Test(dependsOnMethods = "testUpdateClassificationsByUniqueAttribute") - public void testRemoveEntityClassification() throws Exception { - createdTableQualifiedName = (String) createHiveTable().getAttribute(QUALIFIED_NAME); - atlasClientV2.removeClassification(createHiveTable().getTypeName(), toMap(QUALIFIED_NAME, createdTableQualifiedName), piiTrait.getName()); - assertEntityAuditV2(createHiveTable().getGuid(), EntityAuditEventV2.EntityAuditActionV2.CLASSIFICATION_DELETE); - } - - @Test - public void testAddOrUpdateBusinessAttributes() throws Exception { - Map> businessAttributesMap = new HashMap<>(); - Map bmAttrMapReq = new HashMap<>(); - bmAttrMapReq.put("attr8", "01234567890123456789"); - businessAttributesMap.put("bmWithAllTypes", bmAttrMapReq); - atlasClientV2.addOrUpdateBusinessAttributes(createHiveTable().getGuid(), false, businessAttributesMap); - AtlasEntity.AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid()); - assertNotNull(info); - Map> outputMap = info.getEntity().getBusinessAttributes(); - assertNotNull(outputMap); - assertEquals(outputMap.get("bmWithAllTypes").size(), 1); - } - - @Test(dependsOnMethods = "testAddOrUpdateBusinessAttributes") - public void testRemoveBusinessAttributes() throws Exception { - Map> businessAttributesMap = new HashMap<>(); - Map bmAttrMapReq = new HashMap<>(); - bmAttrMapReq.put("attr8", 
"01234567890123456789"); - businessAttributesMap.put("bmWithAllTypes", bmAttrMapReq); - atlasClientV2.removeBusinessAttributes(createHiveTable().getGuid(), businessAttributesMap); - AtlasEntity.AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid()); - assertNotNull(info); - Map> outputMap = info.getEntity().getBusinessAttributes(); - assertNull(outputMap); - } - - // TODO Enable this test case after fixing addOrUpdateBusinessAttributesByBName API. - @Test(enabled = false) - public void testAddOrUpdateBusinessAttributesByBName() throws Exception { - Map> businessAttributesMap = new HashMap<>(); - Map bmAttrMapReq = new HashMap<>(); - bmAttrMapReq.put("attr8", "01234567890123456789"); - businessAttributesMap.put("bmWithAllTypes", bmAttrMapReq); - atlasClientV2.addOrUpdateBusinessAttributes(createHiveTable().getGuid(), "bmWithAllTypes", businessAttributesMap); - AtlasEntity.AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid()); - assertNotNull(info); - Map> outputMap = info.getEntity().getBusinessAttributes(); - assertNotNull(outputMap); - assertEquals(outputMap.get("bmWithAllTypes").size(), 1); - } - - // TODO Enable this test case after fixing addOrUpdateBusinessAttributesByBName API. - @Test(enabled = false, dependsOnMethods = "testAddOrUpdateBusinessAttributesByBName") - public void testRemoveBusinessAttributesByBName() throws Exception { - Map> businessAttributesMap = new HashMap<>(); - Map bmAttrMapReq = new HashMap<>(); - bmAttrMapReq.put("attr8", "01234567890123456789"); - businessAttributesMap.put("bmWithAllTypes", bmAttrMapReq); - atlasClientV2.removeBusinessAttributes(createHiveTable().getGuid(), "bmWithAllTypes", businessAttributesMap); - AtlasEntity.AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveTable().getGuid()); - assertNotNull(info); - Map> outputMap = info.getEntity().getBusinessAttributes(); - assertNull(outputMap); - } - - @Test - public void testAddLabelsByTypeName() throws Exception { - createdDBName = (String) createHiveDB().getAttribute(NAME); - Set labels = new HashSet<>(); - labels.add("labelByTypeName"); - atlasClientV2.addLabels(createHiveDB().getTypeName(), toMap(NAME, createdDBName), labels); - AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveDB().getGuid(), false, true); - assertNotNull(info); - assertNotNull(info.getEntity().getLabels()); - assertEquals(info.getEntity().getLabels().size(), 1); - } - - @Test(dependsOnMethods = "testAddLabelsByTypeName") - public void testSetLabelsByTypeName() throws Exception { - createdDBName = (String) createHiveDB().getAttribute(NAME); - Set labels = new HashSet<>(); - labels.add("labelByTypeNameNext"); - atlasClientV2.setLabels(createHiveDB().getTypeName(), toMap(NAME, createdDBName), labels); - AtlasEntityWithExtInfo infoForSet = atlasClientV2.getEntityByGuid(createHiveDB().getGuid(), false, true); - assertNotNull(infoForSet); - assertNotNull(infoForSet.getEntity().getLabels()); - assertEquals(infoForSet.getEntity().getLabels().size(), 2); - } - - - @Test(dependsOnMethods = "testSetLabelsByTypeName") - public void testDeleteLabelsByTypeName() throws Exception { - Set labels = new HashSet<>(); - labels.add("labelByTypeNameNext"); - createdDBName = (String) createHiveDB().getAttribute(NAME); - atlasClientV2.removeLabels(createHiveDB().getTypeName(), toMap(NAME, createdDBName), labels); - AtlasEntityWithExtInfo info = atlasClientV2.getEntityByGuid(createHiveDB().getGuid(), false, true); - assertNotNull(info); - 
assertNotNull(info.getEntity().getLabels()); - assertEquals(info.getEntity().getLabels().size(), 1); - } - - @Test() - public void testAddClassification() throws Exception { - traitName = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = - AtlasTypeUtil.createTraitTypeDef(traitName, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - ClassificationAssociateRequest request = new ClassificationAssociateRequest(); - request.setEntityGuids(Arrays.asList(createHiveTable().getGuid(), createHiveDB().getGuid())); - request.setClassification(new AtlasClassification(piiTrait.getName())); - - atlasClientV2.addClassification(request); - - assertEntityAuditV2(createHiveTable().getGuid(), EntityAuditEventV2.EntityAuditActionV2.CLASSIFICATION_ADD); - assertEntityAuditV2(createHiveDB().getGuid(), EntityAuditEventV2.EntityAuditActionV2.CLASSIFICATION_ADD); - AtlasClassifications classificationsTable = atlasClientV2.getEntityClassifications(createHiveTable().getGuid(), piiTrait.getName()); - assertNotNull(classificationsTable); - AtlasClassifications classificationsDB = atlasClientV2.getEntityClassifications(createHiveDB().getGuid(), piiTrait.getName()); - assertNotNull(classificationsDB); - } - - @Test() - public void testDeleteClassifications() throws Exception { - final String guid = createHiveTable().getGuid(); - try { - atlasClientV2.deleteClassifications(guid, Arrays.asList(new AtlasClassification(getAndAddClassification().getName()), new AtlasClassification(getAndAddClassification().getName()))); - } catch (AtlasServiceException ex) { - fail("Deletion should've succeeded"); - } - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_DELETE); - } - - @Test() - public void testRemoveEntityClassificationByGuid() throws Exception { - final String guid = createHiveTable().getGuid(); - try { - String name = getAndAddClassification().getName(); - atlasClientV2.removeClassification(guid, name, guid); - } catch (AtlasServiceException ex) { - fail("Deletion should've succeeded"); - } - assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_DELETE); - } - - @Test() - public void testProduceTemplate() { - try { - String template = atlasClientV2.getTemplateForBulkUpdateBusinessAttributes(); - assertNotNull(template); - } catch (AtlasServiceException ex) { - fail("Deletion should've succeeded"); - } - } - - //TODO Enable this test after fixing the BulkImportResponse Deserialization issue. 
- @Test(dependsOnMethods = "testSubmitEntity", enabled = false) - public void testImportBMAttributes() throws AtlasServiceException { - BulkImportResponse response = atlasClientV2.bulkUpdateBusinessAttributes(TestResourceFileUtils.getTestFilePath("template_metadata.csv")); - assertNotNull(response); - } - - private void createImportFile() throws Exception { - try { - String filePath = TestResourceFileUtils.getTestFilePath("template_metadata.csv"); - String dbName = (String) createHiveTable().getAttribute("name"); - String header = "TypeName,UniqueAttributeValue,BusinessAttributeName,BusinessAttributeValue,UniqueAttributeName[optional]"; - String values = "hive_table_v2," + dbName + ",bmWithAllTypes.attr8,\"Awesome Attribute 1\",qualifiedName"; - File tempFile = new File(filePath); - FileUtils.writeLines(tempFile, Arrays.asList(header, values)); - } catch (IOException e) { - fail("Should have created file"); - throw new AtlasServiceException(e); - } - } - - private void assertEntityAudit(String dbid, EntityAuditEvent.EntityAuditAction auditAction) - throws Exception { - List events = atlasClientV1.getEntityAuditEvents(dbid, (short) 100); - for (EntityAuditEvent event : events) { - if (event.getAction() == auditAction) { - return; - } - } - fail("Expected audit event with action = " + auditAction); - } - - private void assertEntityAuditV2(String guid, EntityAuditEventV2.EntityAuditActionV2 auditAction) - throws Exception { - // Passing auditAction as "null" as this feature is not added for InMemoryEntityRepository for IT testing. - List events = atlasClientV2.getAuditEvents(guid, "", null, (short) 100); - assertNotNull(events); - assertNotEquals(events.size(), 0); - ObjectMapper mapper = new ObjectMapper(); - - List auditEventV2s = mapper.convertValue( - events, - new TypeReference>() { - }); - for (EntityAuditEventV2 event : auditEventV2s) { - if (event.getAction() == auditAction) { - return; - } - } - fail("Expected audit event with action = " + auditAction); - } - - private void addProperty(String guid, String property, Object value) throws AtlasServiceException { - - AtlasEntity entityByGuid = getEntityByGuid(guid); - entityByGuid.setAttribute(property, value); - EntityMutationResponse response = atlasClientV2.updateEntity(new AtlasEntityWithExtInfo(entityByGuid)); - assertNotNull(response); - assertNotNull(response.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); - } - - private AtlasEntity createHiveDB() { - if (dbEntity == null) { - dbEntity = createHiveDB(DATABASE_NAME); - } - return dbEntity; - } - - private AtlasEntity createHiveDB(String dbName) { - AtlasEntity hiveDBInstanceV2 = createHiveDBInstanceV2(dbName); - AtlasEntityHeader entityHeader = createEntity(hiveDBInstanceV2); - assertNotNull(entityHeader); - assertNotNull(entityHeader.getGuid()); - hiveDBInstanceV2.setGuid(entityHeader.getGuid()); - return hiveDBInstanceV2; - } - - private TypesUtil.Pair createDBAndTable() throws Exception { - AtlasEntity dbInstanceV2 = createHiveDB(); - AtlasEntity hiveTableInstanceV2 = createHiveTable(); - return TypesUtil.Pair.of(dbInstanceV2, hiveTableInstanceV2); - } - - private AtlasEntity createHiveTable() throws Exception { - if (tableEntity == null) { - tableEntity = createHiveTable(createHiveDB(), TABLE_NAME); - } - return tableEntity; - } - - private AtlasEntity createHiveTable(AtlasEntity dbInstanceV2, String tableName) throws Exception { - AtlasEntity hiveTableInstanceV2 = createHiveTableInstanceV2(dbInstanceV2, tableName); - AtlasEntityHeader createdHeader = 
createEntity(hiveTableInstanceV2); - assertNotNull(createdHeader); - assertNotNull(createdHeader.getGuid()); - hiveTableInstanceV2.setGuid(createdHeader.getGuid()); - tableEntity = hiveTableInstanceV2; - return hiveTableInstanceV2; - } - - private AtlasClassificationDef getAndAddClassification() throws Exception { - String traitNameNext = "PII_Trait" + randomString(); - AtlasClassificationDef piiTrait = - AtlasTypeUtil.createTraitTypeDef(traitNameNext, Collections.emptySet()); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getClassificationDefs().add(piiTrait); - createType(typesDef); - - atlasClientV2.addClassifications(createHiveTable().getGuid(), Collections.singletonList(new AtlasClassification(piiTrait.getName()))); - - assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD); - AtlasClassifications classifications = atlasClientV2.getEntityClassifications(createHiveTable().getGuid(), piiTrait.getName()); - assertNotNull(classifications); - return piiTrait; - } - - private Map toMap(final String name, final String value) { - return new HashMap() {{ - put(name, value); - }}; - } - - private AtlasEntityHeader createRandomDatabaseEntity() { - AtlasEntity db = new AtlasEntity(DATABASE_TYPE_V2); - String dbName = randomString(); - db.setAttribute("name", dbName); - db.setAttribute(NAME, dbName); - db.setAttribute("clusterName", randomString()); - db.setAttribute("description", randomString()); - return createEntity(db); - } - - private String random() { - return RandomStringUtils.random(10); - } - - private String randomUTF8() throws Exception { - String ret = random(); - - if (!StandardCharsets.UTF_8.equals(Charset.defaultCharset())) { - ret = new String(ret.getBytes(), StandardCharsets.UTF_8.name()); - } - - return ret; - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/GlossaryClientV2IT.java b/webapp/src/test/java/org/apache/atlas/web/integration/GlossaryClientV2IT.java deleted file mode 100644 index cb68b11dbd..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/GlossaryClientV2IT.java +++ /dev/null @@ -1,476 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

    - * http://www.apache.org/licenses/LICENSE-2.0 - *

    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.sun.jersey.api.client.ClientResponse; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.bulkimport.BulkImportResponse; -import org.apache.atlas.model.glossary.AtlasGlossary; -import org.apache.atlas.model.glossary.AtlasGlossaryCategory; -import org.apache.atlas.model.glossary.AtlasGlossaryTerm; -import org.apache.atlas.model.glossary.enums.AtlasTermRelationshipStatus; -import org.apache.atlas.model.glossary.relations.AtlasGlossaryHeader; -import org.apache.atlas.model.glossary.relations.AtlasRelatedCategoryHeader; -import org.apache.atlas.model.glossary.relations.AtlasRelatedTermHeader; -import org.apache.atlas.model.instance.AtlasEntity; -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.instance.AtlasRelatedObjectId; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.atlas.utils.TestResourceFileUtils; -import org.apache.atlas.web.TestUtils; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.RandomStringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.fail; - -public class GlossaryClientV2IT extends BaseResourceIT { - private static final Logger LOG = LoggerFactory.getLogger(GlossaryClientV2IT.class); - private static final ObjectMapper mapper = new ObjectMapper(); - - private AtlasTypesDef typeDefinitions; - private AtlasClientV2 clientV2; - private AtlasGlossary educationGlossary, healthCareGlossary; - private AtlasGlossaryTerm educationTerm, schoolEducationTerm; - private AtlasGlossaryCategory educationCategory; - private List relatedObjectIds; - private AtlasEntityHeader entityHeader; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - typeDefinitions = setForGlossary(); - createType(typeDefinitions); - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - clientV2 = new AtlasClientV2(atlasUrls, new String[]{"admin", "admin"}); - } else { - clientV2 = new AtlasClientV2(atlasUrls); - } - } - - @AfterClass - public void tearDown() throws Exception { - emptyTypeDefs(typeDefinitions); - } - - @Test - public void testCreateGlossary() throws Exception { - educationGlossary = createAndGetGlossary("Education"); - assertNotNull(educationGlossary); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testGetGlossaryByGuid() throws Exception { - healthCareGlossary = createAndGetGlossary("HealthCare"); - 
AtlasGlossary atlasGlossaryByGuid = atlasClientV2.getGlossaryByGuid(healthCareGlossary.getGuid()); - assertNotNull(atlasGlossaryByGuid); - assertEquals(healthCareGlossary.getGuid(), atlasGlossaryByGuid.getGuid()); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testGetDetailGlossary() throws Exception { - AtlasGlossary.AtlasGlossaryExtInfo extInfo = atlasClientV2.getGlossaryExtInfo(educationGlossary.getGuid()); - assertNotNull(extInfo); - assertEquals(educationGlossary.getGuid(), extInfo.getGuid()); - } - - @Test(dependsOnMethods = "testGetGlossaryByGuid") - public void getAllGlossary() throws Exception { - List list = atlasClientV2.getAllGlossaries("ASC", 5, 0); - assertNotNull(list); - assertEquals(list.size(), 3); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void updateGlossaryByGuid() throws Exception { - AtlasGlossary newGlossary = new AtlasGlossary(); - newGlossary.setLanguage("English"); - newGlossary.setName("updateGlossary"); - AtlasGlossary updated = atlasClientV2.updateGlossaryByGuid(educationGlossary.getGuid(), newGlossary); - assertNotNull(updated); - assertEquals(updated.getGuid(), educationGlossary.getGuid()); - assertEquals(updated.getLanguage(), newGlossary.getLanguage()); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testCreateTerm() throws Exception { - AtlasGlossaryTerm term = new AtlasGlossaryTerm(); - AtlasGlossaryHeader header = new AtlasGlossaryHeader(); - header.setGlossaryGuid(educationGlossary.getGuid()); - header.setDisplayText(educationGlossary.getName()); - term.setAnchor(header); - term.setName("termForEducation"); - educationTerm = atlasClientV2.createGlossaryTerm(term); - assertNotNull(educationTerm); - assertNotNull(educationTerm.getGuid()); - } - - @Test(dependsOnMethods = "testCreateTerm") - public void testGetGlossaryTerm() throws Exception { - AtlasGlossaryTerm term = atlasClientV2.getGlossaryTerm(educationTerm.getGuid()); - assertNotNull(term); - assertEquals(term.getGuid(), educationTerm.getGuid()); - } - - @Test(dependsOnMethods = "testCreateTerm") - public void testGetGlossaryTerms() throws Exception { - AtlasRelatedTermHeader relatedTermHeader = new AtlasRelatedTermHeader(); - relatedTermHeader.setTermGuid(educationTerm.getGuid()); - relatedTermHeader.setDescription("test description"); - relatedTermHeader.setExpression("test expression"); - relatedTermHeader.setSource("School"); - relatedTermHeader.setSteward("School"); - relatedTermHeader.setStatus(AtlasTermRelationshipStatus.ACTIVE); - schoolEducationTerm = new AtlasGlossaryTerm(); - AtlasGlossaryHeader header = new AtlasGlossaryHeader(); - header.setGlossaryGuid(educationGlossary.getGuid()); - header.setDisplayText(educationGlossary.getName()); - schoolEducationTerm.setAnchor(header); - schoolEducationTerm.setName("termForSchool"); - schoolEducationTerm.setSeeAlso(Collections.singleton(relatedTermHeader)); - - schoolEducationTerm = clientV2.createGlossaryTerm(schoolEducationTerm); - assertNotNull(schoolEducationTerm); - assertNotNull(schoolEducationTerm.getGuid()); - - //Getting multiple terms - List terms = atlasClientV2.getGlossaryTerms(educationGlossary.getGuid(), "ASC", 2, 0); - assertNotNull(terms); - assertEquals(terms.size(), 2); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testCreateGlossaryCategory() throws Exception { - AtlasGlossaryCategory category = new AtlasGlossaryCategory(); - AtlasGlossaryHeader header = new AtlasGlossaryHeader(); - header.setGlossaryGuid(educationGlossary.getGuid()); - 
header.setDisplayText(educationGlossary.getName()); - category.setAnchor(header); - category.setName("categoryForEducation"); - educationCategory = atlasClientV2.createGlossaryCategory(category); - assertNotNull(educationCategory); - assertNotNull(educationCategory.getGuid()); - } - - @Test(dependsOnMethods = "testCreateGlossaryCategory") - public void testCreateGlossaryCategories() throws Exception { - List glossaryCategories = new ArrayList<>(); - - AtlasGlossaryCategory category1 = new AtlasGlossaryCategory(); - AtlasGlossaryHeader header1 = new AtlasGlossaryHeader(); - header1.setGlossaryGuid(healthCareGlossary.getGuid()); - header1.setDisplayText(healthCareGlossary.getName()); - category1.setAnchor(header1); - category1.setName("category1ForEducation"); - glossaryCategories.add(category1); - //Setting different category - AtlasGlossaryCategory category2 = new AtlasGlossaryCategory(); - category2.setAnchor(header1); - category2.setName("category2ForEducation"); - glossaryCategories.add(category2); - - List list = atlasClientV2.createGlossaryCategories(glossaryCategories); - assertNotNull(list); - assertEquals(list.size(), 2); - } - - @Test(dependsOnMethods = "testCreateGlossaryCategory") - public void testGetGlossaryByCategory() throws Exception { - AtlasGlossaryCategory atlasGlossaryCategory = atlasClientV2.getGlossaryCategory(educationCategory.getGuid()); - assertNotNull(atlasGlossaryCategory); - assertEquals(atlasGlossaryCategory.getGuid(), educationCategory.getGuid()); - } - - @Test(dependsOnMethods = "testGetGlossaryByGuid") - public void testCreateGlossaryTerms() throws Exception { - List list = new ArrayList<>(); - int index = 0; - List glossaries = atlasClientV2.getAllGlossaries("ASC", 5, 0); - List glossaryList = mapper.convertValue( - glossaries, - new TypeReference>() { - }); - - for (AtlasGlossary glossary : glossaryList) { - AtlasGlossaryTerm term = new AtlasGlossaryTerm(); - AtlasGlossaryHeader header = new AtlasGlossaryHeader(); - header.setGlossaryGuid(glossary.getGuid()); - header.setDisplayText(glossary.getName()); - term.setAnchor(header); - term.setName("termName" + index); - list.add(term); - index++; - } - List termList = atlasClientV2.createGlossaryTerms(list); - assertNotNull(termList); - assertEquals(termList.size(), 3); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testPartialUpdateGlossaryByGuid() throws Exception { - Map partialUpdates = new HashMap<>(); - partialUpdates.put("shortDescription", "shortDescription"); - partialUpdates.put("longDescription", "longDescription"); - AtlasGlossary atlasGlossary = atlasClientV2.partialUpdateGlossaryByGuid(educationGlossary.getGuid(), partialUpdates); - assertNotNull(atlasGlossary); - assertEquals(atlasGlossary.getShortDescription(), "shortDescription"); - assertEquals(atlasGlossary.getLongDescription(), "longDescription"); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testUpdateGlossaryTermByGuid() throws Exception { - AtlasGlossaryTerm term = new AtlasGlossaryTerm(educationTerm); - term.setAbbreviation("trm"); - AtlasGlossaryTerm responseTerm = atlasClientV2.updateGlossaryTermByGuid(educationTerm.getGuid(), term); - assertNotNull(responseTerm); - assertEquals(responseTerm.getAbbreviation(), term.getAbbreviation()); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testUpdateGlossaryCategoryByGuid() throws Exception { - AtlasGlossaryCategory category = new AtlasGlossaryCategory(educationCategory); - category.setLongDescription("this is about category"); 
- AtlasGlossaryCategory responseCategory = atlasClientV2.updateGlossaryCategoryByGuid(educationCategory.getGuid(), category); - assertNotNull(responseCategory); - assertEquals(responseCategory.getLongDescription(), category.getLongDescription()); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testPartialUpdateTermByGuid() throws Exception { - Map partialUpdates = new HashMap<>(); - partialUpdates.put("shortDescription", "shortDescriptionTerm"); - partialUpdates.put("longDescription", "longDescriptionTerm"); - - AtlasGlossaryTerm term = atlasClientV2.partialUpdateTermByGuid(educationTerm.getGuid(), partialUpdates); - assertNotNull(term); - assertEquals(term.getShortDescription(), "shortDescriptionTerm"); - assertEquals(term.getLongDescription(), "longDescriptionTerm"); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testPartialUpdateCategoryByGuid() throws Exception { - Map partialUpdates = new HashMap<>(); - partialUpdates.put("shortDescription", "shortDescriptionCategory"); - partialUpdates.put("longDescription", "longDescriptionCategory"); - - AtlasGlossaryCategory category = atlasClientV2.partialUpdateCategoryByGuid(educationCategory.getGuid(), partialUpdates); - assertNotNull(category); - assertEquals(category.getShortDescription(), "shortDescriptionCategory"); - assertEquals(category.getLongDescription(), "longDescriptionCategory"); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testGetGlossaryTermHeadersByGuid() throws Exception { - List list = atlasClientV2.getGlossaryTermHeaders(educationGlossary.getGuid(), "ASC", 2, 0); - assertNotNull(list); - assertEquals(list.size(), 1); - } - - @Test(dependsOnMethods = "testCreateGlossaryCategories") - public void testGetGlossaryCategoriesByGuid() throws Exception { - List list = atlasClientV2.getGlossaryCategories(healthCareGlossary.getGuid(), "ASC", 2, 0); - assertNotNull(list); - assertEquals(list.size(), 2); - } - - @Test(dependsOnMethods = "testCreateGlossaryCategories") - public void testGetGlossaryCategoryHeaders() throws Exception { - List list = atlasClientV2.getGlossaryCategoryHeaders(healthCareGlossary.getGuid(), "ASC", 2, 0); - assertNotNull(list); - assertEquals(list.size(), 2); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testGetCategoryTerms() throws Exception { - List list = atlasClientV2.getCategoryTerms(educationCategory.getGuid(), "ASC", 2, 0); - assertNotNull(list); - assertEquals(list.size(), 0); - } - - @Test(dependsOnMethods = "testCreateGlossary") - public void testGetAllRelatedTerms() throws Exception { - Map> map = atlasClientV2.getRelatedTerms(educationTerm.getGuid(), "ASC", 2, 0); - assertNotNull(map); - } - - @Test(dependsOnMethods = "testCreateTerm") - public void testAssignTermToEntities() throws Exception { - try { - AtlasEntity entity = new AtlasEntity("Asset"); - entity.setAttribute("qualifiedName", "testAsset"); - entity.setAttribute("name", "testAsset"); - if (entityHeader == null) { - entityHeader = createEntity(entity); - } - AtlasRelatedObjectId relatedObjectId = new AtlasRelatedObjectId(); - relatedObjectId.setGuid(entityHeader.getGuid()); - relatedObjectId.setTypeName(entityHeader.getTypeName()); - assertNotNull(relatedObjectId); - relatedObjectIds = new ArrayList<>(); - relatedObjectIds.add(relatedObjectId); - - atlasClientV2.assignTermToEntities(educationTerm.getGuid(), relatedObjectIds); - List assignedEntities = atlasClientV2.getEntitiesAssignedWithTerm(educationTerm.getGuid(), "ASC", 2, 0); - 
assertNotNull(assignedEntities); - assertEquals(assignedEntities.size(), 1); - List entityList = mapper.convertValue( - assignedEntities, - new TypeReference>() { - }); - String relationshipGuid = entityList.get(0).getRelationshipGuid(); - assertNotNull(relationshipGuid); - relatedObjectId.setRelationshipGuid(relationshipGuid); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @Test(dependsOnMethods = "testAssignTermToEntities") - public void testDisassociateTermAssignmentFromEntities() throws Exception { - atlasClientV2.disassociateTermFromEntities(educationTerm.getGuid(), relatedObjectIds); - AtlasGlossaryTerm term = atlasClientV2.getGlossaryTerm(educationTerm.getGuid()); - atlasClientV2.deleteEntityByGuid(entityHeader.getGuid()); - assertNotNull(term); - assertNull(term.getAssignedEntities()); - } - - @Test(dependsOnMethods = "testCreateGlossaryCategory") - public void testGetRelatedCategories() throws Exception { - Map> map = atlasClientV2.getRelatedCategories(educationCategory.getGuid(), "ASC", 1, 0); - assertEquals(map.size(), 0); - } - - @Test(dependsOnMethods = "testDeleteGlossaryTerm") - public void testDeleteGlossary() throws Exception { - emptyTypeDefs(typeDefinitions); - atlasClientV2.deleteGlossaryByGuid(educationGlossary.getGuid()); - atlasClientV2.deleteGlossaryByGuid(healthCareGlossary.getGuid()); - try { - atlasClientV2.getGlossaryByGuid(healthCareGlossary.getGuid()); - } catch (AtlasServiceException ex) { - assertNotNull(ex.getStatus()); - assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); - } - try { - atlasClientV2.getGlossaryByGuid(educationGlossary.getGuid()); - } catch (AtlasServiceException ex) { - assertNotNull(ex.getStatus()); - assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); - } - } - - @Test(dependsOnMethods = "testDisassociateTermAssignmentFromEntities") - public void testDeleteGlossaryTerm() throws Exception { - atlasClientV2.deleteGlossaryTermByGuid(educationTerm.getGuid()); - try { - atlasClientV2.getGlossaryTerm(educationTerm.getGuid()); - } catch (AtlasServiceException ex) { - assertNotNull(ex.getStatus()); - assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); - } - } - - @Test(dependsOnMethods = "testGetRelatedCategories") - public void testDeleteGlossaryCategory() throws Exception { - atlasClientV2.deleteGlossaryCategoryByGuid(educationCategory.getGuid()); - try { - atlasClientV2.getGlossaryCategory(educationCategory.getGuid()); - } catch (AtlasServiceException ex) { - assertNotNull(ex.getStatus()); - assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); - } - } - - @Test() - public void testProduceTemplate() { - try { - String template = atlasClientV2.getGlossaryImportTemplate(); - - assertNotNull(template); - } catch (AtlasServiceException ex) { - fail("Deletion should've succeeded"); - } - } - - @Test() - public void testImportGlossaryData() { - try { - String filePath = TestResourceFileUtils.getTestFilePath("template.csv"); - BulkImportResponse terms = atlasClientV2.importGlossary(filePath); - - assertNotNull(terms); - - assertEquals(terms.getSuccessImportInfoList().size(), 1); - - } catch (AtlasServiceException ex) { - fail("Import GlossaryData should've succeeded : "+ex); - } - } - - private AtlasGlossary createAndGetGlossary(String name) throws Exception { - AtlasGlossary atlasGlossary = new AtlasGlossary(); - atlasGlossary.setName(name); - return atlasClientV2.createGlossary(atlasGlossary); - } - - private void emptyTypeDefs(AtlasTypesDef def) { - def.getEnumDefs().clear(); - 
def.getStructDefs().clear(); - def.getClassificationDefs().clear(); - def.getEntityDefs().clear(); - def.getRelationshipDefs().clear(); - def.getBusinessMetadataDefs().clear(); - } - - private AtlasTypesDef setForGlossary() throws IOException { - String filePath = TestUtils.getGlossaryType(); - String json = FileUtils.readFileToString(new File(filePath)); - return AtlasType.fromJson(json, AtlasTypesDef.class); - } - - protected String randomString() { - //names cannot start with a digit - return RandomStringUtils.randomAlphabetic(1) + RandomStringUtils.randomAlphanumeric(9); - } - -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/LineageClientV2IT.java b/webapp/src/test/java/org/apache/atlas/web/integration/LineageClientV2IT.java deleted file mode 100644 index e7b67f21e4..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/LineageClientV2IT.java +++ /dev/null @@ -1,133 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import org.apache.atlas.model.instance.AtlasEntityHeader; -import org.apache.atlas.model.lineage.AtlasLineageInfo; -import org.apache.atlas.v1.model.instance.Id; -import org.apache.atlas.v1.model.instance.Referenceable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.atlas.AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME; - -/** - * Entity Lineage v2 Integration Tests. 
- */ -public class LineageClientV2IT extends DataSetLineageJerseyResourceIT { - private static final Logger LOG = LoggerFactory.getLogger(LineageClientV2IT.class); - private String salesFactTable; - private String salesMonthlyTable; - private String salesDBName; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - createTypeDefinitionsV1(); - setupInstances(); - } - - @Test - public void testGetLineageInfo() throws Exception { - String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, - REFERENCEABLE_ATTRIBUTE_NAME, salesMonthlyTable).getId()._getId(); - - AtlasLineageInfo inputLineageInfo = atlasClientV2.getLineageInfo(tableId, AtlasLineageInfo.LineageDirection.INPUT, 5); - Assert.assertNotNull(inputLineageInfo); - Map entities = inputLineageInfo.getGuidEntityMap(); - Assert.assertNotNull(entities); - - Set relations = inputLineageInfo.getRelations(); - Assert.assertNotNull(relations); - - Assert.assertEquals(entities.size(), 6); - Assert.assertEquals(relations.size(), 5); - Assert.assertEquals(inputLineageInfo.getLineageDirection(), AtlasLineageInfo.LineageDirection.INPUT); - Assert.assertEquals(inputLineageInfo.getLineageDepth(), 5); - Assert.assertEquals(inputLineageInfo.getBaseEntityGuid(), tableId); - } - - @Test - public void testGetLineageInfoByAttribute() throws Exception { - Map attributeMap = new HashMap<>(); - attributeMap.put("qualifiedName", salesMonthlyTable); - - AtlasLineageInfo bothLineageInfo = atlasClientV2.getLineageInfo(HIVE_TABLE_TYPE, attributeMap, AtlasLineageInfo.LineageDirection.BOTH, 5); - Assert.assertNotNull(bothLineageInfo); - Map entities = bothLineageInfo.getGuidEntityMap(); - Assert.assertNotNull(entities); - - Set relations = bothLineageInfo.getRelations(); - Assert.assertNotNull(relations); - - Assert.assertEquals(entities.size(), 6); - Assert.assertEquals(relations.size(), 5); - Assert.assertEquals(bothLineageInfo.getLineageDirection(), AtlasLineageInfo.LineageDirection.BOTH); - Assert.assertEquals(bothLineageInfo.getLineageDepth(), 5); - } - - private void setupInstances() throws Exception { - salesDBName = "Sales" + randomString(); - Id salesDB = database(salesDBName, "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales"); - - List salesFactColumns = Arrays.asList(column("time_id", "int", "time id"), column("product_id", "int", "product id"), - column("customer_id", "int", "customer id"), - column("sales", "double", "product id")); - - salesFactTable = "sales_fact" + randomString(); - Id salesFact = table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns); - - List timeDimColumns = Arrays.asList(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"), - column("weekDay", "int", "week Day")); - - Id timeDim = - table("time_dim" + randomString(), "time dimension table", salesDB, "John Doe", "EXTERNAL", - timeDimColumns); - - Id reportingDB = - database("Reporting" + randomString(), "reporting database", "Jane BI", - "hdfs://host:8000/apps/warehouse/reporting"); - - Id salesFactDaily = - table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB, - "Joe BI", "MANAGED", salesFactColumns); - - loadProcess("loadSalesDaily" + randomString(), "John ETL", Arrays.asList(salesFact, timeDim), - Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph"); - - salesMonthlyTable = "sales_fact_monthly_mv" + randomString(); - Id salesFactMonthly = - table(salesMonthlyTable, "sales fact monthly 
materialized view", reportingDB, "Jane BI", - "MANAGED", salesFactColumns); - - loadProcess("loadSalesMonthly" + randomString(), "John ETL", Collections.singletonList(salesFactDaily), - Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph"); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java deleted file mode 100644 index 9506c6240d..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java +++ /dev/null @@ -1,477 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.integration; - -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.model.SearchFilter; -import org.apache.atlas.model.TypeCategory; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.model.typedef.AtlasBusinessMetadataDef; -import org.apache.atlas.model.typedef.AtlasClassificationDef; -import org.apache.atlas.model.typedef.AtlasEntityDef; -import org.apache.atlas.model.typedef.AtlasEnumDef; -import org.apache.atlas.model.typedef.AtlasRelationshipDef; -import org.apache.atlas.model.typedef.AtlasStructDef; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; -import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef; -import org.apache.atlas.model.typedef.AtlasTypeDefHeader; -import org.apache.atlas.model.typedef.AtlasTypesDef; -import org.apache.atlas.type.AtlasTypeUtil; -import org.apache.atlas.utils.AuthenticationUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; - -import static org.apache.atlas.AtlasErrorCode.TYPE_NAME_NOT_FOUND; -import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality; -import static org.apache.atlas.type.AtlasTypeUtil.createClassTypeDef; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -/** - * Integration test for types jersey resource. 
- */ -public class TypedefsJerseyResourceIT extends BaseResourceIT { - - private AtlasTypesDef typeDefinitions; - - private AtlasClientV2 clientV2; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - typeDefinitions = createHiveTypesV2(); - - if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { - clientV2 = new AtlasClientV2(atlasUrls, new String[]{"admin", "admin"}); - } else { - clientV2 = new AtlasClientV2(atlasUrls); - } - } - - @AfterClass - public void tearDown() throws Exception { - emptyTypeDefs(typeDefinitions); - } - - @Test - public void testCreate() throws Exception { - createType(typeDefinitions); - - // validate if all types created successfully - for (AtlasEnumDef enumDef : typeDefinitions.getEnumDefs()) { - checkIfTypeExists(enumDef.getName()); - } - - for (AtlasStructDef structDef : typeDefinitions.getStructDefs()) { - checkIfTypeExists(structDef.getName()); - } - - for (AtlasClassificationDef classificationDef : typeDefinitions.getClassificationDefs()) { - checkIfTypeExists(classificationDef.getName()); - } - - for (AtlasEntityDef entityDef : typeDefinitions.getEntityDefs()) { - checkIfTypeExists(entityDef.getName()); - } - - for (AtlasRelationshipDef relationshipDef : typeDefinitions.getRelationshipDefs()) { - checkIfTypeExists(relationshipDef.getName()); - } - - for (AtlasBusinessMetadataDef businessMetadataDef : typeDefinitions.getBusinessMetadataDefs()) { - checkIfTypeExists(businessMetadataDef.getName()); - } - } - - @Test - public void testGetHeaders() throws Exception { - MultivaluedMap filterParams = new MultivaluedMapImpl(); - filterParams.add(SearchFilter.PARAM_TYPE, "ENTITY"); - List headers = clientV2.getAllTypeDefHeaders(new SearchFilter(filterParams)); - assertNotNull(headers); - } - - @Test(dependsOnMethods = "testGetDefinition") - public void testDeleteAtlasTypeByName() throws Exception { - String typeName = "table"; - boolean typeExists = atlasClientV2.typeWithNameExists(typeName); - if (typeExists) { - clientV2.deleteTypeByName(typeName); - boolean afterDelete = atlasClientV2.typeWithNameExists(typeName); - assertEquals(afterDelete, false); - } - } - - @Test - public void testDuplicateCreate() throws Exception { - AtlasEntityDef type = createClassTypeDef(randomString(), - Collections.emptySet(), AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")); - AtlasTypesDef typesDef = new AtlasTypesDef(); - typesDef.getEntityDefs().add(type); - - AtlasTypesDef created = clientV2.createAtlasTypeDefs(typesDef); - assertNotNull(created); - - try { - created = clientV2.createAtlasTypeDefs(typesDef); - fail("Expected 409"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.CONFLICT.getStatusCode()); - } - } - - @Test - public void testUpdate() throws Exception { - String entityType = randomString(); - AtlasEntityDef typeDefinition = - createClassTypeDef(entityType, Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")); - - AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); - atlasTypesDef.getEntityDefs().add(typeDefinition); - - AtlasTypesDef createdTypeDefs = clientV2.createAtlasTypeDefs(atlasTypesDef); - assertNotNull(createdTypeDefs); - assertEquals(createdTypeDefs.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); - - //Add attribute description - typeDefinition = createClassTypeDef(typeDefinition.getName(), - Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), - 
AtlasTypeUtil.createOptionalAttrDef("description", "string")); - - emptyTypeDefs(atlasTypesDef); - - atlasTypesDef.getEntityDefs().add(typeDefinition); - - AtlasTypesDef updatedTypeDefs = clientV2.updateAtlasTypeDefs(atlasTypesDef); - assertNotNull(updatedTypeDefs); - assertEquals(updatedTypeDefs.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); - assertEquals(updatedTypeDefs.getEntityDefs().get(0).getName(), atlasTypesDef.getEntityDefs().get(0).getName()); - - MultivaluedMap filterParams = new MultivaluedMapImpl(); - filterParams.add(SearchFilter.PARAM_TYPE, "ENTITY"); - AtlasTypesDef allTypeDefs = clientV2.getAllTypeDefs(new SearchFilter(filterParams)); - assertNotNull(allTypeDefs); - Boolean entityDefFound = false; - for (AtlasEntityDef atlasEntityDef : allTypeDefs.getEntityDefs()){ - if (atlasEntityDef.getName().equals(typeDefinition.getName())) { - assertEquals(atlasEntityDef.getAttributeDefs().size(), 2); - entityDefFound = true; - break; - } - } - assertTrue(entityDefFound, "Required entityDef not found."); - } - - @Test(dependsOnMethods = "testCreate") - public void testGetDefinition() throws Exception { - if (CollectionUtils.isNotEmpty(typeDefinitions.getEnumDefs())) { - for (AtlasEnumDef atlasEnumDef : typeDefinitions.getEnumDefs()) { - verifyByNameAndGUID(atlasEnumDef); - } - } - - if (CollectionUtils.isNotEmpty(typeDefinitions.getStructDefs())) { - for (AtlasStructDef structDef : typeDefinitions.getStructDefs()) { - verifyByNameAndGUID(structDef); - } - } - - if (CollectionUtils.isNotEmpty(typeDefinitions.getClassificationDefs())) { - for (AtlasClassificationDef classificationDef : typeDefinitions.getClassificationDefs()) { - verifyByNameAndGUID(classificationDef); - } - } - - if (CollectionUtils.isNotEmpty(typeDefinitions.getEntityDefs())) { - for (AtlasEntityDef entityDef : typeDefinitions.getEntityDefs()) { - verifyByNameAndGUID(entityDef); - } - } - - if (CollectionUtils.isNotEmpty(typeDefinitions.getRelationshipDefs())) { - for (AtlasRelationshipDef relationshipDef : typeDefinitions.getRelationshipDefs()) { - verifyByNameAndGUID(relationshipDef); - } - } - - if (CollectionUtils.isNotEmpty(typeDefinitions.getBusinessMetadataDefs())) { - for (AtlasBusinessMetadataDef businessMetadataDef : typeDefinitions.getBusinessMetadataDefs()) { - verifyByNameAndGUID(businessMetadataDef); - } - } - } - - @Test() - public void testInvalidGets() throws Exception { - try { - AtlasEnumDef byName = clientV2.getEnumDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasEnumDef byGuid = clientV2.getEnumDefByGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasStructDef byName = clientV2.getStructDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasStructDef byGuid = clientV2.getStructDefByGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), 
Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasClassificationDef byName = clientV2.getClassificationDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasClassificationDef byGuid = clientV2.getClassificationDefByGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasEntityDef byName = clientV2.getEntityDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasEntityDef byGuid = clientV2.getEntityDefByGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasRelationshipDef byName = clientV2.getRelationshipDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasRelationshipDef byGuid = clientV2.getRelationshipDefByGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasBusinessMetadataDef byName = clientV2.getBusinessMetadataDefByName("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - try { - AtlasBusinessMetadataDef byGuid = clientV2.getBusinessMetadataDefGuid("blah"); - fail("Get for invalid name should have reported a failure"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), - "Should've returned a 404"); - } - - } - - @Test - public void testListTypesByFilter() throws Exception { - AtlasAttributeDef attr = AtlasTypeUtil.createOptionalAttrDef("attr", "string"); - AtlasEntityDef classDefA = AtlasTypeUtil.createClassTypeDef("A" + randomString(), Collections.emptySet(), attr); - AtlasEntityDef classDefA1 = AtlasTypeUtil.createClassTypeDef("A1" + randomString(), Collections.singleton(classDefA.getName())); - AtlasEntityDef classDefB = AtlasTypeUtil.createClassTypeDef("B" + randomString(), Collections.emptySet(), attr); - AtlasEntityDef classDefC = AtlasTypeUtil.createClassTypeDef("C" + randomString(), new HashSet<>(Arrays.asList(classDefB.getName(), classDefA.getName()))); - - AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); - atlasTypesDef.getEntityDefs().add(classDefA); - atlasTypesDef.getEntityDefs().add(classDefA1); - atlasTypesDef.getEntityDefs().add(classDefB); - atlasTypesDef.getEntityDefs().add(classDefC); - - AtlasTypesDef created = clientV2.createAtlasTypeDefs(atlasTypesDef); - 
assertNotNull(created); - assertEquals(created.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); - - MultivaluedMap searchParams = new MultivaluedMapImpl(); - searchParams.add(SearchFilter.PARAM_TYPE, "CLASS"); - searchParams.add(SearchFilter.PARAM_SUPERTYPE, classDefA.getName()); - SearchFilter searchFilter = new SearchFilter(searchParams); - AtlasTypesDef searchDefs = clientV2.getAllTypeDefs(searchFilter); - assertNotNull(searchDefs); - assertEquals(searchDefs.getEntityDefs().size(), 2); - - searchParams.add(SearchFilter.PARAM_NOT_SUPERTYPE, classDefB.getName()); - searchFilter = new SearchFilter(searchParams); - searchDefs = clientV2.getAllTypeDefs(searchFilter); - assertNotNull(searchDefs); - assertEquals(searchDefs.getEntityDefs().size(), 1); - } - - private AtlasTypesDef createHiveTypesV2() throws Exception { - AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); - - AtlasEntityDef databaseTypeDefinition = - createClassTypeDef("database", Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), - AtlasTypeUtil.createRequiredAttrDef("description", "string")); - atlasTypesDef.getEntityDefs().add(databaseTypeDefinition); - - AtlasEntityDef tableTypeDefinition = - createClassTypeDef("table", Collections.emptySet(), - AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), - AtlasTypeUtil.createRequiredAttrDef("description", "string"), - AtlasTypeUtil.createOptionalAttrDef("columnNames", AtlasBaseTypeDef.getArrayTypeName("string")), - AtlasTypeUtil.createOptionalAttrDef("created", "date"), - AtlasTypeUtil.createOptionalAttrDef("parameters", - AtlasBaseTypeDef.getMapTypeName("string", "string")), - AtlasTypeUtil.createRequiredAttrDef("type", "string"), - new AtlasAttributeDef("database", "database", - false, - Cardinality.SINGLE, 1, 1, - true, true, false, - Collections.emptyList())); - atlasTypesDef.getEntityDefs().add(tableTypeDefinition); - - AtlasClassificationDef fetlTypeDefinition = AtlasTypeUtil - .createTraitTypeDef("fetl", Collections.emptySet(), - AtlasTypeUtil.createRequiredAttrDef("level", "int")); - atlasTypesDef.getClassificationDefs().add(fetlTypeDefinition); - - return atlasTypesDef; - } - - private void verifyByNameAndGUID(AtlasBaseTypeDef typeDef) { - try { - AtlasBaseTypeDef byName = null; - if (typeDef.getCategory() == TypeCategory.ENUM) { - byName = clientV2.getEnumDefByName(typeDef.getName()); - } else if (typeDef.getCategory() == TypeCategory.ENTITY) { - byName = clientV2.getEntityDefByName(typeDef.getName()); - } else if (typeDef.getCategory() == TypeCategory.CLASSIFICATION) { - byName = clientV2.getClassificationDefByName(typeDef.getName()); - } else if (typeDef.getCategory() == TypeCategory.STRUCT) { - byName = clientV2.getStructDefByName(typeDef.getName()); - } else if (typeDef.getCategory() == TypeCategory.RELATIONSHIP) { - byName = clientV2.getRelationshipDefByName(typeDef.getName()); - } else if (typeDef.getCategory() == TypeCategory.BUSINESS_METADATA) { - byName = clientV2.getBusinessMetadataDefByName(typeDef.getName()); - } - assertNotNull(byName); - } catch (AtlasServiceException e) { - fail("Get byName should've succeeded", e); - } - if (StringUtils.isNotBlank(typeDef.getGuid())) { - try { - AtlasBaseTypeDef byGuid = null; - if (typeDef.getCategory() == TypeCategory.ENUM) { - byGuid = clientV2.getEnumDefByGuid(typeDef.getGuid()); - } else if (typeDef.getCategory() == TypeCategory.ENTITY) { - byGuid = clientV2.getEntityDefByGuid(typeDef.getGuid()); - } else if (typeDef.getCategory() == 
TypeCategory.CLASSIFICATION) { - byGuid = clientV2.getClassificationDefByGuid(typeDef.getGuid()); - } else if (typeDef.getCategory() == TypeCategory.STRUCT) { - byGuid = clientV2.getStructDefByGuid(typeDef.getGuid()); - } else if (typeDef.getCategory() == TypeCategory.RELATIONSHIP) { - byGuid = clientV2.getRelationshipDefByGuid(typeDef.getGuid()); - } else if (typeDef.getCategory() == TypeCategory.BUSINESS_METADATA) { - byGuid = clientV2.getBusinessMetadataDefGuid(typeDef.getGuid()); - } - assertNotNull(byGuid); - } catch (AtlasServiceException e) { - fail("Get byGuid should've succeeded", e); - } - } - } - - private void emptyTypeDefs(AtlasTypesDef def) { - def.getEnumDefs().clear(); - def.getStructDefs().clear(); - def.getClassificationDefs().clear(); - def.getEntityDefs().clear(); - def.getRelationshipDefs().clear(); - def.getBusinessMetadataDefs().clear(); - } - - private void checkIfTypeExists(String typeName) throws Exception { - int retryCount = 0; - int maxRetries = 3; - int sleepTime = 5000; - - while (true) { - try { - boolean typeExists = atlasClientV2.typeWithNameExists(typeName); - - if (!typeExists) { - throw new AtlasBaseException(TYPE_NAME_NOT_FOUND, typeName); - } else { - break; - } - } catch (AtlasBaseException e) { - Thread.sleep(sleepTime); - - if (++retryCount == maxRetries) throw e; - } - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java deleted file mode 100755 index 6a0bbecfa0..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java +++ /dev/null @@ -1,280 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.integration; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.sun.jersey.core.util.MultivaluedMapImpl; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.model.typedef.AtlasBaseTypeDef; -import org.apache.atlas.v1.model.typedef.*; -import org.apache.atlas.type.AtlasType; -import org.apache.atlas.typesystem.types.DataTypes; -import org.apache.atlas.v1.typesystem.types.utils.TypesUtil; -import org.testng.Assert; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import java.util.*; - -import static org.apache.atlas.v1.typesystem.types.utils.TypesUtil.createOptionalAttrDef; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -/** - * Integration test for types jersey resource. - */ -public class TypesJerseyResourceIT extends BaseResourceIT { - - private List typeDefinitions; - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - - typeDefinitions = createHiveTypes(); - } - - @AfterClass - public void tearDown() throws Exception { - typeDefinitions.clear(); - } - - @Test - public void testSubmit() throws Exception { - for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) { - try{ - atlasClientV1.getType(typeDefinition.getTypeName()); - } catch (AtlasServiceException ase){ - TypesDef typesDef = null; - - if (typeDefinition instanceof ClassTypeDefinition) { - typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.singletonList((ClassTypeDefinition) typeDefinition)); - } else if (typeDefinition instanceof TraitTypeDefinition) { - typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), - Collections.singletonList((TraitTypeDefinition) typeDefinition), Collections.emptyList()); - } - - String typesAsJSON = AtlasType.toV1Json(typesDef); - - System.out.println("typesAsJSON = " + typesAsJSON); - - ObjectNode response = atlasClientV1.callAPIWithBody(AtlasClient.API_V1.CREATE_TYPE, typesAsJSON); - Assert.assertNotNull(response); - - - ArrayNode typesAdded = (ArrayNode) response.get(AtlasClient.TYPES); - assertEquals(typesAdded.size(), 1); - assertEquals(typesAdded.get(0).get(NAME).asText(), typeDefinition.getTypeName()); - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));} - } - } - - @Test - public void testDuplicateSubmit() throws Exception { - ClassTypeDefinition type = TypesUtil.createClassTypeDef(randomString(), null, - Collections.emptySet(), TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - TypesDef typesDef = - new TypesDef(Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.singletonList(type)); - atlasClientV1.createType(typesDef); - - try { - atlasClientV1.createType(typesDef); - fail("Expected 409"); - } catch (AtlasServiceException e) { - assertEquals(e.getStatus().getStatusCode(), Response.Status.CONFLICT.getStatusCode()); - } - } - - @Test - public void testUpdate() throws Exception { - ClassTypeDefinition classTypeDef = TypesUtil - .createClassTypeDef(randomString(), null, "1.0", Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - - TypesDef 
typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - - List typesCreated = atlasClientV1.createType(AtlasType.toV1Json(typesDef)); - assertEquals(typesCreated.size(), 1); - assertEquals(typesCreated.get(0), classTypeDef.getTypeName()); - - //Add attribute description - classTypeDef = TypesUtil.createClassTypeDef(classTypeDef.getTypeName(), null, "2.0", - Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - createOptionalAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - TypesDef typeDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - List typesUpdated = atlasClientV1.updateType(typeDef); - assertEquals(typesUpdated.size(), 1); - Assert.assertTrue(typesUpdated.contains(classTypeDef.getTypeName())); - - TypesDef updatedTypeDef = atlasClientV1.getType(classTypeDef.getTypeName()); - assertNotNull(updatedTypeDef); - - ClassTypeDefinition updatedType = updatedTypeDef.getClassTypes().get(0); - assertEquals(updatedType.getAttributeDefinitions().size(), 2); - } - - @Test(dependsOnMethods = "testSubmit") - public void testGetDefinition() throws Exception { - for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) { - System.out.println("typeName = " + typeDefinition.getTypeName()); - - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LIST_TYPES, null, typeDefinition.getTypeName()); - - Assert.assertNotNull(response); - Assert.assertNotNull(response.get(AtlasClient.DEFINITION)); - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - TypesDef typesDef = AtlasType.fromV1Json(AtlasType.toJson(response.get(AtlasClient.DEFINITION)), TypesDef.class); - - List hierarchicalTypeDefs = Collections.emptyList(); - - if (typeDefinition instanceof ClassTypeDefinition) { - hierarchicalTypeDefs = typesDef.getClassTypes(); - } else if (typeDefinition instanceof TraitTypeDefinition) { - hierarchicalTypeDefs = typesDef.getTraitTypes(); - } - - for (HierarchicalTypeDefinition hierarchicalTypes : hierarchicalTypeDefs) { - for (AttributeDefinition attrDef : hierarchicalTypes.getAttributeDefinitions()) { - if (NAME.equals(attrDef.getName())) { - assertEquals(attrDef.getIsIndexable(), true); - assertEquals(attrDef.getIsUnique(), true); - } - } - } - } - } - - @Test(expectedExceptions = AtlasServiceException.class) - public void testGetDefinitionForNonexistentType() throws Exception { - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LIST_TYPES, null, "blah"); - } - - @Test(dependsOnMethods = "testSubmit") - public void testGetTypeNames() throws Exception { - ObjectNode response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LIST_TYPES, null, (String[]) null); - Assert.assertNotNull(response); - - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - final ArrayNode list = (ArrayNode) response.get(AtlasClient.RESULTS); - Assert.assertNotNull(list); - - //Verify that primitive and core types are not returned - String typesString = list.toString(); - Assert.assertFalse(typesString.contains(" \"__IdType\" ")); - Assert.assertFalse(typesString.contains(" \"string\" ")); - } - - @Test - public void testGetTraitNames() throws Exception { - String[] traitsAdded = addTraits(); - - MultivaluedMap queryParams = new MultivaluedMapImpl(); - queryParams.add("type", 
DataTypes.TypeCategory.TRAIT.name()); - - ObjectNode response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API_V1.LIST_TYPES, queryParams); - Assert.assertNotNull(response); - - Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); - - final ArrayNode list = (ArrayNode) response.get(AtlasClient.RESULTS); - Assert.assertNotNull(list); - Assert.assertTrue(list.size() >= traitsAdded.length); - } - - @Test - public void testListTypesByFilter() throws Exception { - AttributeDefinition attr = TypesUtil.createOptionalAttrDef("attr", AtlasBaseTypeDef.ATLAS_TYPE_STRING); - - ClassTypeDefinition classTypeDef = TypesUtil.createClassTypeDef("A" + randomString(), null, Collections.emptySet(), attr); - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - String a = createType(AtlasType.toV1Json(typesDef)).get(0); - - classTypeDef = TypesUtil.createClassTypeDef("A1" + randomString(), null, Collections.singleton(a)); - typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - String a1 = createType(AtlasType.toV1Json(typesDef)).get(0); - - classTypeDef = TypesUtil.createClassTypeDef("B" + randomString(), null, Collections.emptySet(), attr); - typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - String b = createType(AtlasType.toV1Json(typesDef)).get(0); - - classTypeDef = TypesUtil.createClassTypeDef("C" + randomString(), null, new HashSet<>(Arrays.asList(a, b))); - typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(classTypeDef)); - String c = createType(AtlasType.toV1Json(typesDef)).get(0); - - List results = atlasClientV1.listTypes(DataTypes.TypeCategory.CLASS, a, b); - assertEquals(results, Arrays.asList(a1), "Results: " + results); - } - - private String[] addTraits() throws Exception { - String[] traitNames = {"class_trait", "secure_trait", "pii_trait", "ssn_trait", "salary_trait", "sox_trait",}; - - for (String traitName : traitNames) { - TraitTypeDefinition traitTypeDef = - TypesUtil.createTraitTypeDef(traitName, null, Collections.emptySet()); - TypesDef typesDef = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.singletonList(traitTypeDef), Collections.emptyList()); - - String json = AtlasType.toV1Json(typesDef); - createType(json); - } - - return traitNames; - } - - private List createHiveTypes() throws Exception { - ArrayList typeDefinitions = new ArrayList<>(); - - ClassTypeDefinition databaseTypeDefinition = TypesUtil - .createClassTypeDef("database", null, Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING)); - typeDefinitions.add(databaseTypeDefinition); - - ClassTypeDefinition tableTypeDefinition = TypesUtil - .createClassTypeDef("table", null, Collections.emptySet(), - TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING), - createOptionalAttrDef("columnNames", 
AtlasBaseTypeDef.getArrayTypeName(AtlasBaseTypeDef.ATLAS_TYPE_STRING)), - createOptionalAttrDef("created", AtlasBaseTypeDef.ATLAS_TYPE_DATE), - createOptionalAttrDef("parameters", - AtlasBaseTypeDef.getMapTypeName(AtlasBaseTypeDef.ATLAS_TYPE_STRING, AtlasBaseTypeDef.ATLAS_TYPE_STRING)), - TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING), - new AttributeDefinition("database", "database", Multiplicity.REQUIRED, false, null)); - typeDefinitions.add(tableTypeDefinition); - - TraitTypeDefinition fetlTypeDefinition = TypesUtil - .createTraitTypeDef("fetl", null, Collections.emptySet(), - TypesUtil.createRequiredAttrDef("level", AtlasBaseTypeDef.ATLAS_TYPE_INT)); - typeDefinitions.add(fetlTypeDefinition); - - return typeDefinitions; - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/resources/AdminExportImportTestIT.java b/webapp/src/test/java/org/apache/atlas/web/resources/AdminExportImportTestIT.java deleted file mode 100644 index 51580afe7d..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/resources/AdminExportImportTestIT.java +++ /dev/null @@ -1,146 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.resources; - - -import org.apache.atlas.AtlasClientV2; -import org.apache.atlas.AtlasServiceException; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.model.impexp.AtlasExportResult; -import org.apache.atlas.model.impexp.AtlasServer; -import org.apache.atlas.model.impexp.AtlasExportRequest; -import org.apache.atlas.model.impexp.AtlasImportRequest; -import org.apache.atlas.model.impexp.AtlasImportResult; -import org.apache.atlas.repository.impexp.ZipSource; -import org.apache.atlas.utils.TestResourceFileUtils; -import org.apache.atlas.web.integration.BaseResourceIT; -import org.testng.SkipException; -import org.testng.annotations.AfterClass; -import org.testng.annotations.Test; - -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; - -public class AdminExportImportTestIT extends BaseResourceIT { - private final String FILE_TO_IMPORT = "stocks-base.zip"; - private final String FILE_TO_IMPORT_EMPTY = "empty-1.zip"; - private final String EXPORT_REQUEST_FILE = "export-incremental"; - private final String SOURCE_SERVER_NAME = "cl1"; - - static final String IMPORT_TRANSFORM_CLEAR_ATTRS = - "{ \"Asset\": { \"*\":[ \"clearAttrValue:replicatedTo,replicatedFrom\" ] } }"; - static final String IMPORT_TRANSFORM_SET_DELETED = - "{ \"Referenceable\": { \"*\":[ \"setDeleted\" ] } }"; - - @Test - public void isActive() throws AtlasServiceException { - assertEquals(atlasClientV2.getAdminStatus(), "ACTIVE"); - } - - @Test(dependsOnMethods = "isActive") - public void importData() throws AtlasServiceException { - performImport(FILE_TO_IMPORT, 37); - assertReplicationData("cl1"); - } - - @Test(dependsOnMethods = "isActive") - public void importEmptyData() throws AtlasServiceException { - AtlasImportResult result = performImportUsing(FILE_TO_IMPORT_EMPTY, new AtlasImportRequest()); - assertNotNull(result); - assertEquals(AtlasExportResult.OperationStatus.FAIL.toString(), result.getOperationStatus().toString()); - } - - @Test(dependsOnMethods = "importData") - public void exportData() throws AtlasServiceException, IOException, AtlasBaseException { - final int EXPECTED_CREATION_ORDER_SIZE = 6; - - AtlasExportRequest request = TestResourceFileUtils.readObjectFromJson(".", EXPORT_REQUEST_FILE, AtlasExportRequest.class); - InputStream exportedStream = atlasClientV2.exportData(request); - assertNotNull(exportedStream); - - ZipSource zs = new ZipSource(exportedStream); - assertNotNull(zs.getExportResult()); - assertTrue(zs.getCreationOrder().size() >= EXPECTED_CREATION_ORDER_SIZE, "expected creationOrderSize > " + EXPECTED_CREATION_ORDER_SIZE + ", but found " + zs.getCreationOrder().size()); - } - - @Test - public void unAuthExportData() throws IOException { - AtlasClientV2 unAuthClient = new AtlasClientV2(atlasUrls, new String[]{"admin", "wr0ng_pa55w0rd"}); - AtlasExportRequest request = TestResourceFileUtils.readObjectFromJson(".", EXPORT_REQUEST_FILE, AtlasExportRequest.class); - try { - InputStream exportedStream = unAuthClient.exportData(request); - } catch(AtlasServiceException e) { - assertNotNull(e.getStatus(), "expected server error code in the status"); - } - } - - private void performImport(String fileToImport, int expectedProcessedEntitiesCount) throws AtlasServiceException { - AtlasImportRequest request = new 
AtlasImportRequest(); - request.getOptions().put(AtlasImportRequest.OPTION_KEY_REPLICATED_FROM, SOURCE_SERVER_NAME); - request.getOptions().put(AtlasImportRequest.TRANSFORMS_KEY, IMPORT_TRANSFORM_CLEAR_ATTRS); - - performImport(fileToImport, request, expectedProcessedEntitiesCount); - } - - private void performImport(String fileToImport, AtlasImportRequest request, int expectedProcessedEntitiesCount) throws AtlasServiceException { - - AtlasImportResult result = performImportUsing(fileToImport, request); - assertNotNull(result); - assertEquals(result.getOperationStatus(), AtlasImportResult.OperationStatus.SUCCESS); - assertNotNull(result.getMetrics()); - assertEquals(result.getProcessedEntities().size(), expectedProcessedEntitiesCount, "processedEntities: expected=" + expectedProcessedEntitiesCount + ", found=" + result.getProcessedEntities().size() + ". result=" + result); - } - - private AtlasImportResult performImportUsing(String fileToImport, AtlasImportRequest request) throws AtlasServiceException { - FileInputStream fileInputStream = null; - - try { - fileInputStream = new FileInputStream(TestResourceFileUtils.getTestFilePath(fileToImport)); - } catch (IOException e) { - assertFalse(true, "Exception: " + e.getMessage()); - } - - return atlasClientV2.importData(request, fileInputStream); - } - - private void assertReplicationData(String serverName) throws AtlasServiceException { - AtlasServer server = atlasClientV2.getServer(serverName); - assertNotNull(server); - assertNotNull(server.getAdditionalInfo()); - assertTrue(server.getAdditionalInfo().size() > 0); - } - - @AfterClass - public void teardown() { - AtlasImportRequest request = new AtlasImportRequest(); - request.getOptions().put(AtlasImportRequest.TRANSFORMS_KEY, IMPORT_TRANSFORM_SET_DELETED); - - try { - performImport(FILE_TO_IMPORT, request, 32); // initial import has 5 entities already in deleted state, hence current import will have 32 processed-entities - } catch (AtlasServiceException e) { - throw new SkipException("performTeardown: failed! Subsequent tests results may be affected."); - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/security/NegativeSSLAndKerberosTest.java b/webapp/src/test/java/org/apache/atlas/web/security/NegativeSSLAndKerberosTest.java deleted file mode 100755 index d3cf35ca0f..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/security/NegativeSSLAndKerberosTest.java +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.security; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.web.TestUtils; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.alias.JavaKeyStoreProvider; -import org.testng.Assert; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.net.URL; -import java.nio.file.Files; - -import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED; - -/** - * Perform all the necessary setup steps for client and server comm over SSL/Kerberos, but then don't estalish a - * kerberos user for the invocation. Need a separate use case since the Jersey layer cached the URL connection handler, - * which indirectly caches the kerberos delegation token. - */ -public class NegativeSSLAndKerberosTest extends BaseSSLAndKerberosTest { - - private TestSecureEmbeddedServer secureEmbeddedServer; - private String originalConf; - private AtlasClient dgiClient; - - //@BeforeClass - public void setUp() throws Exception { - jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks"); - providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri(); - - String persistDir = TestUtils.getTempDirectory(); - - setupKDCAndPrincipals(); - setupCredentials(); - - // client will actually only leverage subset of these properties - final PropertiesConfiguration configuration = getSSLConfiguration(providerUrl); - - persistSSLClientConfiguration(configuration); - - TestUtils.writeConfiguration(configuration, persistDir + File.separator + - ApplicationProperties.APPLICATION_PROPERTIES); - - String confLocation = System.getProperty("atlas.conf"); - URL url; - if (confLocation == null) { - url = NegativeSSLAndKerberosTest.class.getResource("/" + ApplicationProperties.APPLICATION_PROPERTIES); - } else { - url = new File(confLocation, ApplicationProperties.APPLICATION_PROPERTIES).toURI().toURL(); - } - configuration.load(url); - - configuration.setProperty(TLS_ENABLED, true); - configuration.setProperty("atlas.authentication.method.kerberos", "true"); - configuration.setProperty("atlas.authentication.keytab",userKeytabFile.getAbsolutePath()); - configuration.setProperty("atlas.authentication.principal","dgi/localhost@"+kdc.getRealm()); - - configuration.setProperty("atlas.authentication.method.file", "false"); - configuration.setProperty("atlas.authentication.method.kerberos", "true"); - configuration.setProperty("atlas.authentication.method.kerberos.principal", "HTTP/localhost@" + kdc.getRealm()); - configuration.setProperty("atlas.authentication.method.kerberos.keytab", httpKeytabFile.getAbsolutePath()); - configuration.setProperty("atlas.authentication.method.kerberos.name.rules", - "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT"); - - configuration.setProperty("atlas.authentication.method.file", "true"); - configuration.setProperty("atlas.authentication.method.file.filename", persistDir - + "/users-credentials"); - configuration.setProperty("atlas.auth.policy.file",persistDir - + "/policy-store.txt" ); - configuration.setProperty("atlas.authentication.method.trustedproxy", "false"); - - TestUtils.writeConfiguration(configuration, persistDir + File.separator + - ApplicationProperties.APPLICATION_PROPERTIES); - - setupUserCredential(persistDir); - 
setUpPolicyStore(persistDir); - - // save original setting - originalConf = System.getProperty("atlas.conf"); - System.setProperty("atlas.conf", persistDir); - - dgiClient = new AtlasClient(configuration, DGI_URL); - - - secureEmbeddedServer = new TestSecureEmbeddedServer(21443, getWarPath()) { - @Override - public Configuration getConfiguration() { - return configuration; - } - }; - secureEmbeddedServer.getServer().start(); - } - - //@AfterClass - public void tearDown() throws Exception { - if (secureEmbeddedServer != null) { - secureEmbeddedServer.getServer().stop(); - } - - if (kdc != null) { - kdc.stop(); - } - - if (originalConf != null) { - System.setProperty("atlas.conf", originalConf); - } - } - - @Test(enabled = false) - public void testUnsecuredClient() throws Exception { - try { - dgiClient.listTypes(); - Assert.fail("Should have failed with GSSException"); - } catch(Exception e) { - e.printStackTrace(); - Assert.assertTrue(e.getMessage().contains("Mechanism level: Failed to find any Kerberos tgt")); - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/security/SSLAndKerberosTest.java b/webapp/src/test/java/org/apache/atlas/web/security/SSLAndKerberosTest.java deleted file mode 100755 index 4384aa1535..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/security/SSLAndKerberosTest.java +++ /dev/null @@ -1,184 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.security; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasClient; -import org.apache.atlas.web.TestUtils; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.alias.JavaKeyStoreProvider; -import org.testng.annotations.Test; -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Files; -import java.security.PrivilegedExceptionAction; - -import javax.security.auth.Subject; -import javax.security.auth.callback.Callback; -import javax.security.auth.callback.CallbackHandler; -import javax.security.auth.callback.NameCallback; -import javax.security.auth.callback.PasswordCallback; -import javax.security.auth.callback.UnsupportedCallbackException; -import javax.security.auth.login.LoginContext; -import javax.security.auth.login.LoginException; - -import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED; - -public class SSLAndKerberosTest extends BaseSSLAndKerberosTest { - public static final String TEST_USER_JAAS_SECTION = "TestUser"; - public static final String TESTUSER = "testuser"; - public static final String TESTPASS = "testpass"; - - private static final String DGI_URL = "https://localhost:21443/"; - private AtlasClient dgiCLient; - private TestSecureEmbeddedServer secureEmbeddedServer; - private Subject subject; - private String originalConf; - private String originalHomeDir; - - //@BeforeClass - public void setUp() throws Exception { - jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks"); - providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri(); - - String persistDir = TestUtils.getTempDirectory(); - - setupKDCAndPrincipals(); - setupCredentials(); - - // client will actually only leverage subset of these properties - final PropertiesConfiguration configuration = getSSLConfiguration(providerUrl); - - persistSSLClientConfiguration(configuration); - - TestUtils.writeConfiguration(configuration, persistDir + File.separator + - ApplicationProperties.APPLICATION_PROPERTIES); - - String confLocation = System.getProperty("atlas.conf"); - URL url; - if (confLocation == null) { - url = SSLAndKerberosTest.class.getResource("/" + ApplicationProperties.APPLICATION_PROPERTIES); - } else { - url = new File(confLocation, ApplicationProperties.APPLICATION_PROPERTIES).toURI().toURL(); - } - configuration.load(url); - configuration.setProperty(TLS_ENABLED, true); - configuration.setProperty("atlas.authentication.method.kerberos", "true"); - configuration.setProperty("atlas.authentication.keytab",userKeytabFile.getAbsolutePath()); - configuration.setProperty("atlas.authentication.principal","dgi/localhost@"+kdc.getRealm()); - - configuration.setProperty("atlas.authentication.method.file", "false"); - configuration.setProperty("atlas.authentication.method.trustedproxy", "false"); - configuration.setProperty("atlas.authentication.method.kerberos", "true"); - configuration.setProperty("atlas.authentication.method.kerberos.principal", "HTTP/localhost@" + kdc.getRealm()); - configuration.setProperty("atlas.authentication.method.kerberos.keytab", httpKeytabFile.getAbsolutePath()); - configuration.setProperty("atlas.authentication.method.kerberos.name.rules", - "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT"); - - configuration.setProperty("atlas.authentication.method.file", "true"); - 
configuration.setProperty("atlas.authentication.method.file.filename", persistDir - + "/users-credentials"); - configuration.setProperty("atlas.auth.policy.file",persistDir - + "/policy-store.txt" ); - TestUtils.writeConfiguration(configuration, persistDir + File.separator + - "atlas-application.properties"); - - setupUserCredential(persistDir); - setUpPolicyStore(persistDir); - - subject = loginTestUser(); - UserGroupInformation.loginUserFromSubject(subject); - UserGroupInformation proxyUser = UserGroupInformation.createProxyUser( - "testUser", - UserGroupInformation.getLoginUser()); - - // save original setting - originalConf = System.getProperty("atlas.conf"); - System.setProperty("atlas.conf", persistDir); - - originalHomeDir = System.getProperty("atlas.home"); - System.setProperty("atlas.home", TestUtils.getTargetDirectory()); - - dgiCLient = proxyUser.doAs(new PrivilegedExceptionAction() { - @Override - public AtlasClient run() throws Exception { - return new AtlasClient(configuration, DGI_URL); - } - }); - - - secureEmbeddedServer = new TestSecureEmbeddedServer(21443, getWarPath()) { - @Override - public PropertiesConfiguration getConfiguration() { - return configuration; - } - }; - secureEmbeddedServer.getServer().start(); - } - - //@AfterClass - public void tearDown() throws Exception { - if (secureEmbeddedServer != null) { - secureEmbeddedServer.getServer().stop(); - } - - if (kdc != null) { - kdc.stop(); - } - - if (originalConf != null) { - System.setProperty("atlas.conf", originalConf); - } - - if(originalHomeDir !=null){ - System.setProperty("atlas.home", originalHomeDir); - } - } - - protected Subject loginTestUser() throws LoginException, IOException { - LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() { - - @Override - public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException { - for (Callback callback : callbacks) { - if (callback instanceof PasswordCallback) { - PasswordCallback passwordCallback = (PasswordCallback) callback; - passwordCallback.setPassword(TESTPASS.toCharArray()); - } - if (callback instanceof NameCallback) { - NameCallback nameCallback = (NameCallback) callback; - nameCallback.setName(TESTUSER); - } - } - } - }); - // attempt authentication - lc.login(); - return lc.getSubject(); - } - - @Test(enabled = false) - public void testService() throws Exception { - dgiCLient.listTypes(); - } - -} diff --git a/webapp/src/test/java/org/apache/atlas/web/security/SSLTest.java b/webapp/src/test/java/org/apache/atlas/web/security/SSLTest.java deleted file mode 100755 index ee652f0570..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/security/SSLTest.java +++ /dev/null @@ -1,149 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.security; - -import org.apache.atlas.AtlasClient; -import org.apache.atlas.web.TestUtils; -import org.apache.atlas.web.service.SecureEmbeddedServer; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.alias.CredentialProvider; -import org.apache.hadoop.security.alias.CredentialProviderFactory; -import org.apache.hadoop.security.alias.JavaKeyStoreProvider; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.webapp.WebAppContext; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; - -import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY; -import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY; -import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY; - -public class SSLTest extends BaseSSLAndKerberosTest { - private AtlasClient atlasClient; - private Path jksPath; - private String providerUrl; - private TestSecureEmbeddedServer secureEmbeddedServer; - private String originalConf; - private String originalHomeDir; - - class TestSecureEmbeddedServer extends SecureEmbeddedServer { - - public TestSecureEmbeddedServer(int port, String path) throws IOException { - super(ATLAS_DEFAULT_BIND_ADDRESS, port, path); - } - - public Server getServer() { - return server; - } - - @Override - protected WebAppContext getWebAppContext(String path) { - WebAppContext application = new WebAppContext(path, "/"); - application.setDescriptor(System.getProperty("projectBaseDir") + "/webapp/src/test/webapp/WEB-INF/web.xml"); - application.setClassLoader(Thread.currentThread().getContextClassLoader()); - return application; - } - } - - //@BeforeClass - public void setUp() throws Exception { - jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks"); - providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri(); - - setupCredentials(); - final PropertiesConfiguration configuration = getSSLConfiguration(providerUrl); - String persistDir = writeConfiguration(configuration); - persistSSLClientConfiguration(configuration); - - originalConf = System.getProperty("atlas.conf"); - System.setProperty("atlas.conf", persistDir); - - originalHomeDir = System.getProperty("atlas.home"); - System.setProperty("atlas.home", TestUtils.getTargetDirectory()); - - atlasClient = new AtlasClient(configuration, new String[]{DGI_URL},new String[]{"admin","admin"}); - - secureEmbeddedServer = new TestSecureEmbeddedServer(21443, getWarPath()) { - @Override - public org.apache.commons.configuration.Configuration getConfiguration() { - return configuration; - } - }; - secureEmbeddedServer.getServer().start(); - } - - //@AfterClass - public void tearDown() throws Exception { - if (secureEmbeddedServer != null) { - secureEmbeddedServer.getServer().stop(); - } - - if (originalConf != null) { - System.setProperty("atlas.conf", originalConf); - } - - if(originalHomeDir !=null){ - System.setProperty("atlas.home", originalHomeDir); - } - } - - protected void setupCredentials() throws Exception { - Configuration conf = new Configuration(false); - - File file = new File(jksPath.toUri().getPath()); - file.delete(); - conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl); - - 
CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0); - - // create new aliases - try { - - char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass); - - char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass); - - char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry("ssl.client.truststore.password", trustpass2); - - char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass); - - // write out so that it can be found in checks - provider.flush(); - } catch (Exception e) { - e.printStackTrace(); - throw e; - } - } - - //@Test - public void testService() throws Exception { - atlasClient.listTypes(); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceElectorServiceTest.java b/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceElectorServiceTest.java deleted file mode 100644 index 3ce0c4b85e..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceElectorServiceTest.java +++ /dev/null @@ -1,374 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.service; - -import org.apache.atlas.AtlasConstants; -import org.apache.atlas.AtlasException; -import org.apache.atlas.exception.AtlasBaseException; -import org.apache.atlas.ha.HAConfiguration; -import org.apache.atlas.listener.ActiveStateChangeHandler; -import org.apache.atlas.util.AtlasMetricsUtil; -import org.apache.commons.configuration.Configuration; -import org.apache.curator.framework.recipes.leader.LeaderLatch; -import org.mockito.InOrder; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - -import static org.mockito.Mockito.*; - -public class ActiveInstanceElectorServiceTest { - - @Mock - private Configuration configuration; - - @Mock - private CuratorFactory curatorFactory; - - @Mock - private ActiveInstanceState activeInstanceState; - - @Mock - private ServiceState serviceState; - - @Mock - private AtlasMetricsUtil metricsUtil; - - @BeforeMethod - public void setup() { - System.setProperty(AtlasConstants.SYSTEM_PROPERTY_APP_PORT, "21000"); - MockitoAnnotations.initMocks(this); - } - - @Test - public void testLeaderElectionIsJoinedOnStart() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - - verify(leaderLatch).start(); - } - - @Test - public void testListenerIsAddedForActiveInstanceCallbacks() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). 
- thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - - verify(leaderLatch).addListener(activeInstanceElectorService); - } - - @Test - public void testLeaderElectionIsNotStartedIfNotInHAMode() throws AtlasException { - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - - verifyZeroInteractions(curatorFactory); - } - - @Test - public void testLeaderElectionIsLeftOnStop() throws IOException, AtlasException { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.stop(); - - verify(leaderLatch).close(); - } - - @Test - public void testCuratorFactoryIsClosedOnStop() throws AtlasException { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). 
- thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.stop(); - - verify(curatorFactory).close(); - } - - @Test - public void testNoActionOnStopIfHAModeIsDisabled() { - - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.stop(); - - verifyZeroInteractions(curatorFactory); - } - - @Test - public void testRegisteredHandlersAreNotifiedWhenInstanceIsActive() throws AtlasException { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - Set changeHandlers = new HashSet<>(); - final ActiveStateChangeHandler handler1 = mock(ActiveStateChangeHandler.class); - final ActiveStateChangeHandler handler2 = mock(ActiveStateChangeHandler.class); - - changeHandlers.add(handler1); - changeHandlers.add(handler2); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, changeHandlers, curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.isLeader(); - - verify(handler1).instanceIsActive(); - verify(handler2).instanceIsActive(); - } - - @Test - public void testSharedStateIsUpdatedWhenInstanceIsActive() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). 
- thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - - activeInstanceElectorService.start(); - activeInstanceElectorService.isLeader(); - - verify(activeInstanceState).update("id1"); - } - - @Test - public void testRegisteredHandlersAreNotifiedOfPassiveWhenStateUpdateFails() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - Set changeHandlers = new HashSet<>(); - final ActiveStateChangeHandler handler1 = mock(ActiveStateChangeHandler.class); - final ActiveStateChangeHandler handler2 = mock(ActiveStateChangeHandler.class); - - changeHandlers.add(handler1); - changeHandlers.add(handler2); - - doThrow(new AtlasBaseException()).when(activeInstanceState).update("id1"); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, changeHandlers, curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.isLeader(); - - verify(handler1).instanceIsPassive(); - verify(handler2).instanceIsPassive(); - } - - @Test - public void testElectionIsRejoinedWhenStateUpdateFails() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). 
- thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - doThrow(new AtlasBaseException()).when(activeInstanceState).update("id1"); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), curatorFactory, - activeInstanceState, serviceState, metricsUtil); - - activeInstanceElectorService.start(); - activeInstanceElectorService.isLeader(); - - InOrder inOrder = inOrder(leaderLatch, curatorFactory); - inOrder.verify(leaderLatch).close(); - inOrder.verify(curatorFactory).leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - inOrder.verify(leaderLatch).addListener(activeInstanceElectorService); - inOrder.verify(leaderLatch).start(); - } - - @Test - public void testRegisteredHandlersAreNotifiedOfPassiveWhenInstanceIsPassive() throws AtlasException { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - Set changeHandlers = new HashSet<>(); - final ActiveStateChangeHandler handler1 = mock(ActiveStateChangeHandler.class); - final ActiveStateChangeHandler handler2 = mock(ActiveStateChangeHandler.class); - - changeHandlers.add(handler1); - changeHandlers.add(handler2); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, changeHandlers, curatorFactory, - activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.notLeader(); - - verify(handler1).instanceIsPassive(); - verify(handler2).instanceIsPassive(); - } - - @Test - public void testActiveStateSetOnBecomingLeader() { - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), - curatorFactory, activeInstanceState, serviceState, metricsUtil); - - activeInstanceElectorService.isLeader(); - - InOrder inOrder = inOrder(serviceState); - inOrder.verify(serviceState).becomingActive(); - inOrder.verify(serviceState).setActive(); - } - - @Test - public void testPassiveStateSetOnLoosingLeadership() { - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), - curatorFactory, activeInstanceState, serviceState, metricsUtil); - - activeInstanceElectorService.notLeader(); - - InOrder inOrder = inOrder(serviceState); - inOrder.verify(serviceState).becomingPassive(); - inOrder.verify(serviceState).setPassive(); - } - - @Test - public void testPassiveStateSetIfActivationFails() throws Exception { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - 
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS)).thenReturn(new String[] {"id1"}); - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn("127.0.0.1:21000"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - LeaderLatch leaderLatch = mock(LeaderLatch.class); - when(curatorFactory.leaderLatchInstance("id1", HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)).thenReturn(leaderLatch); - - doThrow(new AtlasBaseException()).when(activeInstanceState).update("id1"); - - ActiveInstanceElectorService activeInstanceElectorService = - new ActiveInstanceElectorService(configuration, new HashSet(), - curatorFactory, activeInstanceState, serviceState, metricsUtil); - activeInstanceElectorService.start(); - activeInstanceElectorService.isLeader(); - - InOrder inOrder = inOrder(serviceState); - inOrder.verify(serviceState).becomingActive(); - inOrder.verify(serviceState).becomingPassive(); - inOrder.verify(serviceState).setPassive(); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java b/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java deleted file mode 100644 index 713be8d426..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java +++ /dev/null @@ -1,191 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.service; - -import org.apache.atlas.ha.HAConfiguration; -import org.apache.commons.configuration.Configuration; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.api.CreateBuilder; -import org.apache.curator.framework.api.ExistsBuilder; -import org.apache.curator.framework.api.GetDataBuilder; -import org.apache.curator.framework.api.SetDataBuilder; -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; -import org.apache.zookeeper.data.Stat; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeTest; -import org.testng.annotations.Test; - -import java.util.Arrays; -import java.nio.charset.Charset; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; - -public class ActiveInstanceStateTest { - - private static final String HOST_PORT = "127.0.0.1:21000"; - public static final String SERVER_ADDRESS = "http://" + HOST_PORT; - @Mock - private Configuration configuration; - - @Mock - private CuratorFactory curatorFactory; - - @Mock - private CuratorFramework curatorFramework; - - @BeforeTest - public void setup() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void testSharedPathIsCreatedIfNotExists() throws Exception { - - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn(HOST_PORT); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - when(curatorFactory.clientInstance()).thenReturn(curatorFramework); - - ExistsBuilder existsBuilder = mock(ExistsBuilder.class); - when(curatorFramework.checkExists()).thenReturn(existsBuilder); - when(existsBuilder.forPath(getPath())).thenReturn(null); - - CreateBuilder createBuilder = mock(CreateBuilder.class); - when(curatorFramework.create()).thenReturn(createBuilder); - when(createBuilder.withMode(CreateMode.EPHEMERAL)).thenReturn(createBuilder); - when(createBuilder.withACL(ZooDefs.Ids.OPEN_ACL_UNSAFE)).thenReturn(createBuilder); - - SetDataBuilder setDataBuilder = mock(SetDataBuilder.class); - when(curatorFramework.setData()).thenReturn(setDataBuilder); - - ActiveInstanceState activeInstanceState = new ActiveInstanceState(configuration, curatorFactory); - activeInstanceState.update("id1"); - - verify(createBuilder).forPath(getPath()); - } - - private String getPath() { - return HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT - + ActiveInstanceState.APACHE_ATLAS_ACTIVE_SERVER_INFO; - } - - @Test - public void testSharedPathIsCreatedWithRightACLIfNotExists() throws Exception { - - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn(HOST_PORT); - when(configuration.getString(HAConfiguration.HA_ZOOKEEPER_ACL)).thenReturn("sasl:myclient@EXAMPLE.COM"); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). 
- thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - when(curatorFactory.clientInstance()).thenReturn(curatorFramework); - - ExistsBuilder existsBuilder = mock(ExistsBuilder.class); - when(curatorFramework.checkExists()).thenReturn(existsBuilder); - when(existsBuilder.forPath(getPath())).thenReturn(null); - - CreateBuilder createBuilder = mock(CreateBuilder.class); - when(curatorFramework.create()).thenReturn(createBuilder); - when(createBuilder.withMode(CreateMode.EPHEMERAL)).thenReturn(createBuilder); - ACL expectedAcl = new ACL(ZooDefs.Perms.ALL, new Id("sasl", "myclient@EXAMPLE.COM")); - ACL expectedAcl1 = new ACL(ZooDefs.Perms.READ, new Id("world", "anyone")); - when(createBuilder. - withACL(Arrays.asList(new ACL[]{expectedAcl,expectedAcl1}))).thenReturn(createBuilder); - - - SetDataBuilder setDataBuilder = mock(SetDataBuilder.class); - when(curatorFramework.setData()).thenReturn(setDataBuilder); - - ActiveInstanceState activeInstanceState = new ActiveInstanceState(configuration, curatorFactory); - activeInstanceState.update("id1"); - - verify(createBuilder).forPath(getPath()); - } - - @Test - public void testDataIsUpdatedWithAtlasServerAddress() throws Exception { - when(configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX +"id1")).thenReturn(HOST_PORT); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - when(curatorFactory.clientInstance()).thenReturn(curatorFramework); - ExistsBuilder existsBuilder = mock(ExistsBuilder.class); - when(curatorFramework.checkExists()).thenReturn(existsBuilder); - when(existsBuilder.forPath(getPath())).thenReturn(new Stat()); - - SetDataBuilder setDataBuilder = mock(SetDataBuilder.class); - when(curatorFramework.setData()).thenReturn(setDataBuilder); - - ActiveInstanceState activeInstanceState = new ActiveInstanceState(configuration, curatorFactory); - activeInstanceState.update("id1"); - - verify(setDataBuilder).forPath( - getPath(), - SERVER_ADDRESS.getBytes(Charset.forName("UTF-8"))); - } - - @Test - public void testShouldReturnActiveServerAddress() throws Exception { - when(curatorFactory.clientInstance()).thenReturn(curatorFramework); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - GetDataBuilder getDataBuilder = mock(GetDataBuilder.class); - when(curatorFramework.getData()).thenReturn(getDataBuilder); - when(getDataBuilder.forPath(getPath())). - thenReturn(SERVER_ADDRESS.getBytes(Charset.forName("UTF-8"))); - - ActiveInstanceState activeInstanceState = new ActiveInstanceState(configuration, curatorFactory); - String actualServerAddress = activeInstanceState.getActiveServerAddress(); - - assertEquals(SERVER_ADDRESS, actualServerAddress); - } - - @Test - public void testShouldHandleExceptionsInFetchingServerAddress() throws Exception { - when(curatorFactory.clientInstance()).thenReturn(curatorFramework); - when(configuration.getString( - HAConfiguration.ATLAS_SERVER_HA_ZK_ROOT_KEY, HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT)). - thenReturn(HAConfiguration.ATLAS_SERVER_ZK_ROOT_DEFAULT); - - - GetDataBuilder getDataBuilder = mock(GetDataBuilder.class); - when(curatorFramework.getData()).thenReturn(getDataBuilder); - when(getDataBuilder.forPath(getPath())). 
- thenThrow(new Exception()); - - ActiveInstanceState activeInstanceState = new ActiveInstanceState(configuration, curatorFactory); - assertNull(activeInstanceState.getActiveServerAddress()); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/AtlasZookeeperSecurityPropertiesTest.java b/webapp/src/test/java/org/apache/atlas/web/service/AtlasZookeeperSecurityPropertiesTest.java deleted file mode 100644 index 322fe21607..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/AtlasZookeeperSecurityPropertiesTest.java +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.service; - -import com.google.common.base.Charsets; -import org.apache.curator.framework.AuthInfo; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.testng.annotations.Test; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.fail; - -public class AtlasZookeeperSecurityPropertiesTest { - - @Test - public void shouldGetAcl() { - ACL acl = AtlasZookeeperSecurityProperties.parseAcl("sasl:myclient@EXAMPLE.COM"); - assertEquals(acl.getId().getScheme(), "sasl"); - assertEquals(acl.getId().getId(), "myclient@EXAMPLE.COM"); - assertEquals(acl.getPerms(), ZooDefs.Perms.ALL); - } - - @Test(expectedExceptions = IllegalArgumentException.class) - public void shouldThrowExceptionForNullAcl() { - ACL acl = AtlasZookeeperSecurityProperties.parseAcl(null); - fail("Should have thrown exception for null ACL string"); - } - - @Test(expectedExceptions = IllegalArgumentException.class) - public void shouldThrowExceptionForInvalidAclString() { - ACL acl = AtlasZookeeperSecurityProperties.parseAcl("randomAcl"); - fail("Should have thrown exception for null ACL string"); - } - - @Test - public void idsWithColonsAreValid() { - ACL acl = AtlasZookeeperSecurityProperties.parseAcl("auth:user:password"); - assertEquals(acl.getId().getScheme(), "auth"); - assertEquals(acl.getId().getId(), "user:password"); - } - - @Test - public void shouldGetAuth() { - AuthInfo authInfo = AtlasZookeeperSecurityProperties.parseAuth("digest:user:password"); - assertEquals(authInfo.getScheme(), "digest"); - assertEquals(authInfo.getAuth(), "user:password".getBytes(Charsets.UTF_8)); - } - - @Test - public void shouldReturnDefaultAclIfNullOrEmpty() { - ACL acl = AtlasZookeeperSecurityProperties.parseAcl(null, ZooDefs.Ids.OPEN_ACL_UNSAFE.get(0)); - assertEquals(acl, ZooDefs.Ids.OPEN_ACL_UNSAFE.get(0)); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/CuratorFactoryTest.java b/webapp/src/test/java/org/apache/atlas/web/service/CuratorFactoryTest.java deleted file mode 100644 index 385f250df5..0000000000 --- 
a/webapp/src/test/java/org/apache/atlas/web/service/CuratorFactoryTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.service; - -import com.google.common.base.Charsets; -import org.apache.atlas.ha.HAConfiguration; -import org.apache.commons.configuration.Configuration; -import org.apache.curator.framework.CuratorFrameworkFactory; -import org.apache.curator.framework.api.ACLProvider; -import org.apache.zookeeper.data.ACL; -import org.mockito.ArgumentMatcher; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.argThat; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; - -public class CuratorFactoryTest { - - @Mock - private Configuration configuration; - - @Mock - private HAConfiguration.ZookeeperProperties zookeeperProperties; - - @Mock - private CuratorFrameworkFactory.Builder builder; - - @BeforeMethod - public void setup() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void shouldAddAuthorization() { - when(zookeeperProperties.hasAcl()).thenReturn(true); - when(zookeeperProperties.getAcl()).thenReturn("sasl:myclient@EXAMPLE.COM"); - when(zookeeperProperties.hasAuth()).thenReturn(true); - when(zookeeperProperties.getAuth()).thenReturn("sasl:myclient@EXAMPLE.COM"); - CuratorFactory curatorFactory = new CuratorFactory(configuration) { - @Override - protected void initializeCuratorFramework() { - } - }; - curatorFactory.enhanceBuilderWithSecurityParameters(zookeeperProperties, builder); - verify(builder).aclProvider(any(ACLProvider.class)); - verify(builder).authorization(eq("sasl"), eq("myclient@EXAMPLE.COM".getBytes(Charsets.UTF_8))); - } - - @Test - public void shouldAddAclProviderWithRightACL() { - when(zookeeperProperties.hasAcl()).thenReturn(true); - when(zookeeperProperties.getAcl()).thenReturn("sasl:myclient@EXAMPLE.COM"); - when(zookeeperProperties.hasAuth()).thenReturn(false); - CuratorFactory curatorFactory = new CuratorFactory(configuration) { - @Override - protected void initializeCuratorFramework() { - } - }; - curatorFactory.enhanceBuilderWithSecurityParameters(zookeeperProperties, builder); - verify(builder).aclProvider(argThat(new ArgumentMatcher() { - @Override - public boolean matches(ACLProvider aclProvider) { - ACL acl = aclProvider.getDefaultAcl().get(0); - return acl.getId().getId().equals("myclient@EXAMPLE.COM") - && acl.getId().getScheme().equals("sasl"); - } - })); - } - - @Test - public void 
shouldNotAddAnySecureParameters() { - when(zookeeperProperties.hasAcl()).thenReturn(false); - when(zookeeperProperties.hasAuth()).thenReturn(false); - CuratorFactory curatorFactory = new CuratorFactory(configuration) { - @Override - protected void initializeCuratorFramework() { - } - }; - verifyZeroInteractions(builder); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTest.java b/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTest.java deleted file mode 100644 index f941941953..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.service; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.web.TestUtils; -import org.apache.atlas.web.security.BaseSecurityTest; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.eclipse.jetty.webapp.WebAppContext; -import org.testng.Assert; -import org.testng.annotations.Test; - -import java.net.HttpURLConnection; -import java.net.URL; - -import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH; - -public class SecureEmbeddedServerTest extends SecureEmbeddedServerTestBase { - @Test - public void testServerConfiguredUsingCredentialProvider() throws Exception { - // setup the configuration - final PropertiesConfiguration configuration = new PropertiesConfiguration(); - configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl); - configuration.setProperty("atlas.services.enabled", false); - configuration.setProperty("atlas.notification.embedded", "false"); - // setup the credential provider - setupCredentials(); - - String persistDir = BaseSecurityTest.writeConfiguration(configuration); - String originalConf = System.getProperty("atlas.conf"); - System.setProperty("atlas.conf", persistDir); - - ApplicationProperties.forceReload(); - SecureEmbeddedServer secureEmbeddedServer = null; - try { - secureEmbeddedServer = new SecureEmbeddedServer(EmbeddedServer.ATLAS_DEFAULT_BIND_ADDRESS, - 21443, TestUtils.getWarPath()) { - @Override - protected PropertiesConfiguration getConfiguration() { - return configuration; - } - - @Override - protected WebAppContext getWebAppContext(String path) { - WebAppContext application = new WebAppContext(path, "/"); - application.setDescriptor( - System.getProperty("projectBaseDir") + "/webapp/src/test/webapp/WEB-INF/web.xml"); - application.setClassLoader(Thread.currentThread().getContextClassLoader()); - return application; - } - - }; - secureEmbeddedServer.server.start(); - - URL url = new URL("https://localhost:21443/api/atlas/admin/status"); - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("GET"); - connection.connect(); - - // test to see whether server is up and root page can be served - Assert.assertEquals(connection.getResponseCode(), 200); - } catch(Throwable e) { - Assert.fail("War deploy failed", e); - } finally { - secureEmbeddedServer.server.stop(); - - if (originalConf == null) { - System.clearProperty("atlas.conf"); - } else { - System.setProperty("atlas.conf", originalConf); - } - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTestBase.java b/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTestBase.java deleted file mode 100755 index 57807f3369..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/SecureEmbeddedServerTestBase.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.atlas.web.service; - -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.Atlas; -import org.apache.atlas.AtlasException; -import org.apache.atlas.web.TestUtils; -import org.apache.atlas.web.integration.AdminJerseyResourceIT; -import org.apache.atlas.web.integration.EntityJerseyResourceIT; -import org.apache.atlas.web.integration.TypesJerseyResourceIT; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.alias.CredentialProvider; -import org.apache.hadoop.security.alias.CredentialProviderFactory; -import org.apache.hadoop.security.alias.JavaKeyStoreProvider; -import org.testng.Assert; -import org.testng.TestListenerAdapter; -import org.testng.TestNG; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.ws.rs.core.UriBuilder; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; - -import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH; -import static org.apache.atlas.security.SecurityProperties.DEFAULT_KEYSTORE_FILE_LOCATION; -import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY; -import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY; -import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY; - -/** - * Secure Test class for jersey resources. 
- */ -public class SecureEmbeddedServerTestBase { - - - public static final int ATLAS_DEFAULT_HTTPS_PORT = 21443; - private SecureEmbeddedServer secureEmbeddedServer; - protected String providerUrl; - private Path jksPath; - protected WebResource service; - private int securePort; - - static { - //for localhost testing only - javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier(new javax.net.ssl.HostnameVerifier() { - - public boolean verify(String hostname, javax.net.ssl.SSLSession sslSession) { - return hostname.equals("localhost"); - } - }); - System.setProperty("javax.net.ssl.trustStore", DEFAULT_KEYSTORE_FILE_LOCATION); - System.setProperty("javax.net.ssl.trustStorePassword", "keypass"); - System.setProperty("javax.net.ssl.trustStoreType", "JKS"); - System.setProperty("https.protocols", "TLSv1.2"); - } - - @BeforeClass - public void setupSecurePort() throws AtlasException { - org.apache.commons.configuration.Configuration configuration = ApplicationProperties.get(); - securePort = configuration.getInt(Atlas.ATLAS_SERVER_HTTPS_PORT, ATLAS_DEFAULT_HTTPS_PORT); - } - - @BeforeMethod - public void setup() throws Exception { - jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks"); - providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri(); - - String baseUrl = String.format("https://localhost:%d/", securePort); - - DefaultClientConfig config = new DefaultClientConfig(); - Client client = Client.create(config); - client.resource(UriBuilder.fromUri(baseUrl).build()); - - service = client.resource(UriBuilder.fromUri(baseUrl).build()); - } - - @Test - public void testNoConfiguredCredentialProvider() throws Exception { - String originalConf = null; - try { - originalConf = System.getProperty("atlas.conf"); - System.clearProperty("atlas.conf"); - ApplicationProperties.forceReload(); - secureEmbeddedServer = new SecureEmbeddedServer( - EmbeddedServer.ATLAS_DEFAULT_BIND_ADDRESS, securePort, TestUtils.getWarPath()); - secureEmbeddedServer.server.start(); - - Assert.fail("Should have thrown an exception"); - } catch (IOException e) { - Assert.assertEquals(e.getMessage(), - "No credential provider path cert.stores.credential.provider.path configured for storage of certificate store passwords"); - } finally { - if (secureEmbeddedServer != null) { - secureEmbeddedServer.server.stop(); - } - - if (originalConf == null) { - System.clearProperty("atlas.conf"); - } else { - System.setProperty("atlas.conf", originalConf); - } - } - } - - @Test - public void testMissingEntriesInCredentialProvider() throws Exception { - // setup the configuration - final PropertiesConfiguration configuration = new PropertiesConfiguration(); - configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl); - - try { - secureEmbeddedServer = new SecureEmbeddedServer( - EmbeddedServer.ATLAS_DEFAULT_BIND_ADDRESS, securePort, TestUtils.getWarPath()) { - @Override - protected PropertiesConfiguration getConfiguration() { - return configuration; - } - }; - Assert.fail("No entries should generate an exception"); - } catch (IOException e) { - Assert.assertTrue(e.getMessage().startsWith("No credential entry found for")); - } finally { - secureEmbeddedServer.server.stop(); - } - - } - - /** - * Runs the existing webapp test cases, this time against the initiated secure server instance. 
- * @throws Exception - */ - @Test - public void runOtherSuitesAgainstSecureServer() throws Exception { - final PropertiesConfiguration configuration = new PropertiesConfiguration(); - configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl); - // setup the credential provider - setupCredentials(); - - try { - secureEmbeddedServer = new SecureEmbeddedServer( - EmbeddedServer.ATLAS_DEFAULT_BIND_ADDRESS, securePort, TestUtils.getWarPath()) { - @Override - protected PropertiesConfiguration getConfiguration() { - return configuration; - } - }; - secureEmbeddedServer.server.start(); - - TestListenerAdapter tla = new TestListenerAdapter(); - TestNG testng = new TestNG(); - testng.setTestClasses(new Class[]{AdminJerseyResourceIT.class, EntityJerseyResourceIT.class, - TypesJerseyResourceIT.class}); - testng.addListener(tla); - testng.run(); - - } finally { - secureEmbeddedServer.server.stop(); - } - - } - - protected void setupCredentials() throws Exception { - Configuration conf = new Configuration(false); - - File file = new File(jksPath.toUri().getPath()); - file.delete(); - conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl); - - CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0); - - // create new aliases - try { - - char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass); - - char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass); - - char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'}; - provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass); - - // write out so that it can be found in checks - provider.flush(); - } catch (Exception e) { - e.printStackTrace(); - throw e; - } - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/service/ServiceStateTest.java b/webapp/src/test/java/org/apache/atlas/web/service/ServiceStateTest.java deleted file mode 100644 index c171dfdf73..0000000000 --- a/webapp/src/test/java/org/apache/atlas/web/service/ServiceStateTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.service; - -import org.apache.atlas.ha.HAConfiguration; -import org.apache.commons.configuration.Configuration; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.fail; - -public class ServiceStateTest { - - @Mock - private Configuration configuration; - - @BeforeMethod - public void setup() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void testShouldBeActiveIfHAIsDisabled() { - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false); - - ServiceState serviceState = new ServiceState(configuration); - assertEquals(ServiceState.ServiceStateValue.ACTIVE, serviceState.getState()); - } - - @Test(expectedExceptions = IllegalStateException.class) - public void testShouldDisallowTransitionIfHAIsDisabled() { - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false); - - ServiceState serviceState = new ServiceState(configuration); - serviceState.becomingPassive(); - fail("Should not allow transition"); - } - - @Test - public void testShouldChangeStateIfHAIsEnabled() { - when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true); - - ServiceState serviceState = new ServiceState(configuration); - serviceState.becomingPassive(); - assertEquals(ServiceState.ServiceStateValue.BECOMING_PASSIVE, serviceState.getState()); - } -} diff --git a/webapp/src/test/java/org/apache/atlas/web/util/ServletsTest.java b/webapp/src/test/java/org/apache/atlas/web/util/ServletsTest.java index 4c079539ef..ca20607c8f 100644 --- a/webapp/src/test/java/org/apache/atlas/web/util/ServletsTest.java +++ b/webapp/src/test/java/org/apache/atlas/web/util/ServletsTest.java @@ -19,10 +19,11 @@ package org.apache.atlas.web.util; import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.atlas.AtlasClient; import org.testng.annotations.Test; import javax.ws.rs.core.Response; + +import static org.apache.atlas.repository.Constants.ERROR; import static org.testng.Assert.*; @Test @@ -35,6 +36,6 @@ public void testEmptyMessage() throws Exception { assertNotNull(response); ObjectNode responseEntity = (ObjectNode) response.getEntity(); assertNotNull(responseEntity); - assertNotNull(responseEntity.get(AtlasClient.ERROR)); + assertNotNull(responseEntity.get(ERROR)); } }