<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-->
<configuration>
  <property>
    <name>datanucleus.schema.autoCreateTables</name>
    <value>true</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <value>10.254.20.23:2181,10.254.20.29:2181,10.254.20.26:2181</value>
  </property>
  <property>
    <name>hive.downloaded.resources.dir</name>
    <value>/data/emr/hive/tmp/${hive.session.id}_resources</value>
  </property>
  <property>
    <name>hive.exec.dynamic.partition.mode</name>
    <value>nonstrict</value>
  </property>
  <property>
    <name>hive.exec.failure.hooks</name>
    <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
  </property>
  <property>
    <name>hive.exec.local.scratchdir</name>
    <value>/data/emr/hive/tmp</value>
  </property>
  <property>
    <name>hive.exec.parallel</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.exec.parallel.thread.number</name>
    <value>16</value>
  </property>
  <property>
    <name>hive.exec.post.hooks</name>
    <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
  </property>
  <property>
    <name>hive.exec.pre.hooks</name>
    <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
  </property>
  <property>
    <name>hive.execution.engine</name>
    <value>tez</value>
  </property>
  <property>
    <name>hive.fetch.task.aggr</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.hwi.listen.host</name>
    <value>0.0.0.0</value>
  </property>
  <property>
    <name>hive.hwi.listen.port</name>
    <value>7002</value>
  </property>
  <property>
    <name>hive.llap.daemon.output.service.port</name>
    <value>7009</value>
  </property>
  <property>
    <name>hive.llap.daemon.rpc.port</name>
    <value>7007</value>
  </property>
  <property>
    <name>hive.llap.daemon.web.port</name>
    <value>7008</value>
  </property>
  <property>
    <name>hive.llap.daemon.yarn.shuffle.port</name>
    <value>7006</value>
  </property>
  <property>
    <name>hive.llap.management.rpc.port</name>
    <value>7005</value>
  </property>
  <property>
    <name>hive.merge.mapfiles</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.merge.mapredfiles</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.merge.size.per.task</name>
    <value>134217728</value>
  </property>
  <property>
    <name>hive.merge.smallfiles.avgsize</name>
    <value>134217728</value>
  </property>
  <property>
    <name>hive.merge.sparkfiles</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.merge.tezfiles</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.db.encoding</name>
    <value>UTF-8</value>
  </property>
  <property>
    <name>hive.metastore.event.db.notification.api.auth</name>
    <value>false</value>
  </property>
  <property>
    <name>hive.metastore.kerberos.keytab.file</name>
    <value>/var/krb5kdc/emr.keytab</value>
  </property>
  <property>
    <name>hive.metastore.kerberos.principal</name>
    <value>hadoop/_HOST@EMR-5XJSY31F</value>
  </property>
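  <!--
    Metastore connectivity: hive.metastore.sasl.enabled below turns on
    Kerberos (using the keytab and principal above) for the thrift endpoints
    listed in hive.metastore.uris; listing two URIs lets clients fail over
    between the metastore hosts. hive.metastore.port should match the port
    used in those URIs (7004 in this deployment).
  -->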
  <property>
    <name>hive.metastore.metrics.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.port</name>
    <value>7004</value>
  </property>
  <property>
    <name>hive.metastore.sasl.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.schema.verification</name>
    <value>false</value>
  </property>
  <property>
    <name>hive.metastore.schema.verification.record.version</name>
    <value>false</value>
  </property>
  <property>
    <name>hive.metastore.uris</name>
    <value>thrift://10.254.20.18:7004,thrift://10.254.20.22:7004</value>
  </property>
  <property>
    <name>hive.metastore.warehouse.dir</name>
    <value>/usr/hive/warehouse</value>
  </property>
  <property>
    <name>hive.querylog.location</name>
    <value>/data/emr/hive/tmp</value>
  </property>
  <property>
    <name>hive.server2.authentication</name>
    <value>kerberos</value>
  </property>
  <property>
    <name>hive.server2.authentication.kerberos.keytab</name>
    <value>/var/krb5kdc/emr.keytab</value>
  </property>
  <property>
    <name>hive.server2.authentication.kerberos.principal</name>
    <value>hadoop/_HOST@EMR-5XJSY31F</value>
  </property>
  <property>
    <name>hive.server2.logging.operation.log.location</name>
    <value>/data/emr/hive/tmp/operation_logs</value>
  </property>
  <property>
    <name>hive.server2.metrics.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.server2.support.dynamic.service.discovery</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <value>10.254.20.28</value>
  </property>
  <property>
    <name>hive.server2.thrift.http.port</name>
    <value>7000</value>
  </property>
  <property>
    <name>hive.server2.thrift.port</name>
    <value>7001</value>
  </property>
  <property>
    <name>hive.server2.webui.host</name>
    <value>0.0.0.0</value>
  </property>
  <property>
    <name>hive.server2.webui.max.threads</name>
    <value>50</value>
  </property>
  <property>
    <name>hive.server2.webui.port</name>
    <value>7003</value>
  </property>
  <property>
    <name>hive.server2.zookeeper.namespace</name>
    <value>hiveserver2</value>
  </property>
  <property>
    <name>hive.service.metrics.reporter</name>
    <value>JMX</value>
  </property>
  <property>
    <name>hive.stats.autogather</name>
    <value>false</value>
  </property>
  <property>
    <name>hive.tez.am.max.app.attempts</name>
    <value>5</value>
  </property>
  <property>
    <name>hive.tez.am.task.max.failed.attempts</name>
    <value>10</value>
  </property>
  <property>
    <name>hive.tez.auto.reducer.parallelism</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.tez.container.size</name>
    <value>1024</value>
  </property>
  <property>
    <name>hive.vectorized.execution.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.vectorized.execution.mapjoin.minmax.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.vectorized.execution.mapjoin.native.multikey.only.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.zookeeper.client.port</name>
    <value>2181</value>
  </property>
  <property>
    <name>hive.zookeeper.quorum</name>
    <value>10.254.20.23:2181,10.254.20.29:2181,10.254.20.26:2181</value>
  </property>
  <property>
    <name>io.compression.codec.lzo.class</name>
    <value>com.hadoop.compression.lzo.LzoCodec</value>
  </property>
  <property>
    <name>io.compression.codecs</name>
    <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
  </property>
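  <!--
    Metastore backing database: a MySQL instance accessed through JDO and
    DataNucleus. Because the connection URL lives in an XML value, the "&"
    between its query parameters must be written as "&amp;". The password is
    stored in plain text here; a Hadoop CredentialProvider keystore is the
    usual way to avoid that.
  -->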
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>UYdfjsdDigf8d3U</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://10.254.20.16:3306/hivemetastore?useSSL=false&amp;createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <property>
    <name>mapreduce.cluster.local.dir</name>
    <value>/data/emr/hive/tmp/tmp</value>
  </property>
  <property>
    <name>spark.driver.extraLibraryPath</name>
    <value>/usr/local/service/hadoop/lib/native:/usr/local/service/hadoop/lib/native/Linux-amd64-64/lib</value>
  </property>
  <property>
    <name>spark.eventLog.dir</name>
    <value>hdfs://HDFS8000912/spark-history</value>
  </property>
  <property>
    <name>spark.eventLog.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>spark.executor.extraLibraryPath</name>
    <value>/usr/local/service/hadoop/lib/native:/usr/local/service/hadoop/lib/native/Linux-amd64-64/lib</value>
  </property>
  <property>
    <name>spark.history.fs.cleaner.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>spark.home</name>
    <value>/usr/local/service/spark</value>
  </property>
  <property>
    <name>spark.yarn.jars</name>
    <value>hdfs:///spark/jars/*</value>
  </property>
</configuration>
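<!--
  A quick well-formedness check after editing (assuming libxml2's xmllint is
  available; the flag is spelled with a single dash here because a pair of
  hyphens may not appear inside an XML comment, and xmllint accepts both
  spellings):

    xmllint -noout hive-site.xml

  An unescaped "&" in a value, as in the JDBC URL above, is the most common
  reason a file like this fails to parse.
-->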