<?xml version="1.0" encoding="UTF-8"?>
<!--The short-circuit local reads feature cannot be used because UNIX Domain sockets are not available on Windows-->
<!-- Hadoop core-site.xml for an HA HDFS deployment (nameservice "hdfsCluster").
     NOTE(review): grouping comments below describe the visible settings; semantics
     of individual keys follow Hadoop's core-default documentation. -->
<configuration>
<!-- Default filesystem URI; <final> prevents per-job/client overrides. -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://hdfsCluster</value>
<final>true</final>
</property>
<!-- Trash retention interval (360 — per Hadoop docs this is in minutes, i.e. 6h; confirm against deployed version). -->
<property>
<name>fs.trash.interval</name>
<value>360</value>
</property>
<!-- HA failover controller and ZooKeeper quorum used for automatic NameNode failover. -->
<property>
<name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
<value>120</value>
</property>
<property>
<name>ha.zookeeper.quorum</name>
<value>sl010a-hopdb2:2181,sl010a-hopdb4:2181,sl010a-hopdb3:2181</value>
</property>
<property>
<name>hadoop.custom-extensions.root</name>
<value>/hdp/ext/2.6/hadoop</value>
</property>
<property>
<name>hadoop.http.authentication.simple.anonymous.allowed</name>
<value>true</value>
</property>
<!-- Impersonation (proxyuser) settings: each service account below (hbase, hcat,
     hdfs, hive, httpfs, hue, manager, oozie, yarn) may proxy any group ("*")
     when connecting from the same fixed list of cluster hosts. -->
<property>
<name>hadoop.proxyuser.hbase.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hbase.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.hcat.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hcat.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.httpfs.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.httpfs.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.hue.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hue.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.manager.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.manager.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.oozie.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.oozie.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value>sl010a-analysisdb1,sl010a-analysisdb2,sl010a-analysisdb3,sl010a-hopdb1,sl010a-hopdb2,sl010a-hopdb3,sl010a-hopdb4,sl010a-hopdb5,sl010a-hopdb6,sl010a-hopdb7</value>
</property>
<!-- Security: "simple" authentication (no Kerberos) and service-level authorization disabled. -->
<property>
<name>hadoop.security.auth_to_local</name>
<value>DEFAULT</value>
</property>
<property>
<name>hadoop.security.authentication</name>
<value>simple</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>false</value>
</property>
<!-- I/O: registered compression codecs, stream buffer size (bytes), serialization framework. -->
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
</property>
<!-- RPC (IPC) client/server tuning. -->
<property>
<name>ipc.client.connect.max.retries</name>
<value>50</value>
</property>
<property>
<name>ipc.client.connection.maxidletime</name>
<value>30000</value>
</property>
<property>
<name>ipc.client.idlethreshold</name>
<value>8000</value>
</property>
<property>
<name>ipc.server.tcpnodelay</name>
<value>true</value>
</property>
<property>
<name>mapreduce.jobtracker.webinterface.trusted</name>
<value>false</value>
</property>
<!-- Rack-awareness: script that maps hosts to network topology locations. -->
<property>
<name>net.topology.script.file.name</name>
<value>/etc/hadoop/conf/topology_script.py</value>
</property>
</configuration>