
Commit bb39381

HDFS-17700. Fix HttpServer service's conf issue.

1 parent 7e67358 · commit bb39381

File tree: 9 files changed, +114 -14 lines
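In brief: the Router, NameNode, and DataNode servers may bind ephemeral ports (a configured port of 0), but the actual bound address was not always written back into the live Configuration, and RouterHttpServer read connector 0 unconditionally, so with an HTTPS-only policy the HTTP address could end up holding the HTTPS port while the HTTPS address was never updated. The changes below resolve each enabled connector according to the HTTP policy and store the real host:port under the matching *-address key. A minimal standalone sketch of that write-back pattern, using plain JDK types as stand-ins for Hadoop's Configuration and HttpServer2 (the literal key corresponds to RBFConfigKeys.DFS_ROUTER_HTTP_ADDRESS_KEY; everything else is illustrative):

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.util.HashMap;
import java.util.Map;

public class EphemeralPortWriteBack {
  public static void main(String[] args) throws IOException {
    // Stand-in for org.apache.hadoop.conf.Configuration.
    Map<String, String> conf = new HashMap<>();
    conf.put("dfs.federation.router.http-address", "0.0.0.0:0"); // port 0 = ephemeral

    try (ServerSocket server = new ServerSocket(0)) { // bind an ephemeral port
      InetSocketAddress bound = new InetSocketAddress("0.0.0.0", server.getLocalPort());
      // Write the real host:port back, as RouterHttpServer#serviceStart now does
      // via conf.set(RBFConfigKeys.DFS_ROUTER_HTTP_ADDRESS_KEY, ...).
      conf.put("dfs.federation.router.http-address",
          bound.getHostName() + ":" + bound.getPort());
      System.out.println("Listening HTTP traffic on "
          + conf.get("dfs.federation.router.http-address"));
    }
  }
}
```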

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/Router.java

+12

```diff
@@ -509,6 +509,18 @@ public InetSocketAddress getHttpServerAddress() {
     return null;
   }
 
+  /**
+   * Get the current HTTPS socket address for the router.
+   *
+   * @return InetSocketAddress HTTPS address.
+   */
+  public InetSocketAddress getHttpsServerAddress() {
+    if (httpServer != null) {
+      return httpServer.getHttpsAddress();
+    }
+    return null;
+  }
+
   @Override
   public void verifyToken(DelegationTokenIdentifier tokenId, byte[] password)
       throws IOException {
```

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterHttpServer.java

+28 -4

```diff
@@ -24,17 +24,24 @@
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.service.AbstractService;
 
 import javax.servlet.ServletContext;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * Web interface for the {@link Router}. It exposes the Web UI and the WebHDFS
  * methods from {@link RouterWebHdfsMethods}.
  */
 public class RouterHttpServer extends AbstractService {
 
+  private static final Logger LOG = LoggerFactory.getLogger(RouterHttpServer.class);
+
   protected static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
 
@@ -82,6 +89,7 @@ protected void serviceInit(Configuration configuration) throws Exception {
   protected void serviceStart() throws Exception {
     // Build and start server
     String webApp = "router";
+    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
     HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(
         this.conf, this.httpAddress, this.httpsAddress, webApp,
         RBFConfigKeys.DFS_ROUTER_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
@@ -109,10 +117,26 @@ protected void serviceStart() throws Exception {
     this.httpServer.start();
 
     // The server port can be ephemeral... ensure we have the correct info
-    InetSocketAddress listenAddress = this.httpServer.getConnectorAddress(0);
-    if (listenAddress != null) {
-      this.httpAddress = new InetSocketAddress(this.httpAddress.getHostName(),
-          listenAddress.getPort());
+    int curIndex = 0;
+    if (policy.isHttpEnabled()) {
+      InetSocketAddress httpListenAddress = this.httpServer.getConnectorAddress(curIndex++);
+      if (httpListenAddress != null) {
+        this.httpAddress =
+            new InetSocketAddress(this.httpAddress.getHostName(), httpListenAddress.getPort());
+        conf.set(RBFConfigKeys.DFS_ROUTER_HTTP_ADDRESS_KEY,
+            NetUtils.getHostPortString(httpAddress));
+        LOG.info("Listening HTTP traffic on {}", httpAddress);
+      }
+    }
+    if (policy.isHttpsEnabled()) {
+      InetSocketAddress httpsListenAddress = this.httpServer.getConnectorAddress(curIndex);
+      if (httpsListenAddress != null) {
+        this.httpsAddress =
+            new InetSocketAddress(this.httpsAddress.getHostName(), httpsListenAddress.getPort());
+        conf.set(RBFConfigKeys.DFS_ROUTER_HTTPS_ADDRESS_KEY,
+            NetUtils.getHostPortString(httpsAddress));
+        LOG.info("Listening HTTPS traffic on {}", httpsAddress);
+      }
     }
     super.serviceStart();
   }
```
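A note on the curIndex bookkeeping above: HttpServer2 creates one connector per enabled scheme, HTTP before HTTPS, so under an HTTPS-only policy the HTTPS connector sits at index 0 and the previous unconditional getConnectorAddress(0) call reported the HTTPS port as the HTTP address. A self-contained demo of the index assignment, with a stand-in Policy enum that mirrors the semantics of org.apache.hadoop.http.HttpConfig.Policy:

```java
// Stand-in for org.apache.hadoop.http.HttpConfig.Policy (illustrative only).
enum Policy {
  HTTP_ONLY, HTTPS_ONLY, HTTP_AND_HTTPS;

  boolean isHttpEnabled()  { return this != HTTPS_ONLY; }
  boolean isHttpsEnabled() { return this != HTTP_ONLY; }
}

public class ConnectorIndexDemo {
  public static void main(String[] args) {
    for (Policy policy : Policy.values()) {
      // Same bookkeeping as RouterHttpServer#serviceStart: the HTTP connector,
      // when enabled, takes index 0 and the HTTPS connector comes after it.
      int curIndex = 0;
      Integer httpIdx = null;
      Integer httpsIdx = null;
      if (policy.isHttpEnabled()) {
        httpIdx = curIndex++;
      }
      if (policy.isHttpsEnabled()) {
        httpsIdx = curIndex;
      }
      System.out.printf("%s -> HTTP connector index: %s, HTTPS connector index: %s%n",
          policy, httpIdx, httpsIdx);
    }
  }
}
```

HTTP_ONLY yields HTTP index 0, HTTPS_ONLY yields HTTPS index 0, and HTTP_AND_HTTPS yields indexes 0 and 1, which is why the fix consults the policy before mapping connectors to addresses.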

hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MiniRouterDFSCluster.java

+1

```diff
@@ -71,6 +71,7 @@
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
```
TestRouterHttpServerXFrame.java → TestRouterHttpServer.java (renamed)

+26 -2

```diff
@@ -29,13 +29,14 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.http.HttpConfig;
 
 import static org.apache.hadoop.http.HttpServer2.XFrameOption.SAMEORIGIN;
 
 /**
- * A class to test the XFrame options of Router HTTP Server.
+ * A class to test the Router HTTP Server.
  */
-public class TestRouterHttpServerXFrame {
+public class TestRouterHttpServer {
 
   @Test
   public void testRouterXFrame() throws IOException {
@@ -62,4 +63,27 @@ public void testRouterXFrame() throws IOException {
       router.close();
     }
   }
+
+  @Test
+  public void testRouterHttpServerConfig() throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
+    Router router = new Router();
+    try {
+      router.init(conf);
+      router.start();
+
+      Configuration routerConfig = router.getConfig();
+      InetSocketAddress httpServerAddress = router.getHttpServerAddress();
+      Assert.assertEquals(httpServerAddress.getHostName() + ":" + httpServerAddress.getPort(),
+          routerConfig.get(RBFConfigKeys.DFS_ROUTER_HTTP_ADDRESS_KEY));
+
+      InetSocketAddress httpsServerAddress = router.getHttpsServerAddress();
+      Assert.assertEquals(httpsServerAddress.getHostName() + ":" + httpsServerAddress.getPort(),
+          routerConfig.get(RBFConfigKeys.DFS_ROUTER_HTTPS_ADDRESS_KEY));
+    } finally {
+      router.stop();
+      router.close();
+    }
+  }
 }
```

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

+4 -1

```diff
@@ -1561,7 +1561,10 @@ private void initIpcServer() throws IOException {
     DFSUtil.addInternalPBProtocol(getConf(), InterDatanodeProtocolPB.class, service,
         ipcServer);
 
-    LOG.info("Opened IPC server at {}", ipcServer.getListenerAddress());
+    InetSocketAddress listenerAddress = ipcServer.getListenerAddress();
+    LOG.info("Opened IPC server at {}", listenerAddress);
+    dnConf.getConf().set(DFS_DATANODE_IPC_ADDRESS_KEY,
+        listenerAddress.getHostName() + ":" + listenerAddress.getPort());
 
     // set service-level authorization security policy
     if (getConf().getBoolean(
```
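With the IPC address written back, other components can recover the live endpoint from the Configuration even when dfs.datanode.ipc.address was configured with port 0; the manual getHostName() + ":" + getPort() concatenation produces the same host:port form that NetUtils.getHostPortString emits elsewhere in this commit. A hedged usage sketch against well-known Hadoop APIs (Configuration#get/set and NetUtils#createSocketAddr; the seeded value is illustrative only):

```java
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;

public class ReadBackIpcAddress {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // In a running DataNode this key is now set by initIpcServer(); we seed it
    // here only so the sketch runs standalone.
    conf.set("dfs.datanode.ipc.address", "127.0.0.1:50020");

    // Parse the recorded host:port back into a socket address.
    InetSocketAddress ipc = NetUtils.createSocketAddr(conf.get("dfs.datanode.ipc.address"));
    System.out.println("DataNode IPC endpoint: " + ipc);
  }
}
```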

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java

+6 -2

```diff
@@ -303,7 +303,9 @@ public void start() throws IOException {
     if (httpServer != null) {
       InetSocketAddress infoAddr = DataNode.getInfoAddr(conf);
       httpAddress = getChannelLocalAddress(httpServer, infoAddr);
-      LOG.info("Listening HTTP traffic on " + httpAddress);
+      conf.set(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY,
+          NetUtils.getHostPortString(httpAddress));
+      LOG.info("Listening HTTP traffic on {}", httpAddress);
     }
 
     if (httpsServer != null) {
@@ -312,7 +314,9 @@ public void start() throws IOException {
           DFS_DATANODE_HTTPS_ADDRESS_KEY,
           DFS_DATANODE_HTTPS_ADDRESS_DEFAULT));
       httpsAddress = getChannelLocalAddress(httpsServer, secInfoSocAddr);
-      LOG.info("Listening HTTPS traffic on " + httpsAddress);
+      conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY,
+          NetUtils.getHostPortString(httpsAddress));
+      LOG.info("Listening HTTPS traffic on {}", httpsAddress);
     }
   }
```

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

+15 -4

```diff
@@ -49,12 +49,17 @@
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
 import com.sun.jersey.api.core.ResourceConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Encapsulates the HTTP server started by the NameNode.
 */
 @InterfaceAudience.Private
 public class NameNodeHttpServer {
+
+  private static final Logger LOG = LoggerFactory.getLogger(NameNodeHttpServer.class);
+
   private HttpServer2 httpServer;
   private final Configuration conf;
   private final NameNode nn;
@@ -168,14 +173,20 @@ void start() throws IOException {
     int connIdx = 0;
     if (policy.isHttpEnabled()) {
       httpAddress = httpServer.getConnectorAddress(connIdx++);
-      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
-          NetUtils.getHostPortString(httpAddress));
+      if (httpAddress != null) {
+        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
+            NetUtils.getHostPortString(httpAddress));
+        LOG.info("Listening HTTP traffic on {}", httpAddress);
+      }
     }
 
     if (policy.isHttpsEnabled()) {
       httpsAddress = httpServer.getConnectorAddress(connIdx);
-      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-          NetUtils.getHostPortString(httpsAddress));
+      if (httpsAddress != null) {
+        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+            NetUtils.getHostPortString(httpsAddress));
+        LOG.info("Listening HTTPS traffic on {}", httpsAddress);
+      }
     }
   }
```

hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html

+1 -1

```diff
@@ -68,7 +68,7 @@
 
 <script type="text/x-dust-template" id="tmpl-dn">
 {#dn}
-  <div class="page-header"><h1>DataNode on <small>{HostName}:{DataPort}</small></h1></div>
+  <div class="page-header"><h1>DataNode on <small>{HostName}:{RpcPort}</small></h1></div>
   <table class="table table-bordered table-striped">
     <tr><th>Cluster ID:</th><td>{ClusterId}</td></tr>
     <tr><th>Started:</th><td>{DNStartedTimeInMillis|date_tostring}</td></tr>
```

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeConfig.java

+21

```diff
@@ -18,13 +18,15 @@
 package org.apache.hadoop.hdfs;
 
 import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 
@@ -165,4 +167,23 @@ public void testMemlockLimit() throws Exception {
         prevLimit);
     }
   }
+
+  @Test
+  public void testDataNodeIpcAndHttpSeverConf() throws Exception {
+    Configuration conf = cluster.getConfiguration(0);
+    DataNode dn = null;
+    try {
+      dn = DataNode.createDataNode(new String[] {}, conf);
+      Configuration dnConf = dn.getConf();
+      InetSocketAddress listenerAddress = dn.ipcServer.getListenerAddress();
+      assertThat(dnConf.get(DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY))
+          .isEqualTo(listenerAddress.getHostName() + ":" + listenerAddress.getPort());
+      assertThat(dnConf.get(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY))
+          .isEqualTo(listenerAddress.getHostName() + ":" + dn.getHttpPort());
+    } finally {
+      if (dn != null) {
+        dn.shutdown();
+      }
+    }
+  }
 }
```
