hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From acmur...@apache.org
Subject svn commit: r1189036 [2/2] - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapredu...
Date Wed, 26 Oct 2011 06:29:23 GMT
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,275 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URLEncoder;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.Enumeration;
+import java.util.HashSet;
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.httpclient.Header;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.HttpMethod;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.util.Apps;
+import org.apache.hadoop.yarn.util.StringHelper;
+import org.apache.hadoop.yarn.webapp.MimeType;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+
/**
 * Servlet that proxies HTTP GET requests through to the web interface of the
 * Application Master (AM) that owns the requested application.  In a secure
 * cluster the user is warned once per application before being forwarded to a
 * page owned by a different user; approval is remembered via a cookie.
 */
public class WebAppProxyServlet extends HttpServlet {
  private static final long serialVersionUID = 1L;
  private static final Log LOG = LogFactory.getLog(WebAppProxyServlet.class);
  /** Request headers that are copied through to the AM unchanged. */
  private static final HashSet<String> passThroughHeaders = 
    new HashSet<String>(Arrays.asList("User-Agent", "Accept", "Accept-Encoding",
        "Accept-Language", "Accept-Charset"));
  
  /** Cookie that tells the AM which user the proxy authenticated. */
  public static final String PROXY_USER_COOKIE_NAME = "proxy-user";
  
  /** Terminal element type required by the Hamlet page builder below. */
  private static class _ implements Hamlet._ {
    //Empty
  }
  
  /** Minimal Hamlet subclass used to render the 404 and warning pages. */
  private static class Page extends Hamlet {
    Page(PrintWriter out) {
      super(out, 0, false);
    }
  
    public HTML<WebAppProxyServlet._> html() {
      return new HTML<WebAppProxyServlet._>("html", null, EnumSet.of(EOpt.ENDTAG));
    }
  }

  /**
   * Output 404 with appropriate message.
   * @param resp the http response.
   * @param message the message to include on the page.
   * @throws IOException on any error.
   */
  private static void notFound(HttpServletResponse resp, String message) 
    throws IOException {
    resp.setStatus(HttpServletResponse.SC_NOT_FOUND);
    resp.setContentType(MimeType.HTML);
    Page p = new Page(resp.getWriter());
    p.html().
      h1(message).
    _();
  }
  
  /**
   * Warn the user that the link may not be safe!
   * @param resp the http response
   * @param link the link to point to
   * @param user the user that owns the link.
   * @param id the application the link belongs to.
   * @throws IOException on any error.
   */
  private static void warnUserPage(HttpServletResponse resp, String link, 
      String user, ApplicationId id) throws IOException {
    //Set the cookie when we warn which overrides the query parameter
    //This is so that if a user passes in the approved query parameter without
    //having first visited this page then this page will still be displayed 
    resp.addCookie(makeCheckCookie(id, false));
    resp.setContentType(MimeType.HTML);
    Page p = new Page(resp.getWriter());
    p.html().
      h1("WARNING: The following page may not be safe!").h3().
      _("click ").a(link, "here").
      _(" to continue to an Application Master web interface owned by ", user).
      _().
    _();
  }
  
  /**
   * Download link and have it be the response.
   * @param req the http request
   * @param resp the http response
   * @param link the link to download
   * @param c the cookie to set if any
   * @throws IOException on any error.
   */
  private static void proxyLink(HttpServletRequest req, 
      HttpServletResponse resp, URI link, Cookie c) throws IOException {
    org.apache.commons.httpclient.URI uri = 
      new org.apache.commons.httpclient.URI(link.toString(), false);
    HttpClient client = new HttpClient();
    HttpMethod method = new GetMethod(uri.getEscapedURI());

    // Forward only the white-listed request headers.
    @SuppressWarnings("unchecked")
    Enumeration<String> names = req.getHeaderNames();
    while(names.hasMoreElements()) {
      String name = names.nextElement();
      if(passThroughHeaders.contains(name)) {
        String value = req.getHeader(name);
        LOG.debug("REQ HEADER: "+name+" : "+value);
        method.setRequestHeader(name, value);
      }
    }

    // Tell the AM, via a cookie, which user the proxy authenticated.
    String user = req.getRemoteUser();
    if(user != null && !user.isEmpty()) {
      method.setRequestHeader("Cookie",PROXY_USER_COOKIE_NAME+"="+
          URLEncoder.encode(user, "ASCII"));
    }
    OutputStream out = resp.getOutputStream();
    try {
      resp.setStatus(client.executeMethod(method));
      for(Header header : method.getResponseHeaders()) {
        resp.setHeader(header.getName(), header.getValue());
      }
      if(c != null) {
        resp.addCookie(c);
      }
      InputStream in = method.getResponseBodyAsStream();
      if(in != null) {
        //copyBytes with close=true closes both in and out when done
        IOUtils.copyBytes(in, out, 4096, true);
      }
    } finally {
      method.releaseConnection();
    }
  }
  
  /** Name of the per-application cookie recording that the user was warned. */
  private static String getCheckCookieName(ApplicationId id){
    return "checked_"+id;
  }
  
  /** Build the warned/approved cookie scoped to this application's path. */
  private static Cookie makeCheckCookie(ApplicationId id, boolean isSet) {
    Cookie c = new Cookie(getCheckCookieName(id),String.valueOf(isSet));
    c.setPath(ProxyUriUtils.getPath(id));
    c.setMaxAge(60 * 60 * 2); //2 hours in seconds
    return c;
  }
  
  /** @return true when the proxy reported that security is enabled. */
  private boolean isSecurityEnabled() {
    Boolean b = (Boolean) getServletContext()
        .getAttribute(WebAppProxy.IS_SECURITY_ENABLED_ATTRIBUTE);
    if(b != null) return b;
    return false;
  }
  
  /** Look up the application report through the fetcher stored in the context. */
  private ApplicationReport getApplicationReport(ApplicationId id) throws IOException {
    return ((AppReportFetcher) getServletContext()
        .getAttribute(WebAppProxy.FETCHER_ATTRIBUTE)).getApplicationReport(id);
  }
  
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp) 
  throws IOException{
    try {
      String userApprovedParamS = 
        req.getParameter(ProxyUriUtils.PROXY_APPROVAL_PARAM);
      boolean userWasWarned = false;
      boolean userApproved = 
        (userApprovedParamS != null && Boolean.valueOf(userApprovedParamS));
      boolean securityEnabled = isSecurityEnabled();
      final String remoteUser = req.getRemoteUser();
      final String pathInfo = req.getPathInfo();

      // getPathInfo() is null when the servlet is hit with no extra path;
      // guard it before splitting or we NPE with a container 500.
      if(pathInfo == null) {
        LOG.warn(remoteUser+" Gave an invalid proxy path "+pathInfo);
        notFound(resp, "Your path appears to be formatted incorrectly.");
        return;
      }
      String parts[] = pathInfo.split("/", 3);
      if(parts.length < 2) {
        LOG.warn(remoteUser+" Gave an invalid proxy path "+pathInfo);
        notFound(resp, "Your path appears to be formatted incorrectly.");
        return;
      }
      //parts[0] is empty because path info always starts with a /
      String appId = parts[1];
      String rest = parts.length > 2 ? parts[2] : "";
      ApplicationId id = Apps.toAppID(appId);
      if(id == null) {
        LOG.warn(req.getRemoteUser()+" Attempting to access "+appId+
        " that is invalid");
        notFound(resp, appId+" appears to be formatted incorrectly.");
        return;
      }
      
      if(securityEnabled) {
        String cookieName = getCheckCookieName(id); 
        // getCookies() returns null (not an empty array) when the request
        // carries no Cookie header.
        Cookie[] cookies = req.getCookies();
        if(cookies != null) {
          for(Cookie c: cookies) {
            if(cookieName.equals(c.getName())) {
              userWasWarned = true;
              userApproved = userApproved || Boolean.valueOf(c.getValue());
              break;
            }
          }
        }
      }
      
      boolean checkUser = securityEnabled && (!userWasWarned || !userApproved);

      ApplicationReport applicationReport = getApplicationReport(id);
      if(applicationReport == null) {
        LOG.warn(req.getRemoteUser()+" Attempting to access "+id+
            " that was not found");
        notFound(resp, "Application "+appId+" could not be found, " +
            "please try the history server");
        return;
      }
      URI trackingUri = ProxyUriUtils.getUriFromAMUrl(
          applicationReport.getOriginalTrackingUrl());
      
      String runningUser = applicationReport.getUser();
      if(checkUser && !runningUser.equals(remoteUser)) {
        LOG.info("Asking "+remoteUser+" if they want to connect to the " +
            "app master GUI of "+appId+" owned by "+runningUser);
        warnUserPage(resp, ProxyUriUtils.getPathAndQuery(id, rest, 
            req.getQueryString(), true), runningUser, id);
        return;
      }
      
      URI toFetch = new URI(req.getScheme(), 
          trackingUri.getAuthority(),
          StringHelper.ujoin(trackingUri.getPath(), rest), req.getQueryString(),
          null);
      
      LOG.info(req.getRemoteUser()+" is accessing unchecked "+toFetch+
          " which is the app master GUI of "+appId+" owned by "+runningUser);

      // For applications whose AM has exited, redirect the client instead of
      // proxying; all other states fall through and are proxied directly.
      switch(applicationReport.getYarnApplicationState()) {
      case KILLED:
      case FINISHED:
      case FAILED:
        resp.sendRedirect(resp.encodeRedirectURL(toFetch.toString()));
        return;
      }
      Cookie c = null;
      if(userWasWarned && userApproved) {
        c = makeCheckCookie(id, true);
      }
      proxyLink(req, resp, toFetch, c);

    } catch(URISyntaxException e) {
      throw new IOException(e); 
    }
  }
}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,45 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy.amfilter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.FilterContainer;
+import org.apache.hadoop.http.FilterInitializer;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+public class AmFilterInitializer extends FilterInitializer {
+  private static final String FILTER_NAME = "AM_PROXY_FILTER";
+  private static final String FILTER_CLASS = AmIpFilter.class.getCanonicalName();
+  
+  @Override
+  public void initFilter(FilterContainer container, Configuration conf) {
+    Map<String, String> params = new HashMap<String, String>();
+    String proxy = YarnConfiguration.getProxyHostAndPort(conf);
+    String[] parts = proxy.split(":");
+    params.put(AmIpFilter.PROXY_HOST, parts[0]);
+    params.put(AmIpFilter.PROXY_URI_BASE, "http://"+proxy+
+        System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV));
+    container.addFilter(FILTER_NAME, FILTER_CLASS, params);
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,117 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy.amfilter;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.yarn.server.webproxy.WebAppProxyServlet;
+
+public class AmIpFilter implements Filter {
+  private static final Log LOG = LogFactory.getLog(AmIpFilter.class);
+  
+  public static final String PROXY_HOST = "PROXY_HOST";
+  public static final String PROXY_URI_BASE = "PROXY_URI_BASE";
+  //update the proxy IP list about every 5 min
+  private static final long updateInterval = 5 * 60 * 1000;
+  
+  private String proxyHost;
+  private Set<String> proxyAddresses = null;
+  private long lastUpdate;
+  private String proxyUriBase;
+  
+  @Override
+  public void init(FilterConfig conf) throws ServletException {
+    proxyHost = conf.getInitParameter(PROXY_HOST);
+    proxyUriBase = conf.getInitParameter(PROXY_URI_BASE);
+  }
+  
+  private Set<String> getProxyAddresses() throws ServletException {
+    long now = System.currentTimeMillis();
+    if(proxyAddresses == null || (lastUpdate + updateInterval) >= now) {
+      synchronized(this) {
+        try {
+          proxyAddresses = new HashSet<String>();
+          for(InetAddress add : InetAddress.getAllByName(proxyHost)) {
+            proxyAddresses.add(add.getHostAddress());
+          }
+          lastUpdate = now;
+        } catch (UnknownHostException e) {
+          throw new ServletException("Could not locate "+proxyHost, e);
+        }
+      }
+    }
+    return proxyAddresses;
+  }
+
+  @Override
+  public void destroy() {
+    //Empty
+  }
+
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse resp,
+      FilterChain chain) throws IOException, ServletException {
+    if(!(req instanceof HttpServletRequest)) {
+      throw new ServletException("This filter only works for HTTP/HTTPS");
+    }
+    
+    HttpServletRequest httpReq = (HttpServletRequest)req;
+    HttpServletResponse httpResp = (HttpServletResponse)resp;
+    if(!getProxyAddresses().contains(httpReq.getRemoteAddr())) {
+      String redirectUrl = httpResp.encodeRedirectURL(proxyUriBase + 
+          httpReq.getRequestURI());
+      httpResp.sendRedirect(redirectUrl);
+      return;
+    }
+    
+    String user = null;
+    for(Cookie c: httpReq.getCookies()) {
+      if(WebAppProxyServlet.PROXY_USER_COOKIE_NAME.equals(c.getName())){
+        user = c.getValue();
+        break;
+      }
+    }
+    if(user == null) {
+      LOG.warn("Could not find "+WebAppProxyServlet.PROXY_USER_COOKIE_NAME
+          +" cookie, so user will not be set");
+      chain.doFilter(req, resp);
+    } else {
+      final AmIpPrincipal principal = new AmIpPrincipal(user);
+      ServletRequest requestWrapper = new AmIpServletRequestWrapper(httpReq, 
+          principal);
+      chain.doFilter(requestWrapper, resp);
+    }
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpPrincipal.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpPrincipal.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpPrincipal.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpPrincipal.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,34 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy.amfilter;
+
+import java.security.Principal;
+
+public class AmIpPrincipal implements Principal {
+  private final String name;
+  
+  public AmIpPrincipal(String name) {
+    this.name = name;
+  }
+  
+  @Override
+  public String getName() {
+    return name;
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpServletRequestWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpServletRequestWrapper.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpServletRequestWrapper.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpServletRequestWrapper.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,51 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy.amfilter;
+
+import java.security.Principal;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+public class AmIpServletRequestWrapper extends HttpServletRequestWrapper {
+  private final AmIpPrincipal principal;
+
+  public AmIpServletRequestWrapper(HttpServletRequest request, 
+      AmIpPrincipal principal) {
+    super(request);
+    this.principal = principal;
+  }
+
+  @Override
+  public Principal getUserPrincipal() {
+    return principal;
+  }
+
+  @Override
+  public String getRemoteUser() {
+    return principal.getName();
+  }
+
+  @Override
+  public boolean isUserInRole(String role) {
+    //No role info so far
+    return false;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestProxyUriUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestProxyUriUtils.java?rev=1189036&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestProxyUriUtils.java
(added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestProxyUriUtils.java
Wed Oct 26 06:29:21 2011
@@ -0,0 +1,104 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.webproxy;
+
+import static org.junit.Assert.*;
+
+import java.net.URI;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.junit.Test;
+
+public class TestProxyUriUtils {
+  public static class TestAppId extends ApplicationId {
+    private long timestamp;
+    private int id;
+
+    public TestAppId(int id, long timestamp) {
+      setId(id);
+      setClusterTimestamp(timestamp);
+    }
+    @Override
+    public int getId() {
+      return id;
+    }
+
+    @Override
+    public void setId(int id) {
+      this.id = id;
+    }
+
+    @Override
+    public long getClusterTimestamp() {
+      return timestamp;
+    }
+
+    @Override
+    public void setClusterTimestamp(long clusterTimestamp) {
+      this.timestamp = clusterTimestamp;
+    }
+  }
+  
+  @Test
+  public void testGetPathApplicationId() {
+    assertEquals("/proxy/application_100_0001", 
+        ProxyUriUtils.getPath(new TestAppId(1, 100l)));
+    assertEquals("/proxy/application_6384623_0005", 
+        ProxyUriUtils.getPath(new TestAppId(5, 6384623l)));
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testGetPathApplicationIdBad() {
+    ProxyUriUtils.getPath(null);
+  }
+  
+  @Test
+  public void testGetPathApplicationIdString() {
+    assertEquals("/proxy/application_6384623_0005", 
+        ProxyUriUtils.getPath(new TestAppId(5, 6384623l), null));
+    assertEquals("/proxy/application_6384623_0005/static/app",
+        ProxyUriUtils.getPath(new TestAppId(5, 6384623l), "/static/app"));
+    assertEquals("/proxy/application_6384623_0005/", 
+        ProxyUriUtils.getPath(new TestAppId(5, 6384623l), "/"));
+    assertEquals("/proxy/application_6384623_0005/some/path", 
+        ProxyUriUtils.getPath(new TestAppId(5, 6384623l), "some/path"));
+  }
+  
+  @Test 
+  public void testGetPathAndQuery() {
+    assertEquals("/proxy/application_6384623_0005/static/app?foo=bar",
+    ProxyUriUtils.getPathAndQuery(new TestAppId(5, 6384623l), "/static/app", 
+        "?foo=bar", false));
+    
+    assertEquals("/proxy/application_6384623_0005/static/app?foo=bar&bad=good&proxyapproved=true",
+        ProxyUriUtils.getPathAndQuery(new TestAppId(5, 6384623l), "/static/app", 
+            "foo=bar&bad=good", true));
+  }
+
+  @Test
+  public void testGetProxyUri() throws Exception {
+    URI originalUri = new URI("http://host.com/static/foo?bar=bar");
+    URI proxyUri = new URI("http://proxy.net:8080/");
+    TestAppId id = new TestAppId(5, 6384623l);
+    URI expected = new URI("http://proxy.net:8080/proxy/application_6384623_0005/static/foo?bar=bar");
+    URI result = ProxyUriUtils.getProxyUri(originalUri, proxyUri, id);
+    assertEquals(expected, result);
+  }
+
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml?rev=1189036&r1=1189035&r2=1189036&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml Wed
Oct 26 06:29:21 2011
@@ -37,6 +37,7 @@
   <modules>
     <module>hadoop-yarn-server-common</module>
     <module>hadoop-yarn-server-nodemanager</module>
+    <module>hadoop-yarn-server-web-proxy</module>
     <module>hadoop-yarn-server-resourcemanager</module>
     <module>hadoop-yarn-server-tests</module>
   </modules>

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/pom.xml?rev=1189036&r1=1189035&r2=1189036&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/pom.xml (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/pom.xml Wed Oct 26 06:29:21 2011
@@ -336,6 +336,11 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
         <version>${project.version}</version>
       </dependency>



Mime
View raw message