This project has retired. For details please refer to its Attic page.
DashboardController xref
View Javadoc

1   /*
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.chukwa.hicc.rest;
19  
import java.io.StringWriter;
import java.net.InetAddress;
import java.util.Set;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;

import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Dashboard;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
50  
51  @Path("dashboard")
52  public class DashboardController {
53    static Logger LOG = Logger.getLogger(DashboardController.class);
54  
55    @Context
56    private ServletContext context;
57    @Context
58    VelocityEngine velocity;
59  
60    /**
61     * Load a dashboard view
62     * 
63     * @param request HTTP request object
64     * @param id is dashboard unique identifier
65     * @return Dashboard view in JSON structure
66     * 
67     * @response.representation.200.doc Display dashboard JSON structure
68     * @response.representation.200.mediaType application/json
69     * @response.representation.200.example {@link Examples#SYSTEM_DASHBOARD}
70     */
71    @GET
72    @Produces(MediaType.APPLICATION_JSON)
73    @Path("load/{id}")
74    public String load(@Context HttpServletRequest request, @PathParam("id") String id) {
75      Gson gson = new Gson();
76      Dashboard dash = ChukwaHBaseStore.getDashboard(id, request.getRemoteUser());
77      String json = gson.toJson(dash);
78      return json;
79    }
80    
81    /**
82     * Save a dashboard view
83     * 
84     * @param request HTTP request object
85     * @param id is dashboard unique identifier
86     * @param buffer is JSON structure of dashboard view
87     * @return Status of the dashboard save
88     * 
89     * @request.representation.example {@link Examples#USER_DASHBOARD}
90     * @response.representation.200.doc Display save status code
91     * @response.representation.200.mediaType text/plain
92     */
93    @PUT
94    @Path("save/{id}")
95    @Consumes(MediaType.APPLICATION_JSON)
96    public Response save(@Context HttpServletRequest request, @PathParam("id") String id, String buffer) {
97      Gson gson = new Gson();
98      Dashboard dash = gson.fromJson(buffer, Dashboard.class);
99      boolean result = ChukwaHBaseStore.updateDashboard(id, request.getRemoteUser(), dash);
100     if(!result) {
101       return Response.status(Status.BAD_REQUEST).build();
102     }
103     return Response.ok().build();
104   }
105   
106   /**
107    * Return HTTP request connection user name
108    * 
109    * @param request HTTP request object
110    * @return username
111    * 
112    * @response.representation.200.doc Username
113    * @response.representation.200.mediaType text/plain
114    * @response.representation.200.example admin
115    */
116   @GET
117   @Path("whoami")
118   @Produces(MediaType.TEXT_PLAIN)
119   public String whoami(@Context HttpServletRequest request) {
120     return request.getRemoteUser();
121   }
122   
123   /**
124    * Render Quick links to various services web portals
125    * 
126    * @return HTML page of quick links
127    * 
128    * @response.representation.200.doc Display quick link widget
129    * @response.representation.200.mediaType text/html
130    * @response.representation.200.example Example is availabe on HICC UI
131    */
132   @GET
133   @Path("quicklinks")
134   @Produces(MediaType.TEXT_HTML)
135   public String quicklinks() {
136     VelocityContext context = new VelocityContext();
137     StringWriter sw = null;
138     Configuration hconf = HBaseConfiguration.create();
139     Configuration hadoop = new Configuration();
140     String nn = "";
141     String rm = "";
142     String hm = "";
143     Set<String> sourceNames = ChukwaHBaseStore.getSourceNames("");
144     for (String source : sourceNames) {
145       String[] sourceParts = source.split(":");
146       if(sourceParts.length<2) {
147         continue;
148       }
149       if(sourceParts[1].equals("NameNode")) {
150         String[] parts = hadoop.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY).split(":");
151         StringBuilder buffer = new StringBuilder();
152         try {
153           InetAddress address = InetAddress.getByName(sourceParts[0]);
154           buffer.append(address.getHostAddress());
155         } catch (Exception e) {
156           buffer.append(sourceParts[0]);
157         } 
158         buffer.append(":");
159         buffer.append(parts[1]);
160         nn = buffer.toString();
161       } else if(sourceParts[1].equals("ResourceManager")) {
162         String[] parts = hadoop.get(YarnConfiguration.RM_WEBAPP_ADDRESS).split(":");
163         StringBuilder buffer = new StringBuilder();
164         try {
165           InetAddress address = InetAddress.getByName(sourceParts[0]);
166           buffer.append(address.getHostAddress());
167         } catch (Exception e) {
168           buffer.append(sourceParts[0]);
169         } 
170         buffer.append(":");
171         buffer.append(parts[1]);
172         rm = buffer.toString();
173       } else if(sourceParts[1].equals("Master")) {
174         StringBuilder buffer = new StringBuilder();
175         try {
176           InetAddress address = InetAddress.getByName(sourceParts[0]);
177           buffer.append(address.getHostAddress());
178         } catch (Exception e) {
179           buffer.append(sourceParts[0]);
180         } 
181         buffer.append(":");
182         buffer.append(hconf.getInt("hbase.master.info.port", HConstants.DEFAULT_MASTER_INFOPORT));
183         hm = buffer.toString();
184       }
185     }
186     try {
187       context.put("nn", nn);
188       context.put("rm", rm);
189       context.put("hm", hm);
190       Template template = velocity.getTemplate("quick-links.vm");
191       sw = new StringWriter();
192       template.merge(context, sw);
193     } catch (Exception e) {
194       e.printStackTrace();
195       return e.getMessage();
196     }
197     return sw.toString();
198   }
199 }