DashboardController xref

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.hicc.rest;

import java.io.StringWriter;
import java.util.Set;
import java.net.InetAddress;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;

import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Dashboard;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

import com.google.gson.Gson;
@Path("dashboard")
public class DashboardController {
  static final Logger LOG = Logger.getLogger(DashboardController.class);

  @Context
  private ServletContext context;
  @Context
  VelocityEngine velocity;

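  /**
   * Loads the dashboard with the given id for the authenticated user
   * and returns it serialized as JSON.
   */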
  @GET
  @Path("load/{id}")
  public String load(@Context HttpServletRequest request, @PathParam("id") String id) {
    Gson gson = new Gson();
    Dashboard dash = ChukwaHBaseStore.getDashboard(id, request.getRemoteUser());
    return gson.toJson(dash);
  }

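  /**
   * Deserializes the request body into a Dashboard and persists it under
   * the given id for the authenticated user. Responds with 400 Bad Request
   * if the update fails.
   */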
  @PUT
  @Path("save/{id}")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response save(@Context HttpServletRequest request, @PathParam("id") String id, String buffer) {
    Gson gson = new Gson();
    Dashboard dash = gson.fromJson(buffer, Dashboard.class);
    boolean result = ChukwaHBaseStore.updateDashboard(id, request.getRemoteUser(), dash);
    if (!result) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    return Response.ok().build();
  }

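  /**
   * Returns the remote user name of the caller, as reported by the
   * servlet container.
   */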
  @GET
  @Path("whoami")
  public String whoami(@Context HttpServletRequest request) {
    return request.getRemoteUser();
  }

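  /**
   * Renders an HTML fragment of quick links to the NameNode, ResourceManager
   * and HBase Master web UIs. Source names of the form "host:service" are
   * scanned for the known services, the host part is resolved to an IP
   * address where possible, and the matching web port is taken from the
   * Hadoop, YARN or HBase configuration.
   */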
  @GET
  @Path("quicklinks")
  @Produces(MediaType.TEXT_HTML)
  public String quicklinks() {
    // Named to avoid shadowing the injected ServletContext field "context".
    VelocityContext velocityContext = new VelocityContext();
    StringWriter sw = null;
    Configuration hconf = HBaseConfiguration.create();
    Configuration hadoop = new Configuration();
    String nn = "";
    String rm = "";
    String hm = "";
    Set<String> sourceNames = ChukwaHBaseStore.getSourceNames("");
    for (String source : sourceNames) {
      String[] sourceParts = source.split(":");
      if (sourceParts.length < 2) {
        continue;
      }
      // Pick the web UI port for each known service from configuration,
      // falling back to the stock defaults when the key is not set locally.
      if (sourceParts[1].equals("NameNode")) {
        String[] parts = hadoop.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
            DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT).split(":");
        nn = resolveAddress(sourceParts[0]) + ":" + parts[1];
      } else if (sourceParts[1].equals("ResourceManager")) {
        String[] parts = hadoop.get(YarnConfiguration.RM_WEBAPP_ADDRESS,
            YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS).split(":");
        rm = resolveAddress(sourceParts[0]) + ":" + parts[1];
      } else if (sourceParts[1].equals("Master")) {
        hm = resolveAddress(sourceParts[0]) + ":"
            + hconf.getInt("hbase.master.info.port", HConstants.DEFAULT_MASTER_INFOPORT);
      }
    }
    try {
      velocityContext.put("nn", nn);
      velocityContext.put("rm", rm);
      velocityContext.put("hm", hm);
      Template template = velocity.getTemplate("quick-links.vm");
      sw = new StringWriter();
      template.merge(velocityContext, sw);
    } catch (Exception e) {
      LOG.error("Failed to render quick links.", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /** Resolves a host name to its IP address, falling back to the raw name. */
  private static String resolveAddress(String host) {
    try {
      return InetAddress.getByName(host).getHostAddress();
    } catch (Exception e) {
      return host;
    }
  }
}
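
As a usage sketch, the endpoints above can be exercised with a plain HTTP client. The snippet below is illustrative only: the base URL, the "default" dashboard id, and the Java 9+ InputStream.readAllBytes() call are assumptions about the deployment, not part of this class.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class DashboardClientSketch {
  // Hypothetical base URL; adjust to wherever HICC is actually deployed.
  private static final String BASE = "http://localhost:4080/hicc/v1/dashboard";

  public static void main(String[] args) throws Exception {
    // GET dashboard/load/{id}: fetch the "default" dashboard as JSON.
    HttpURLConnection get =
        (HttpURLConnection) new URL(BASE + "/load/default").openConnection();
    String json = new String(get.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
    System.out.println("loaded: " + json);

    // PUT dashboard/save/{id}: send the (possibly edited) JSON back.
    HttpURLConnection put =
        (HttpURLConnection) new URL(BASE + "/save/default").openConnection();
    put.setRequestMethod("PUT");
    put.setDoOutput(true);
    put.setRequestProperty("Content-Type", "application/json");
    try (OutputStream out = put.getOutputStream()) {
      out.write(json.getBytes(StandardCharsets.UTF_8));
    }
    // 200 OK on success, 400 Bad Request if the update was rejected.
    System.out.println("save status: " + put.getResponseCode());
  }
}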