ServletDiagnostics xref

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.datacollection.collector.servlet;


import java.io.PrintStream;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.log4j.Logger;
import java.util.*;

/**
 * Collects diagnostic statistics about one HTTP post to the collector
 * servlet; one instance is created per post.
 */
@Deprecated
public class ServletDiagnostics {

  static Logger log = Logger.getLogger(ServletDiagnostics.class);

  static int CHUNKS_TO_KEEP = 50;
  static int CHUNKS_TO_DISPLAY = 50;

  private static class PostStats { // statistics about one post
    public PostStats(String src, int count, long receivedTs) {
      this.count = count;
      this.src = src;
      this.receivedTs = receivedTs;
      types = new String[count];
      names = new String[count];
      lengths = new int[count];

      seenChunkCount = 0;
      dataSize = 0;
    }

    final int count;
    final String src;
    final long receivedTs;
    final String[] types, names;
    final int[] lengths;

    int seenChunkCount;
    long dataSize;

    public void addChunk(ChunkImpl c, int position) {
      if (position != seenChunkCount)
        log.warn("servlet collector is passing chunk " + position
            + " but diagnostics has seen " + seenChunkCount);
      else if (seenChunkCount >= count) {
        log.warn("too many chunks in post declared as length " + count);
      } else {
        types[seenChunkCount] = c.getDataType();
        lengths[seenChunkCount] = c.getData().length;
        names[seenChunkCount] = c.getStreamName();
        dataSize += c.getData().length;
        ++seenChunkCount;
      }
    }
  }

  static LinkedList<PostStats> lastPosts = new LinkedList<PostStats>();
  PostStats curPost;

  public void sawPost(String source, int chunks, long receivedTs) {
    if (curPost != null) {
      log.warn("should only have one HTTP post per ServletDiagnostics");
      doneWithPost();
    }
    curPost = new PostStats(source, chunks, receivedTs);
  }

  public void sawChunk(ChunkImpl c, int pos) {
    curPost.addChunk(c, pos);
  }

  public static void printPage(PrintStream out) {

    HashMap<String, Long> bytesFromHost = new HashMap<String, Long>();
    long timeWindowOfSample = Long.MAX_VALUE;
    long now = System.currentTimeMillis();

    out.println("<ul>");

    synchronized (lastPosts) {
      // only the most recent CHUNKS_TO_DISPLAY posts are listed individually
      int toSkip = lastPosts.size() - CHUNKS_TO_DISPLAY;

      if (!lastPosts.isEmpty())
        timeWindowOfSample = now - lastPosts.peek().receivedTs;

      for (PostStats stats : lastPosts) {
        // accumulate per-host byte totals across all retained posts
        Long oldBytes = bytesFromHost.get(stats.src);
        long newBytes = stats.dataSize;
        if (oldBytes != null)
          newBytes += oldBytes;
        bytesFromHost.put(stats.src, newBytes);

        if (--toSkip < 0) { // done skipping; display this post
          out.print("<li>");

          out.print(stats.dataSize + " bytes from " + stats.src
              + " at timestamp " + stats.receivedTs);
          out.println(" which was " + ((now - stats.receivedTs) / 1000)
              + " seconds ago");

          out.println("<ol>");
          for (int i = 0; i < stats.count; ++i)
            out.println("<li> " + stats.lengths[i] + " bytes of type "
                + stats.types[i] + ".  Adaptor name = " + stats.names[i]
                + " </li>");
          out.println("</ol></li>");
        }
      }
    }
    out.println("</ul>");
    out.println("<ul>");
    // per-host throughput over the window from the oldest retained post to now
    for (Map.Entry<String, Long> h : bytesFromHost.entrySet()) {
      out.print("<li>rate from " + h.getKey() + " was "
          + (1000 * h.getValue() / timeWindowOfSample));
      out.println(" bytes/second in last " + timeWindowOfSample / 1000
          + " seconds.</li>");
    }

    out.println("</ul>");
    out.println("total of " + bytesFromHost.size() + " unique hosts seen");

    out.println("<p>current time is " + System.currentTimeMillis() + " </p>");
  }

  public void doneWithPost() {
    synchronized (lastPosts) {
      if (lastPosts.size() > CHUNKS_TO_KEEP)
        lastPosts.removeFirst(); // discard the oldest post to bound the list
      lastPosts.add(curPost);
    }
  }

}
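
For reference, a minimal usage sketch follows. It is not part of the original file: the class name ServletDiagnosticsUsageSketch and its methods recordPost and renderStatus are hypothetical, and the sketch assumes the caller already holds the decoded ChunkImpl objects for one HTTP post. Only methods actually defined on ServletDiagnostics above (sawPost, sawChunk, doneWithPost, printPage) are exercised.

package org.apache.hadoop.chukwa.datacollection.collector.servlet;

import java.io.PrintStream;
import java.util.List;

import org.apache.hadoop.chukwa.ChunkImpl;

// Illustrative only; the class and method names below are assumptions,
// not part of Chukwa.
public class ServletDiagnosticsUsageSketch {

  /**
   * Record one HTTP post: declare the post, report each chunk in order,
   * then commit the statistics to the shared history.
   */
  public static void recordPost(String sourceHost, List<ChunkImpl> chunks) {
    ServletDiagnostics diag = new ServletDiagnostics(); // one instance per post
    diag.sawPost(sourceHost, chunks.size(), System.currentTimeMillis());
    for (int i = 0; i < chunks.size(); ++i) {
      diag.sawChunk(chunks.get(i), i);
    }
    diag.doneWithPost();
  }

  /** Render the diagnostics fragment, e.g. from a status page's doGet. */
  public static void renderStatus(PrintStream out) {
    out.println("<html><body>");
    ServletDiagnostics.printPage(out); // emits the <ul>/<ol> markup above
    out.println("</body></html>");
  }
}

Because lastPosts is static and is only touched inside synchronized blocks, printPage can safely run while other posts are being recorded; the sketch relies only on that property of the class above.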