ServletDiagnostics xref
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.datacollection.collector.servlet;


import java.io.PrintStream;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.log4j.Logger;


/**
 * Tracks and renders diagnostics for the HTTP collector servlet: which hosts
 * recently posted data, how many chunks each post carried, and at what rate.
 */
@Deprecated
public class ServletDiagnostics {

  static Logger log = Logger.getLogger(ServletDiagnostics.class);

  /** Cap on retained posts, and on how many of them printPage itemizes. */
  static int CHUNKS_TO_KEEP = 50;
  static int CHUNKS_TO_DISPLAY = 50;

  /** Statistics for a single HTTP post: source, arrival time, and per-chunk metadata. */
  private static class PostStats {

    public PostStats(String src, int count, long receivedTs) {
      this.count = count;
      this.src = src;
      this.receivedTs = receivedTs;
      types = new String[count];
      names = new String[count];
      lengths = new int[count];

      seenChunkCount = 0;
      dataSize = 0;
    }

    final int count;
    final String src;
    final long receivedTs;
    final String[] types, names;
    final int[] lengths;

    int seenChunkCount;
    long dataSize;

    public void addChunk(ChunkImpl c, int position) {
      if (position != seenChunkCount)
        log.warn("servlet collector is passing chunk " + position
            + " but diagnostics has seen " + seenChunkCount);
      else if (seenChunkCount >= count) {
        log.warn("too many chunks in post declared as length " + count);
      } else {
        types[seenChunkCount] = c.getDataType();
        lengths[seenChunkCount] = c.getData().length;
        names[seenChunkCount] = c.getStreamName();
        dataSize += c.getData().length;
        ++seenChunkCount;
      }
    }
  }

  /** Recent posts, oldest first; all access synchronizes on the list itself. */
  static LinkedList<PostStats> lastPosts = new LinkedList<PostStats>();

  PostStats curPost;

  /** Begins recording a new post that declares {@code chunks} chunks. */
  public void sawPost(String source, int chunks, long receivedTs) {
    if (curPost != null) {
      log.warn("should only have one HTTP post per ServletDiagnostics");
      doneWithPost();
    }
    curPost = new PostStats(source, chunks, receivedTs);
  }

  public void sawChunk(ChunkImpl c, int pos) {
    curPost.addChunk(c, pos);
  }

  /** Writes an HTML fragment listing recent posts and per-host data rates. */
  public static void printPage(PrintStream out) {

    HashMap<String, Long> bytesFromHost = new HashMap<String, Long>();
    long timeWindowOfSample = Long.MAX_VALUE;
    long now = System.currentTimeMillis();

    out.println("<ul>");

    synchronized (lastPosts) {
      int toSkip = lastPosts.size() - CHUNKS_TO_DISPLAY;

      if (!lastPosts.isEmpty())
        timeWindowOfSample = now - lastPosts.peek().receivedTs;

      for (PostStats stats : lastPosts) {
        // Sum bytes per source host across every retained post.
        Long oldBytes = bytesFromHost.get(stats.src);
        long newBytes = stats.dataSize;
        if (oldBytes != null)
          newBytes += oldBytes;
        bytesFromHost.put(stats.src, newBytes);

        // Itemize only the newest CHUNKS_TO_DISPLAY posts.
        if (--toSkip < 0) {
          out.print("<li>");

          out.print(stats.dataSize + " bytes from " + stats.src
              + " at timestamp " + stats.receivedTs);
          out.println(" which was " + ((now - stats.receivedTs) / 1000)
              + " seconds ago");

          out.println("<ol>");
          for (int i = 0; i < stats.count; ++i)
            out.println("<li> " + stats.lengths[i] + " bytes of type "
                + stats.types[i] + ". Adaptor name = " + stats.names[i]
                + " </li>");
          out.println("</ol></li>");
        }
      }
    }
    out.println("</ul>");
    out.println("<ul>");
    for (Map.Entry<String, Long> h : bytesFromHost.entrySet()) {
      out.print("<li>rate from " + h.getKey() + " was "
          + (1000 * h.getValue() / timeWindowOfSample));
      out.println(" bytes/second in last " + timeWindowOfSample / 1000
          + " seconds.</li>");
    }

    out.println("</ul>");
    out.println("total of " + bytesFromHost.size() + " unique hosts seen");

    out.println("<p>current time is " + System.currentTimeMillis() + " </p>");
  }

  /** Commits the current post's stats, evicting the oldest once the cap is exceeded. */
  public void doneWithPost() {
    synchronized (lastPosts) {
      if (lastPosts.size() > CHUNKS_TO_KEEP)
        lastPosts.removeFirst();
      lastPosts.add(curPost);
    }
  }

}
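
For context, the sketch below shows the call sequence a collector servlet is expected to follow, using one ServletDiagnostics per HTTP post. The DiagnosticsExample class, the host and stream names, and the ChunkImpl constructor arguments (including the null adaptor) are illustrative assumptions rather than code taken from Chukwa's actual ServletCollector; only the ServletDiagnostics calls come from the listing above.

// Hypothetical driver, not Chukwa's ServletCollector: illustrates the
// sawPost -> sawChunk -> doneWithPost lifecycle and the static status page.
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletDiagnostics;

public class DiagnosticsExample {
  public static void main(String[] args) {
    ServletDiagnostics diag = new ServletDiagnostics();  // one instance per post

    byte[] data = "sample log line\n".getBytes();

    // A post from one host declaring two chunks up front.
    diag.sawPost("host1.example.com", 2, System.currentTimeMillis());

    // Assumed ChunkImpl constructor: (dataType, streamName, seqId, data, adaptor);
    // a null adaptor is enough for illustration.
    diag.sawChunk(new ChunkImpl("SysLog", "/var/log/messages", data.length, data, null), 0);
    diag.sawChunk(new ChunkImpl("SysLog", "/var/log/messages", 2L * data.length, data, null), 1);

    diag.doneWithPost();  // commit stats to the shared lastPosts list

    // Render the HTML diagnostics fragment, as a status page would.
    ServletDiagnostics.printPage(System.out);
  }
}

Note that printPage is static and reads the shared lastPosts list, so every diagnostics object that has called doneWithPost contributes to the page, regardless of which servlet thread created it.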