TempFileUtil xref

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.chukwa.util;

import java.io.*;
import java.nio.charset.Charset;
import java.util.Calendar;
import java.util.Random;

import org.apache.hadoop.chukwa.ChukwaArchiveKey;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;

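/**
 * Test helpers for generating temporary files on local disk: random binary
 * files, plain-text files of numbered lines, and Chukwa sink SequenceFiles
 * filled with randomly generated chunks.
 */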
public class TempFileUtil {
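  /**
   * Writes {@code length} random bytes to a file named "chukwaTest" under the
   * test.build.data directory (default /tmp). The final byte is a newline so
   * the default tailing adapter will emit the data.
   */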
  public static File makeBinary(int length) throws IOException {
    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
        "chukwaTest");
    FileOutputStream fos = null;
    try {
      fos = new FileOutputStream(tmpOutput);
      Random r = new Random();
      byte[] randomData = new byte[length];
      r.nextBytes(randomData);
      randomData[length - 1] = '\n'; // need data to end with \n since default
                                     // tailer uses that
      fos.write(randomData);
      fos.flush();
    } finally {
      if (fos != null) {
        fos.close();
      }
    }
    return tmpOutput;
  }

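  /**
   * Produces random log-line chunks with monotonically increasing sequence IDs,
   * tagged with cluster="foocluster".
   */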
  static class RandSeqFileWriter {
    java.util.Random r = new java.util.Random();
    long lastSeqID = 0;

    public ChunkImpl getARandomChunk() {
      int ms = r.nextInt(1000);
      String line = "2008-05-29 10:42:22," + ms
          + " INFO org.apache.hadoop.dfs.DataNode: Some text goes here"
          + r.nextInt() + "\n";

      ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test",
          line.length() + lastSeqID, line.getBytes(Charset.forName("UTF-8")), null);
      lastSeqID += line.length();
      c.addTag("cluster=\"foocluster\"");
      return c;
    }
  }

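  /**
   * Writes a Chukwa sink SequenceFile of {@code chunks} random chunks to
   * {@code dest}, keyed by ChukwaArchiveKey with a fixed time partition of
   * 2008-05-29 10:00.
   */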
  public static void writeASinkFile(Configuration conf, FileSystem fileSys, Path dest,
      int chunks) throws IOException {
    FSDataOutputStream out = fileSys.create(dest);

    Calendar calendar = Calendar.getInstance();
    SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(conf, out,
        ChukwaArchiveKey.class, ChunkImpl.class,
        SequenceFile.CompressionType.NONE, null);
    RandSeqFileWriter rw = new RandSeqFileWriter();
    for (int i = 0; i < chunks; ++i) {
      ChunkImpl chunk = rw.getARandomChunk();
      ChukwaArchiveKey archiveKey = new ChukwaArchiveKey();

      calendar.set(Calendar.YEAR, 2008);
      calendar.set(Calendar.MONTH, Calendar.MAY);
      calendar.set(Calendar.DAY_OF_MONTH, 29);
      calendar.set(Calendar.HOUR, 10);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      archiveKey.setTimePartition(calendar.getTimeInMillis());
      archiveKey.setDataType(chunk.getDataType());
      archiveKey.setStreamName(chunk.getStreamName());
      archiveKey.setSeqId(chunk.getSeqID());
      seqFileWriter.append(archiveKey, chunk);
    }
    seqFileWriter.close();
    out.close();
  }

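  /**
   * Writes a UTF-8 text file of {@code size} numbered alphabet lines into
   * {@code baseDir}.
   */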
  public static File makeTestFile(String name, int size, File baseDir) throws IOException {
    File tmpOutput = new File(baseDir, name);
    FileOutputStream fos = new FileOutputStream(tmpOutput);
    PrintWriter pw = new PrintWriter(new OutputStreamWriter(fos, Charset.forName("UTF-8")));
    for (int i = 0; i < size; ++i) {
      pw.print(i + " ");
      pw.println("abcdefghijklmnopqrstuvwxyz");
    }
    pw.flush();
    pw.close();
    return tmpOutput;
  }

  public static File makeTestFile(String name, int size) throws IOException {
    return makeTestFile(name, size, new File(System.getProperty("test.build.data", "/tmp")));
  }

  public static File makeTestFile(File baseDir) throws IOException {
    return makeTestFile("atemp", 10, baseDir);
  }

  public static File makeTestFile() throws IOException {
    return makeTestFile("atemp", 80, new File(System.getProperty("test.build.data", "/tmp")));
  }

}
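
A minimal sketch of how these helpers might be called from a test. The class name, file names, and local-FileSystem setup below are illustrative assumptions, not part of this class:

import java.io.File;

import org.apache.hadoop.chukwa.util.TempFileUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical caller, for illustration only.
public class TempFileUtilExample {
  public static void main(String[] args) throws Exception {
    // Plain-text file of 20 numbered lines under test.build.data (or /tmp).
    File textFile = TempFileUtil.makeTestFile("example.txt", 20);

    // 1 KB of random bytes, terminated with '\n' for the tailing adapter.
    File binFile = TempFileUtil.makeBinary(1024);

    // A sink SequenceFile of 5 random chunks, written to the local file system.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path sink = new Path(textFile.getParent(), "example.done");
    TempFileUtil.writeASinkFile(conf, fs, sink, 5);

    System.out.println("wrote " + textFile + ", " + binFile + ", " + sink);
  }
}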