There are some good answers here, but none of them let me run a cron job so I could do a daily roll-up. We have journald log files written daily to HDFS, and I did not want to run a query in Hive by hand every day when I came in to work.
What I ended up doing seemed more straightforward to me. I wrote a Java program that uses the ORC libraries to scan all the files in a directory and build a List of those files. It then opens a new Writer for the "combined" file (which starts with a "." so it is hidden from Hive, otherwise Hive would fail). The program then opens each file in the list, reads its contents, and writes them to the combined file. When all the files have been read, it deletes them. I also added the ability to process one directory at a time in case that is needed.
NOTE: You will need a schema file. Journald logs can be output as JSON with "journalctl -o json", and you can then use the Apache ORC tools to generate a schema file, or write one by hand. The schema auto-generated by ORC is good, but a hand-written one is always better.
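For illustration, a hand-written journald.schema could look something like the line below. The field names are just common journald JSON fields and the string types are my assumption; treat it as a sketch and adjust it to whatever "journalctl -o json" actually emits on your systems, since the program reads this file from the classpath and feeds it straight into TypeDescription.fromString:

struct<__REALTIME_TIMESTAMP:string,_HOSTNAME:string,_SYSTEMD_UNIT:string,PRIORITY:string,SYSLOG_IDENTIFIER:string,MESSAGE:string>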
NOTE: To use this code as-is, you will need a valid keytab and to pass -Dkeytab= as a JVM argument (it is read as a system property).
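To tie this back to the cron use case, an invocation could look roughly like the following. The jar name, keytab path, and schedule are only placeholders and not part of the program below; with no argument it rolls up today's directory for every host, and you can pass a single directory instead to roll up just that one:

30 0 * * * java -Dkeytab=/etc/security/keytabs/<userName>.keytab -cp "orc-rollup.jar:$(hadoop classpath)" OrcFileRollUp

java -Dkeytab=/etc/security/keytabs/<userName>.keytab -cp "orc-rollup.jar:$(hadoop classpath)" OrcFileRollUp /<baseDir>/logs/<host>/<year>/<month>/<day>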
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import com.cloudera.org.joda.time.LocalDate;
public class OrcFileRollUp {

    private final static String SCHEMA = "journald.schema";
    private final static String UTF_8 = "UTF-8";
    private final static String HDFS_BASE_LOGS_DIR = "/<baseDir>/logs";
    private static final String keytabLocation = System.getProperty("keytab");
    private static final String kerberosUser = "<userName>";
    private static Writer writer;
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        InetAddress myHost = InetAddress.getLocalHost();
        String kerberosPrincipal = String.format("%s/%s", kerberosUser, myHost.getHostName());
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, keytabLocation);

        int currentDay = LocalDate.now().getDayOfMonth();
        int currentMonth = LocalDate.now().getMonthOfYear();
        int currentYear = LocalDate.now().getYear();

        Path path = new Path(HDFS_BASE_LOGS_DIR);
        FileSystem fileSystem = path.getFileSystem(conf);
        System.out.println("The URI is: " + fileSystem.getUri());

        // One subdirectory per host under the base logs directory.
        List<String> allHostsPath = getHosts(path, fileSystem);

        TypeDescription schema = TypeDescription.fromString(getSchema(SCHEMA)
            .replaceAll("\n", ""));

        for (int i = 0; i < allHostsPath.size(); i++) {
            String outFile = "." + currentYear + "_" + currentMonth + "_" + currentDay + ".orc.working";
            String directory = "";
            Path outFilePath;
            Path argsPath;
            List<String> orcFiles;
            if (args.length == 0) {
                // No argument given: roll up today's directory for every host.
                directory = currentYear + "/" + currentMonth + "/" + currentDay;
                outFilePath = new Path(allHostsPath.get(i) + "/" + directory + "/" + outFile);
                try {
                    orcFiles = getAllFilePath(new Path(allHostsPath.get(i) + "/" + directory), fileSystem);
                } catch (Exception e) {
                    continue;
                }
            } else {
                // A directory was passed in: roll up just that one directory.
                outFilePath = new Path(args[0] + "/" + outFile);
                argsPath = new Path(args[0]);
                try {
                    orcFiles = getAllFilePath(argsPath, fileSystem);
                } catch (Exception e) {
                    continue;
                }
            }

            FileSystem fs = outFilePath.getFileSystem(conf);
            if (fs.exists(outFilePath)) {
                System.out.println(outFilePath + " exists, delete before continuing.");
                continue;
            }
            writer = OrcFile.createWriter(outFilePath, OrcFile.writerOptions(conf)
                .setSchema(schema));

            for (int j = 0; j < orcFiles.size(); j++) {
                // Copy every row batch of the source file into the combined file,
                // then delete the source file.
                Reader reader = OrcFile.createReader(new Path(orcFiles.get(j)), OrcFile.readerOptions(conf));
                VectorizedRowBatch batch = reader.getSchema().createRowBatch();
                RecordReader rows = reader.rows();
                while (rows.nextBatch(batch)) {
                    writer.addRowBatch(batch);
                }
                rows.close();
                fs.delete(new Path(orcFiles.get(j)), false);
            }
            writer.close();

            // Strip the leading "." and the ".working" suffix so Hive can see the file.
            outFile = fileSystem.getFileStatus(outFilePath)
                .getPath()
                .getName();
            if (outFile.startsWith(".")) {
                outFile = outFile.substring(1);
                int lastIndexOf = outFile.lastIndexOf(".working");
                outFile = outFile.substring(0, lastIndexOf);
            }
            Path parent = outFilePath.getParent();
            fileSystem.rename(outFilePath, new Path(parent, outFile));

            if (args.length != 0)
                break;
        }
    }
    private static String getSchema(String resource) throws IOException {
        try (InputStream input = OrcFileRollUp.class.getResourceAsStream("/" + resource)) {
            return IOUtils.toString(input, UTF_8);
        }
    }
    public static List<String> getHosts(Path filePath, FileSystem fs) throws FileNotFoundException, IOException {
        List<String> hostsList = new ArrayList<String>();
        FileStatus[] fileStatus = fs.listStatus(filePath);
        for (FileStatus fileStat : fileStatus) {
            hostsList.add(fileStat.getPath().toString());
        }
        return hostsList;
    }
    private static List<String> getAllFilePath(Path filePath, FileSystem fs) throws FileNotFoundException, IOException {
        List<String> fileList = new ArrayList<String>();
        FileStatus[] fileStatus = fs.listStatus(filePath);
        for (FileStatus fileStat : fileStatus) {
            if (fileStat.isDirectory()) {
                fileList.addAll(getAllFilePath(fileStat.getPath(), fs));
            } else {
                fileList.add(fileStat.getPath()
                    .toString());
            }
        }
        // Keep only .orc files; removeIf avoids skipping elements, which happens
        // when removing from a list while indexing forward through it.
        fileList.removeIf(f -> !f.endsWith(".orc"));
        return fileList;
    }
}
Does CONCATENATE work for external tables? As I understand it, it does not. - Omar Ali