/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.backup.mapreduce;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.IncrementalRestoreService;
import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.mapreduce.WALPlayer;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

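/**
 * MapReduce implementation of {@link IncrementalRestoreService}. For each table to restore, it
 * replays the backed-up WAL files into HFiles with {@link WALPlayer} and then bulk-loads those
 * HFiles into the target table with {@link LoadIncrementalHFiles}.
 */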
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MapReduceRestoreService implements IncrementalRestoreService {
  public static final Log LOG = LogFactory.getLog(MapReduceRestoreService.class);

  private WALPlayer player;

  public MapReduceRestoreService() {
    this.player = new WALPlayer();
  }

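  /**
   * Restores an incremental backup. For each source table, a {@link WALPlayer} job replays the
   * backed-up WAL files into HFiles, which are then bulk-loaded into the corresponding target
   * table; tableNames[i] is restored into newTableNames[i], so both arrays must have the same
   * length.
   * @param logDirPaths directories containing the backed-up WAL files
   * @param tableNames source tables whose edits are contained in the backup
   * @param newTableNames target tables to restore into
   * @throws IOException if the WALPlayer job or the bulk load fails
   */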
  @Override
  public void run(Path[] logDirPaths, TableName[] tableNames, TableName[] newTableNames)
      throws IOException {
    String logDirs = StringUtils.join(logDirPaths, ",");
    LOG.info("Restore incremental backup from directory " + logDirs + " from hbase tables "
        + BackupServerUtil.join(tableNames) + " to tables " + BackupServerUtil.join(newTableNames));

    for (int i = 0; i < tableNames.length; i++) {
      LOG.info("Restore " + tableNames[i] + " into " + newTableNames[i]);

      Path bulkOutputPath = getBulkOutputDir(getFileNameCompatibleString(newTableNames[i]));
      String[] playerArgs =
          { logDirs, tableNames[i].getNameAsString(), newTableNames[i].getNameAsString() };

      int result = 0;
      int loaderResult = 0;
      try {
        // Run WALPlayer in bulk-output mode: it converts the backed-up WAL edits into
        // HFiles under bulkOutputPath instead of writing them directly to the table.
        Configuration conf = getConf();
        conf.set(WALPlayer.BULK_OUTPUT_CONF_KEY, bulkOutputPath.toString());
        player.setConf(conf);
        result = player.run(playerArgs);
        if (succeeded(result)) {
          // Bulk-load the HFiles produced by WALPlayer into the target table.
          LoadIncrementalHFiles loader = createLoader();
          if (LOG.isDebugEnabled()) {
            LOG.debug("Restoring HFiles from directory " + bulkOutputPath);
          }
          String[] args = { bulkOutputPath.toString(), newTableNames[i].getNameAsString() };
          loaderResult = loader.run(args);
          if (failed(loaderResult)) {
            throw new IOException("Cannot restore from backup directory " + logDirs
                + " (check Hadoop and HBase logs). Bulk loader return code = " + loaderResult);
          }
        } else {
          throw new IOException("Cannot restore from backup directory " + logDirs
              + " (check Hadoop/MR and HBase logs). WALPlayer return code = " + result);
        }
        LOG.debug("Restore Job finished: " + result);
      } catch (IOException e) {
        // Do not re-wrap IOExceptions thrown above; they already carry a specific message.
        throw e;
      } catch (Exception e) {
        throw new IOException("Cannot restore from backup directory " + logDirs
            + " (check Hadoop and HBase logs) ", e);
      }
    }
  }

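  /**
   * Returns the namespace and qualifier joined with '-' instead of ':' so the result can be
   * safely embedded in a file name.
   */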
  private String getFileNameCompatibleString(TableName table) {
    return table.getNamespaceAsString() + "-" + table.getQualifierAsString();
  }

  private boolean failed(int result) {
    return result != 0;
  }

  private boolean succeeded(int result) {
    return result == 0;
  }

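  /**
   * Creates a {@link LoadIncrementalHFiles} with a configuration tuned for restore: the RPC
   * timeout is raised to one hour and the per-region, per-family HFile limit is removed.
   */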
  private LoadIncrementalHFiles createLoader() throws IOException {
    // Bulk loading many large HFiles can exceed the default RPC timeout, so raise it to
    // one hour for the restore.
    int milliSecInHour = 3600000;
    Configuration conf = new Configuration(getConf());
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, milliSecInHour);

    // The loader fails if a region receives more HFiles per column family than this limit
    // (32 by default); a large restore can easily exceed that, so lift the cap.
    conf.setInt(LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY, Integer.MAX_VALUE);
    LoadIncrementalHFiles loader = null;
    try {
      loader = new LoadIncrementalHFiles(conf);
    } catch (Exception e) {
      throw new IOException(e);
    }
    return loader;
  }

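  /**
   * Returns a unique, timestamped directory under hbase.tmp.dir to receive WALPlayer's bulk
   * output for the given table.
   */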
  private Path getBulkOutputDir(String tableName) throws IOException {
    Configuration conf = getConf();
    FileSystem fs = FileSystem.get(conf);
    String tmp = conf.get("hbase.tmp.dir");
    Path path = new Path(tmp, "bulk_output-" + tableName + "-"
        + EnvironmentEdgeManager.currentTime());
    // Register the staging directory for deletion when the file system is closed.
    fs.deleteOnExit(path);
    return path;
  }
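  // getConf()/setConf() delegate to the wrapped WALPlayer so that the service and the player
  // always share one configuration.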
  @Override
  public Configuration getConf() {
    return player.getConf();
  }

  @Override
  public void setConf(Configuration conf) {
    this.player.setConf(conf);
  }

}