Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,14 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
Expand All @@ -48,6 +54,20 @@ public class ConfBlock extends HtmlBlock {
appContext = appctx;
}

/**
 * URL-decodes a value that was stored URL-encoded in the job configuration.
 * If decoding fails, the original value is returned unchanged so that page
 * rendering never fails on a malformed or undecodable value. UTF-8 support
 * is mandated by the Java platform, so the fallback should not trigger in
 * practice.
 *
 * @param value the URL-encoded string to decode
 * @return the decoded string, or the original {@code value} if decoding fails
 */
private String urlDecode(String value) {
  try {
    return URLDecoder.decode(value, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    // Deliberately swallowed: rendering the raw (still encoded) value is
    // preferable to failing the whole configuration page render.
    return value;
  }
}

/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
Expand Down Expand Up @@ -95,12 +115,22 @@ public class ConfBlock extends HtmlBlock {
first = false;
buffer.append(sources[i]);
}
Configuration conf = appContext.getJob(jobID).loadConfFile();
String decodeStrings = conf.getTrimmed(MRJobConfig.MR_DECODE_CONFIGS, "");
Set<String> decodeConfigs = new HashSet<>();
for (String config : decodeStrings.split(",")) {
decodeConfigs.add(config.trim());
}
String value = entry.getValue();
if(decodeConfigs.contains(entry.getName())){
value = urlDecode(value);
}
tbody.
tr().
td(entry.getName()).
td(entry.getValue()).
td(buffer.toString()).
__();
td(entry.getName()).
td(value).
td(buffer.toString()).
__();
}
tbody.__().
tfoot().
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import java.net.URLDecoder;

import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.util.MRJobConfUtil;
Expand Down Expand Up @@ -70,12 +71,16 @@ public void testConfigurationBlock() throws Exception {
final String redactedProp = "Key for redaction";
configuration.set(MRJobConfig.MR_JOB_REDACTED_PROPERTIES,
redactedProp);
final String testQueryStringEncoded = "insert+overwrite+table" +
"+test_hive_query_string+values%282%2C1%29";
final String decodeKey = "hive.query.string";
configuration.set("hive.query.string", testQueryStringEncoded);
configuration.set(MRJobConfig.MR_DECODE_CONFIGS, decodeKey);

when(job.getConfFile()).thenReturn(path);
when(job.loadConfFile()).thenReturn(configuration);

when(ctx.getJob(any(JobId.class))).thenReturn(job);


ConfBlockForTest configurationBlock = new ConfBlockForTest(ctx);
PrintWriter pWriter = new PrintWriter(data);
Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
Expand All @@ -94,6 +99,8 @@ public void testConfigurationBlock() throws Exception {
assertTrue(data.toString().contains(redactedProp));
assertTrue(data.toString().contains(
MRJobConfUtil.REDACTION_REPLACEMENT_VAL));
assertTrue(data.toString().contains("hive.query.string"));
assertTrue(data.toString().contains(URLDecoder.decode(testQueryStringEncoded, "UTF-8")));
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1316,4 +1316,5 @@ public interface MRJobConfig {
String INPUT_FILE_MANDATORY_PREFIX = "mapreduce.job.input.file.must.";
String SHUFFLE_KEY_LENGTH = "mapreduce.shuffle-key-length";
int DEFAULT_SHUFFLE_KEY_LENGTH = 64;
String MR_DECODE_CONFIGS = "mapreduce.job.decode.configs";
}
Original file line number Diff line number Diff line change
Expand Up @@ -2282,4 +2282,12 @@
</description>
</property>

<property>
<name>mapreduce.job.decode.configs</name>
<value>hive.query.string</value>
<description>
    Some configuration values in a job are stored URL-encoded and should be
    decoded before being rendered.
</description>
</property>

</configuration>