
Commit 13350e4

Merge branch 'apache:trunk' into YARN-11764
2 parents 9708a7d + 06ff1b6

File tree: 65 files changed, +1283 -1072 lines changed

Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.jupiter.api.extension.BeforeEachCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+/**
+ * This is a custom JUnit5 extension, registered via {@code RegisterExtension},
+ * that we created to obtain the method name of the executing test.
+ */
+public class TestName implements BeforeEachCallback {
+
+  private volatile String name;
+
+  @Override
+  public void beforeEach(ExtensionContext extensionContext) throws Exception {
+    name = extensionContext.getTestMethod().get().getName();
+  }
+
+  public String getMethodName() {
+    return this.name;
+  }
+}
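For orientation, here is a minimal usage sketch of the new extension. It is not part of this commit; the test class name and assertion are illustrative only. A JUnit 5 test registers TestName through a non-private @RegisterExtension field and reads the current method name from it.

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

import org.apache.hadoop.test.TestName;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class TestNameUsageExample {

  // Must not be private: JUnit Jupiter rejects private @RegisterExtension fields.
  @RegisterExtension
  public final TestName methodName = new TestName();

  @Test
  public void testSomething() {
    // beforeEach() has already captured the name of the running test method.
    assertEquals("testSomething", methodName.getMethodName());
  }
}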

hadoop-project/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -1589,7 +1589,7 @@
       <dependency>
         <groupId>com.aliyun.oss</groupId>
         <artifactId>aliyun-sdk-oss</artifactId>
-        <version>3.13.2</version>
+        <version>3.18.1</version>
         <exclusions>
           <exclusion>
             <groupId>org.apache.httpcomponents</groupId>

hadoop-tools/hadoop-aliyun/pom.xml

Lines changed: 26 additions & 0 deletions
@@ -165,5 +165,31 @@
       <scope>test</scope>
       <type>jar</type>
     </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>

hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileSystemStore.java

Lines changed: 16 additions & 0 deletions
@@ -73,6 +73,7 @@
 import java.util.ListIterator;
 import java.util.NoSuchElementException;
 import java.util.stream.Collectors;
+import com.aliyun.oss.common.comm.SignVersion;

 import static org.apache.hadoop.fs.aliyun.oss.Constants.*;

@@ -113,6 +114,16 @@ public void initialize(URI uri, Configuration conf, String user,
         conf.get(USER_AGENT_PREFIX, USER_AGENT_PREFIX_DEFAULT) + ", Hadoop/"
             + VersionInfo.getVersion());

+    String region = conf.get(REGION_KEY, "");
+    String signatureVersion = conf.get(SIGNATURE_VERSION_KEY, SIGNATURE_VERSION_DEFAULT);
+    if ("V4".equalsIgnoreCase(signatureVersion)) {
+      clientConf.setSignatureVersion(SignVersion.V4);
+      if (StringUtils.isEmpty(region)) {
+        LOG.error("Signature version is V4, but region is empty.");
+        throw new IOException("SignVersion is V4 but region is empty");
+      }
+    }
+
     String proxyHost = conf.getTrimmed(PROXY_HOST_KEY, "");
     int proxyPort = conf.getInt(PROXY_PORT_KEY, -1);
     if (StringUtils.isNotEmpty(proxyHost)) {
@@ -171,6 +182,11 @@ public void initialize(URI uri, Configuration conf, String user,
       statistics.incrementWriteOps(1);
     }

+    if (StringUtils.isNotEmpty(region)) {
+      ossClient.setRegion(region);
+      LOG.debug("ossClient setRegion {}", region);
+    }
+
     maxKeys = conf.getInt(MAX_PAGING_KEYS_KEY, MAX_PAGING_KEYS_DEFAULT);
     int listVersion = conf.getInt(LIST_VERSION, DEFAULT_LIST_VERSION);
     if (listVersion < 1 || listVersion > 2) {

hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/Constants.java

Lines changed: 15 additions & 0 deletions
@@ -211,4 +211,19 @@ private Constants() {
   public static final String LIST_VERSION = "fs.oss.list.version";

   public static final int DEFAULT_LIST_VERSION = 2;
+
+  /**
+   * OSS signature version.
+   */
+  public static final String SIGNATURE_VERSION_KEY = "fs.oss.signatureversion";
+
+  /**
+   * OSS signature version DEFAULT {@value}.
+   */
+  public static final String SIGNATURE_VERSION_DEFAULT = "V1";
+
+  /**
+   * OSS region {@value}.
+   */
+  public static final String REGION_KEY = "fs.oss.region";
 }
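Taken together with the AliyunOSSFileSystemStore change above, these keys mean V4 signing must be paired with a region. A rough usage sketch follows; it is not part of the diff, and the bucket URI, class name, and region value are placeholders.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class OssV4SigningExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // New keys from Constants.java; the default signature version is "V1".
    conf.set("fs.oss.signatureversion", "V4");
    // Required when V4 is selected; initialize() otherwise throws
    // IOException("SignVersion is V4 but region is empty").
    conf.set("fs.oss.region", "cn-hongkong");
    try (FileSystem fs = FileSystem.get(URI.create("oss://example-bucket/"), conf)) {
      System.out.println("OSS filesystem initialized with V4 signing: " + fs.getUri());
    }
  }
}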
Lines changed: 98 additions & 0 deletions
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.aliyun.oss;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URI;
+
+import static org.apache.hadoop.fs.aliyun.oss.Constants.REGION_KEY;
+import static org.apache.hadoop.fs.aliyun.oss.Constants.SIGNATURE_VERSION_KEY;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
+import static org.junit.Assert.*;
+import static org.junit.Assume.assumeNotNull;
+
+/**
+ * Tests Aliyun OSS system.
+ */
+public class ITAliyunOSSSignatureV4 {
+  private static final Logger LOG = LoggerFactory.getLogger(ITAliyunOSSSignatureV4.class);
+  private Configuration conf;
+  private URI testURI;
+  private Path testFile = new Path("ITAliyunOSSSignatureV4/atestr");
+
+  @Before
+  public void setUp() throws Exception {
+    conf = new Configuration();
+    String bucketUri = conf.get("test.fs.oss.name");
+    LOG.debug("bucketUri={}", bucketUri);
+    testURI = URI.create(bucketUri);
+  }
+
+  @Test
+  public void testV4() throws IOException {
+    conf.set(SIGNATURE_VERSION_KEY, "V4");
+    conf.set(REGION_KEY, "cn-hongkong");
+    AliyunOSSFileSystem fs = new AliyunOSSFileSystem();
+    fs.initialize(testURI, conf);
+    assumeNotNull(fs);
+
+    createFile(fs, testFile, true, dataset(256, 0, 255));
+    FileStatus status = fs.getFileStatus(testFile);
+    fs.delete(testFile);
+    fs.close();
+  }
+
+  @Test
+  public void testDefaultSignatureVersion() throws IOException {
+    AliyunOSSFileSystem fs = new AliyunOSSFileSystem();
+    fs.initialize(testURI, conf);
+    assumeNotNull(fs);
+
+    Path testFile2 = new Path("/test/atestr");
+    createFile(fs, testFile2, true, dataset(256, 0, 255));
+    FileStatus status = fs.getFileStatus(testFile2);
+    fs.delete(testFile2);
+    fs.close();
+  }
+
+  @Test
+  public void testV4WithoutRegion() throws IOException {
+    conf.set(SIGNATURE_VERSION_KEY, "V4");
+    AliyunOSSFileSystem fs = new AliyunOSSFileSystem();
+    IOException expectedException = null;
+    try {
+      fs.initialize(testURI, conf);
+    } catch (IOException e) {
+      LOG.warn("use V4, but do not set region, get exception={}", e);
+      expectedException = e;
+      assertEquals("use V4, but do not set region", e.getMessage(),
+          "SignVersion is V4 but region is empty");
+    }
+    assertNotNull(expectedException);
+  }
+}

hadoop-tools/hadoop-aliyun/src/test/resources/log4j.properties

Lines changed: 3 additions & 0 deletions
@@ -21,3 +21,6 @@ log4j.threshold=ALL
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
 log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+# Log all oss classes
+log4j.logger.org.apache.hadoop.fs.aliyun.oss=DEBUG

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/AzureServiceErrorCode.java

Lines changed: 2 additions & 0 deletions
@@ -65,6 +65,8 @@ public enum AzureServiceErrorCode {
   COPY_BLOB_ABORTED("CopyBlobAborted", HttpURLConnection.HTTP_INTERNAL_ERROR, null),
   BLOB_OPERATION_NOT_SUPPORTED("BlobOperationNotSupported", HttpURLConnection.HTTP_CONFLICT, null),
   INVALID_APPEND_OPERATION("InvalidAppendOperation", HttpURLConnection.HTTP_CONFLICT, null),
+  UNAUTHORIZED_BLOB_OVERWRITE("UnauthorizedBlobOverwrite", HttpURLConnection.HTTP_FORBIDDEN,
+      "This request is not authorized to perform blob overwrites."),
   UNKNOWN(null, -1, null);

   private final String errorCode;

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsDfsClient.java

Lines changed: 11 additions & 0 deletions
@@ -42,6 +42,7 @@

 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
 import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore;
 import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
@@ -132,6 +133,8 @@
 import static org.apache.hadoop.fs.azurebfs.constants.HttpQueryParams.QUERY_PARAM_RETAIN_UNCOMMITTED_DATA;
 import static org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode.RENAME_DESTINATION_PARENT_PATH_NOT_FOUND;
 import static org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode.SOURCE_PATH_NOT_FOUND;
+import static org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode.UNAUTHORIZED_BLOB_OVERWRITE;
+import static org.apache.hadoop.fs.azurebfs.services.AbfsErrors.ERR_FILE_ALREADY_EXISTS;

 /**
  * AbfsClient interacting with the DFS Endpoint.
@@ -702,6 +705,14 @@ public AbfsClientRenameResult renamePath(
         throw e;
       }

+      // ref: HADOOP-19393. Write permission checks can occur before validating
+      // rename operation's validity. If there is an existing destination path, it may be rejected
+      // with an authorization error. Catching and throwing FileAlreadyExistsException instead.
+      if (op.getResult().getStorageErrorCode()
+          .equals(UNAUTHORIZED_BLOB_OVERWRITE.getErrorCode())) {
+        throw new FileAlreadyExistsException(ERR_FILE_ALREADY_EXISTS);
+      }
+
       // ref: HADOOP-18242. Rename failure occurring due to a rare case of
       // tracking metadata being in incomplete state.
       if (op.getResult().getStorageErrorCode()
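For illustration only (account, container, and paths below are placeholders, not from this commit), the caller-visible effect is that a rename rejected with UnauthorizedBlobOverwrite now surfaces as a FileAlreadyExistsException rather than a generic authorization failure:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AbfsRenameOverwriteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder ABFS URI; a real run needs valid credentials configured.
    FileSystem fs = FileSystem.get(
        new Path("abfss://container@account.dfs.core.windows.net/").toUri(), conf);
    try {
      fs.rename(new Path("/tmp/source"), new Path("/tmp/existing-destination"));
    } catch (FileAlreadyExistsException e) {
      // With HADOOP-19393, an UnauthorizedBlobOverwrite rejection of the rename
      // destination is translated into this exception ("File already exists.").
      System.out.println("Rename refused, destination exists: " + e.getMessage());
    } finally {
      fs.close();
    }
  }
}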

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java

Lines changed: 1 addition & 0 deletions
@@ -28,6 +28,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public final class AbfsErrors {
+  public static final String ERR_FILE_ALREADY_EXISTS = "File already exists.";
   public static final String ERR_WRITE_WITHOUT_LEASE = "Attempted to write to file without lease";
   public static final String ERR_LEASE_EXPIRED = "A lease ID was specified, but the lease for the resource has expired.";
   public static final String ERR_LEASE_EXPIRED_BLOB = "A lease ID was specified, but the lease for the blob has expired.";
