From 1f1e4eac63a972e7a5cfc0da55fcfd135ad81d74 Mon Sep 17 00:00:00 2001 From: LiHS Date: Thu, 12 May 2022 15:47:49 +0800 Subject: [PATCH 1/2] fix resume upload v2 can't upload without key --- src/Qiniu/Storage/ResumableUploader.cs | 18 +-- src/Qiniu/Storage/ResumeBlocker.cs | 2 +- src/Qiniu/Util/Base64.cs | 4 +- .../Storage/ResumableUploaderTests.cs | 122 ++++++++++++++++++ 4 files changed, 134 insertions(+), 12 deletions(-) diff --git a/src/Qiniu/Storage/ResumableUploader.cs b/src/Qiniu/Storage/ResumableUploader.cs index f000b69..b62c46d 100644 --- a/src/Qiniu/Storage/ResumableUploader.cs +++ b/src/Qiniu/Storage/ResumableUploader.cs @@ -79,10 +79,10 @@ public HttpResult UploadFile(string localFile, string key, string token, PutExtr public HttpResult UploadStream(Stream stream, string key, string upToken, PutExtra putExtra) { HttpResult result = new HttpResult(); - string encodedObjectName = ""; - if (putExtra != null && putExtra.Version == "v2") + string encodedObjectNameForV2 = "~"; + if (putExtra != null && putExtra.Version == "v2" && key != null) { - encodedObjectName = Base64.GetEncodedObjectName(key); + encodedObjectNameForV2 = Base64.UrlSafeBase64Encode(key); } //check put extra @@ -144,7 +144,7 @@ public HttpResult UploadStream(Stream stream, string key, string upToken, PutExt } else if (putExtra.Version == "v2") { - HttpResult res = initReq(encodedObjectName, upToken); + HttpResult res = initReq(encodedObjectNameForV2, upToken); Dictionary responseBody = JsonConvert.DeserializeObject>(res.Text); if (res.Code != 200) { @@ -265,7 +265,7 @@ public HttpResult UploadStream(Stream stream, string key, string upToken, PutExt { processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, - encodedObjectName); + encodedObjectNameForV2); //check mkblk results foreach (int blkIndex in blockMakeResults.Keys) { @@ -290,7 +290,7 @@ public HttpResult UploadStream(Stream stream, string key, string upToken, PutExt if (blockDataDict.Count > 0) { processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, - encodedObjectName); + encodedObjectNameForV2); //check mkblk results foreach (int blkIndex in blockMakeResults.Keys) { @@ -319,7 +319,7 @@ public HttpResult UploadStream(Stream stream, string key, string upToken, PutExt } else if (putExtra.Version == "v2") { - hr = completeParts(key, resumeInfo, key, upToken, putExtra, encodedObjectName); + hr = completeParts(key, resumeInfo, key, upToken, putExtra, encodedObjectNameForV2); } else { throw new Exception("Invalid Version, only supports v1 / v2"); } @@ -701,7 +701,7 @@ private HttpResult MakeFile(string fileName, long size, string key, string upTok } /// - /// 初始化上传任务 + /// 初始化上传任务,仅用于分片上传 V2 /// /// 上传凭证 /// Base64编码后的资源名 @@ -754,7 +754,7 @@ private HttpResult initReq(string encodedObjectName, string upToken) } /// - /// 根据已上传的所有分片数据创建文件 + /// 根据已上传的所有分片数据创建文件,仅用于分片上传 V2 /// /// 源文件名 /// 分片上传记录信息 diff --git a/src/Qiniu/Storage/ResumeBlocker.cs b/src/Qiniu/Storage/ResumeBlocker.cs index 1de3e65..55c090f 100644 --- a/src/Qiniu/Storage/ResumeBlocker.cs +++ b/src/Qiniu/Storage/ResumeBlocker.cs @@ -16,7 +16,7 @@ class ResumeBlocker public object ProgressLock { set; get; } public Dictionary UploadedBytesDict { set; get; } public long FileSize { set; get; } - public string encodedObjectName { set; get; } + public string encodedObjectName { set; get; } // 仅用于分片上传 V2 public ResumeBlocker(ManualResetEvent doneEvent, byte[] blockBuffer, long blockIndex, string 
uploadToken, PutExtra putExtra, ResumeInfo resumeInfo, Dictionary blockMakeResults, diff --git a/src/Qiniu/Util/Base64.cs b/src/Qiniu/Util/Base64.cs index bdc0552..5b84e4a 100644 --- a/src/Qiniu/Util/Base64.cs +++ b/src/Qiniu/Util/Base64.cs @@ -50,14 +50,14 @@ public static byte[] UrlsafeBase64Decode(string text) } /// - /// 获取EncodedObjectName + /// 获取EncodedObjectName,建议仅用于分片上传 V2 /// /// 待加密的字符串 /// 已加密的字符串 public static string GetEncodedObjectName(string key) { string encodedObjectName = "~"; - if (!string.IsNullOrEmpty(key)) + if (key != null) { encodedObjectName = UrlSafeBase64Encode(key); } diff --git a/src/QiniuTests/Storage/ResumableUploaderTests.cs b/src/QiniuTests/Storage/ResumableUploaderTests.cs index 20ad036..2981ddb 100644 --- a/src/QiniuTests/Storage/ResumableUploaderTests.cs +++ b/src/QiniuTests/Storage/ResumableUploaderTests.cs @@ -144,6 +144,63 @@ public void UploadFileV2Test() System.IO.File.Delete(filePath); } + [Test] + public void UploadFileV2WithoutKeyTest() + { + Mac mac = new Mac(AccessKey, SecretKey); + + string tempPath = Path.GetTempPath(); + int rnd = new Random().Next(1, 100000); + string filePath = tempPath + "resumeFile" + rnd.ToString(); + char[] testBody = new char[6 * 1024 * 1024]; + FileStream stream = new FileStream(filePath, FileMode.Create); + StreamWriter sw = new StreamWriter(stream, System.Text.Encoding.Default); + sw.Write(testBody); + sw.Close(); + stream.Close(); + + PutPolicy putPolicy = new PutPolicy(); + putPolicy.Scope = Bucket; + putPolicy.SetExpires(3600); + putPolicy.DeleteAfterDays = 1; + string token = Auth.CreateUploadToken(mac, putPolicy.ToJsonString()); + + Config config = new Config(); + config.Zone = Zone.ZONE_CN_East; + config.UseHttps = true; + config.UseCdnDomains = true; + config.ChunkSize = ChunkUnit.U512K; + PutExtra extra = new PutExtra(); + extra.MimeType = "application/json"; + extra.Version = "v2"; + extra.PartSize = 4 * 1024 * 1024; + ResumableUploader target = new ResumableUploader(config); + HttpResult result = target.UploadFile(filePath, null, token, extra); + Console.WriteLine("chunk upload result: " + result.ToString()); + Assert.AreEqual((int)HttpCode.OK, result.Code); + Dictionary responseBody = JsonConvert.DeserializeObject>(result.Text); + Assert.AreEqual(responseBody["hash"], responseBody["key"]); + + string downloadUrl = string.Format("http://{0}/{1}", Domain, responseBody["key"]); + HttpWebRequest wReq = WebRequest.Create(downloadUrl) as HttpWebRequest; + wReq.Method = "GET"; + HttpWebResponse wResp = wReq.GetResponse() as HttpWebResponse; + Assert.AreEqual((int)HttpCode.OK, (int)wResp.StatusCode); + Assert.AreEqual("application/json", wResp.Headers[HttpResponseHeader.ContentType]); + + using (var md5_1 = MD5.Create()) { + using (var md5_2 = MD5.Create()) { + using (var fileStream = File.OpenRead(filePath)) { + byte[] checksum1 = md5_1.ComputeHash(fileStream); + byte[] checksum2 = md5_2.ComputeHash(wResp.GetResponseStream()); + Assert.AreEqual(checksum1, checksum2); + } + } + } + + File.Delete(filePath); + } + [Test] public void ResumeUploadFileTest() { @@ -264,6 +321,71 @@ public void ResumeUploadFileV2Test() System.IO.File.Delete(filePath); } } + + [Test] + public void ResumeUploadFileV2WithoutKeyTest() + { + Mac mac = new Mac(AccessKey, SecretKey); + Config config = new Config(); + config.UseHttps = true; + config.Zone = Zone.ZONE_CN_East; + config.UseCdnDomains = true; + config.ChunkSize = ChunkUnit.U512K; + ResumableUploader target = new ResumableUploader(config); + PutExtra extra = new PutExtra(); + 
extra.PartSize = 4 * 1024 * 1024; + extra.Version = "v2"; + + int[] sizes = new int[5]{extra.PartSize/2, extra.PartSize, extra.PartSize+1, extra.PartSize*2, 10*1024*1024}; + foreach(int i in sizes) + { + char[] testBody = new char[i]; + Random rand = new Random(); + + string tempPath = Path.GetTempPath(); + int rnd = new Random().Next(1, 100000); + string filePath = tempPath + "resumeFile" + rnd.ToString(); + FileStream stream = new FileStream(filePath, FileMode.Create); + StreamWriter sw = new StreamWriter(stream, System.Text.Encoding.Default); + sw.Write(testBody); + sw.Close(); + stream.Close(); + Stream fs = File.OpenRead(filePath); + + PutPolicy putPolicy = new PutPolicy(); + putPolicy.Scope = Bucket; + putPolicy.SetExpires(3600); + putPolicy.DeleteAfterDays = 1; + string token = Auth.CreateUploadToken(mac, putPolicy.ToJsonString()); + + //设置断点续传进度记录文件 + extra.ResumeRecordFile = ResumeHelper.GetDefaultRecordKey(filePath, rand.Next().ToString()); + Console.WriteLine("record file:" + extra.ResumeRecordFile); + HttpResult result = target.UploadStream(fs, null, token, extra); + Console.WriteLine("resume upload: " + result.ToString()); + Assert.AreEqual((int)HttpCode.OK, result.Code); + Dictionary responseBody = JsonConvert.DeserializeObject>(result.Text); + Assert.AreEqual(responseBody["hash"], responseBody["key"]); + + string downloadUrl = string.Format("http://{0}/{1}", Domain, responseBody["key"]); + HttpWebRequest wReq = WebRequest.Create(downloadUrl) as HttpWebRequest; + wReq.Method = "GET"; + HttpWebResponse wResp = wReq.GetResponse() as HttpWebResponse; + Assert.AreEqual((int)HttpCode.OK, (int)wResp.StatusCode); + + using (var md5_1 = MD5.Create()) { + using (var md5_2 = MD5.Create()) { + using (var fileStream = File.OpenRead(filePath)) { + byte[] checksum1 = md5_1.ComputeHash(fileStream); + byte[] checksum2 = md5_2.ComputeHash(wResp.GetResponseStream()); + Assert.AreEqual(checksum1, checksum2); + } + } + } + + File.Delete(filePath); + } + } } } From 0f63d266b86b083ec9a9a28b7519b94f5d9be7d3 Mon Sep 17 00:00:00 2001 From: LiHS Date: Fri, 16 Sep 2022 17:21:46 +0800 Subject: [PATCH 2/2] support retry expired parts with upload --- src/Qiniu/Storage/ResumableUploader.cs | 593 +++++++++++++++++-------- src/Qiniu/Storage/ResumeInfo.cs | 6 + 2 files changed, 402 insertions(+), 197 deletions(-) diff --git a/src/Qiniu/Storage/ResumableUploader.cs b/src/Qiniu/Storage/ResumableUploader.cs index b62c46d..8e0b0d5 100644 --- a/src/Qiniu/Storage/ResumableUploader.cs +++ b/src/Qiniu/Storage/ResumableUploader.cs @@ -79,11 +79,6 @@ public HttpResult UploadFile(string localFile, string key, string token, PutExtr public HttpResult UploadStream(Stream stream, string key, string upToken, PutExtra putExtra) { HttpResult result = new HttpResult(); - string encodedObjectNameForV2 = "~"; - if (putExtra != null && putExtra.Version == "v2" && key != null) - { - encodedObjectNameForV2 = Base64.UrlSafeBase64Encode(key); - } //check put extra if (putExtra == null) @@ -109,263 +104,466 @@ public HttpResult UploadStream(Stream stream, string key, string upToken, PutExt //start to upload try { - long uploadedBytes = 0; - long fileSize = stream.Length; - long blockCount = (fileSize + putExtra.PartSize - 1) / putExtra.PartSize; - int partNumber = 1; - - //check resume record file + // load resume record file ResumeInfo resumeInfo = null; if (File.Exists(putExtra.ResumeRecordFile)) { - resumeInfo = ResumeHelper.Load(putExtra.ResumeRecordFile); + } + if (putExtra.Version == "v1") + { + result = UploadStreamV1(stream, 
key, upToken, putExtra, resumeInfo); + } + else if (putExtra.Version == "v2") + { + result = UploadStreamV2(stream, key, upToken, putExtra, resumeInfo); + } else { + throw new Exception("Invalid Version, only supports v1 / v2"); + } + } + catch (Exception ex) + { + Console.WriteLine(ex.StackTrace); + StringBuilder sb = new StringBuilder(); + sb.AppendFormat("[{0}] [ResumableUpload] Error: ", DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + Exception e = ex; + while (e != null) + { + sb.Append(e.Message + " "); + e = e.InnerException; + } + sb.AppendLine(); - if (resumeInfo != null && fileSize == resumeInfo.FileSize) - { - //check whether ctx expired - if (UnixTimestamp.IsContextExpired(resumeInfo.ExpiredAt)) - { - resumeInfo = null; - } - } + result.RefCode = (int)HttpCode.USER_UNDEF; + result.RefText += sb.ToString(); + } + } + + return result; + } + + private HttpResult UploadStreamV1(Stream stream, string key, string upToken, PutExtra putExtra, ResumeInfo resumeInfo) + { + HttpResult result = new HttpResult(); + bool isResumeUpload = resumeInfo != null; + try + { + string encodedObjectName = ""; + long uploadedBytes = 0; + long fileSize = stream.Length; + long blockCount = (fileSize + putExtra.PartSize - 1) / putExtra.PartSize; + if (resumeInfo == null) + { + resumeInfo = new ResumeInfo() + { + FileSize = fileSize, + BlockCount = blockCount, + Contexts = new string[blockCount], + ContextsExpiredAt = new long[blockCount], + ExpiredAt = 0, + }; + } + + //init block upload error + UploadControllerAction upCtrl = putExtra.UploadController(); + ManualResetEvent manualResetEvent = new ManualResetEvent(false); + Dictionary blockDataDict = new Dictionary(); + Dictionary blockMakeResults = new Dictionary(); + Dictionary uploadedBytesDict = new Dictionary(); + uploadedBytesDict.Add("UploadProgress", uploadedBytes); + byte[] blockBuffer = new byte[putExtra.PartSize]; + + //check not finished blocks to upload + for (long blockIndex = 0; blockIndex < blockCount; blockIndex++) + { + string context = resumeInfo.Contexts[blockIndex]; + long contextExpiredAt = resumeInfo.ContextsExpiredAt[blockIndex]; + if (!string.IsNullOrEmpty(context) && !UnixTimestamp.IsContextExpired(contextExpiredAt)) + { + uploadedBytesDict["UploadProgress"] += putExtra.PartSize; + continue; } - if (resumeInfo == null) + //check upload controller action before each chunk + while (true) { - if (putExtra.Version == "v1") + upCtrl = putExtra.UploadController(); + + if (upCtrl == UploadControllerAction.Aborted) { - resumeInfo = new ResumeInfo() - { - FileSize = fileSize, - BlockCount = blockCount, - Contexts = new string[blockCount], - ExpiredAt = 0, - }; + result.Code = (int)HttpCode.USER_CANCELED; + result.RefCode = (int)HttpCode.USER_CANCELED; + result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + manualResetEvent.Set(); + return result; } - else if (putExtra.Version == "v2") + else if (upCtrl == UploadControllerAction.Suspended) { - HttpResult res = initReq(encodedObjectNameForV2, upToken); - Dictionary responseBody = JsonConvert.DeserializeObject>(res.Text); - if (res.Code != 200) - { - return res; - } - - resumeInfo = new ResumeInfo() - { - FileSize = fileSize, - BlockCount = blockCount, - Etags = new Dictionary[blockCount], - Uploaded = 0, - ExpiredAt = long.Parse(responseBody["expireAt"]), - UploadId = responseBody["uploadId"] - }; - } else { - throw new Exception("Invalid Version, only supports v1 / v2"); + result.RefCode = 
(int)HttpCode.USER_PAUSED; + result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is paused\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + manualResetEvent.WaitOne(1000); + } + else if (upCtrl == UploadControllerAction.Activated) + { + break; } } - //calc upload progress - for (long blockIndex = 0; blockIndex < blockCount; blockIndex++) + long offset = blockIndex * putExtra.PartSize; + stream.Seek(offset, SeekOrigin.Begin); + int blockLen = stream.Read(blockBuffer, 0, putExtra.PartSize); + byte[] blockData = new byte[blockLen]; + Array.Copy(blockBuffer, blockData, blockLen); + blockDataDict.Add(blockIndex, blockData); + + if (blockDataDict.Count == putExtra.BlockUploadThreads) { - if (putExtra.Version == "v1") + processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, + encodedObjectName); + //check mkblk results + foreach (int blkIndex in blockMakeResults.Keys) { - string context = resumeInfo.Contexts[blockIndex]; - if (!string.IsNullOrEmpty(context)) + HttpResult mkblkRet = blockMakeResults[blkIndex]; + if (mkblkRet.Code != 200) { - uploadedBytes += putExtra.PartSize; + result = mkblkRet; + manualResetEvent.Set(); + return result; } } - else if (putExtra.Version == "v2") + blockDataDict.Clear(); + blockMakeResults.Clear(); + if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) { - Dictionary etag = resumeInfo.Etags[blockIndex]; - if (etag != null) - { - if (blockIndex > 0) - { - partNumber += 1; - } - uploadedBytes += putExtra.PartSize; - resumeInfo.Uploaded = uploadedBytes; - } - } else { - throw new Exception("Invalid Version, only supports v1 / v2"); + ResumeHelper.Save(resumeInfo, putExtra.ResumeRecordFile); } - } - - //set upload progress - putExtra.ProgressHandler(uploadedBytes, fileSize); - - //init block upload error - //check not finished blocks to upload - UploadControllerAction upCtrl = putExtra.UploadController(); - ManualResetEvent manualResetEvent = new ManualResetEvent(false); - Dictionary blockDataDict = new Dictionary(); - Dictionary blockMakeResults = new Dictionary(); - Dictionary uploadedBytesDict = new Dictionary(); - uploadedBytesDict.Add("UploadProgress", uploadedBytes); - byte[] blockBuffer = new byte[putExtra.PartSize]; - for (long blockIndex = 0; blockIndex < blockCount; blockIndex++) + } + + + if (blockDataDict.Count > 0) + { + processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, + encodedObjectName); + //check mkblk results + foreach (int blkIndex in blockMakeResults.Keys) { - string context = null; - if (putExtra.Version == "v1") + HttpResult mkblkRet = blockMakeResults[blkIndex]; + if (mkblkRet.Code != 200) { - context = resumeInfo.Contexts[blockIndex]; + result = mkblkRet; + manualResetEvent.Set(); + return result; } - else if (putExtra.Version == "v2") + } + blockDataDict.Clear(); + blockMakeResults.Clear(); + if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) + { + ResumeHelper.Save(resumeInfo, putExtra.ResumeRecordFile); + } + } + + if (upCtrl == UploadControllerAction.Activated) + { + HttpResult hr = new HttpResult(); + hr = MakeFile(key, fileSize, key, upToken, putExtra, resumeInfo.Contexts); + + if (hr.Code != (int)HttpCode.OK) + { + result.Shadow(hr); + result.RefText += string.Format("[{0}] [ResumableUpload] Error: mkfile: code = {1}, text = {2}\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), hr.Code, hr.Text); + } + + if (File.Exists(putExtra.ResumeRecordFile)) + { + 
File.Delete(putExtra.ResumeRecordFile); + } + result.Shadow(hr); + result.RefText += string.Format("[{0}] [ResumableUpload] Uploaded: \"{1}\" ==> \"{2}\"\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), putExtra.ResumeRecordFile, key); + } + else + { + result.Code = (int)HttpCode.USER_CANCELED; + result.RefCode = (int)HttpCode.USER_CANCELED; + result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted, mkfile\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + } + + manualResetEvent.Set(); + } + catch (Exception ex) + { + Console.WriteLine(ex.StackTrace); + StringBuilder sb = new StringBuilder(); + sb.AppendFormat("[{0}] [ResumableUpload] Error: ", DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + Exception e = ex; + while (e != null) + { + sb.Append(e.Message + " "); + e = e.InnerException; + } + sb.AppendLine(); + + result.RefCode = (int)HttpCode.USER_UNDEF; + result.RefText += sb.ToString(); + } + + if (isResumeUpload && result.Code == (int)HttpCode.CONTEXT_EXPIRED) + { + stream.Seek(0, SeekOrigin.Begin); + return UploadStreamV1(stream, key, upToken, putExtra, null); + } + + return result; + } + + private HttpResult UploadStreamV2(Stream stream, string key, string upToken, PutExtra putExtra, ResumeInfo resumeInfo) + { + HttpResult result = new HttpResult(); + bool isResumeUpload = resumeInfo != null; + + try + { + string encodedObjectName = "~"; + if (key != null) + { + encodedObjectName = Base64.GetEncodedObjectName(key); + } + long uploadedBytes = 0; + long fileSize = stream.Length; + long blockCount = (fileSize + putExtra.PartSize - 1) / putExtra.PartSize; + + if (resumeInfo == null || UnixTimestamp.IsContextExpired(resumeInfo.ExpiredAt)) + { + HttpResult res = initReq(encodedObjectName, upToken); + Dictionary responseBody = JsonConvert.DeserializeObject>(res.Text); + if (res.Code != 200) + { + return res; + } + + resumeInfo = new ResumeInfo() + { + FileSize = fileSize, + BlockCount = blockCount, + Etags = new Dictionary[blockCount], + Uploaded = 0, + ExpiredAt = long.Parse(responseBody["expireAt"]), + UploadId = responseBody["uploadId"] + }; + } + + //calc upload progress + for (long blockIndex = 0; blockIndex < blockCount; blockIndex++) + { + Dictionary etag = resumeInfo.Etags[blockIndex]; + if (etag != null) + { + uploadedBytes += putExtra.PartSize; + resumeInfo.Uploaded = uploadedBytes; + } + } + //set upload progress + putExtra.ProgressHandler(uploadedBytes, fileSize); + + + //init block upload error + //check not finished blocks to upload + UploadControllerAction upCtrl = putExtra.UploadController(); + ManualResetEvent manualResetEvent = new ManualResetEvent(false); + Dictionary blockDataDict = new Dictionary(); + Dictionary blockMakeResults = new Dictionary(); + Dictionary uploadedBytesDict = new Dictionary(); + uploadedBytesDict.Add("UploadProgress", uploadedBytes); + byte[] blockBuffer = new byte[putExtra.PartSize]; + for (long blockIndex = 0; blockIndex < blockCount; blockIndex++) + { + string context = null; + Dictionary etag = resumeInfo.Etags[blockIndex]; + if (etag != null && etag.Count > 0) + { + context = "~"; + } + + if (string.IsNullOrEmpty(context)) + { + //check upload controller action before each chunk + while (true) { - Dictionary etag = resumeInfo.Etags[blockIndex]; - if (etag != null && etag.Count > 0) + upCtrl = putExtra.UploadController(); + + if (upCtrl == UploadControllerAction.Aborted) { - context = "~"; + result.Code = (int)HttpCode.USER_CANCELED; + result.RefCode = (int)HttpCode.USER_CANCELED; + 
result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + manualResetEvent.Set(); + return result; + } + else if (upCtrl == UploadControllerAction.Suspended) + { + result.RefCode = (int)HttpCode.USER_PAUSED; + result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is paused\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + manualResetEvent.WaitOne(1000); + } + else if (upCtrl == UploadControllerAction.Activated) + { + break; } - } else { - throw new Exception("Invalid Version, only supports v1 / v2"); } - if (string.IsNullOrEmpty(context)) + long offset = blockIndex * putExtra.PartSize; + stream.Seek(offset, SeekOrigin.Begin); + int blockLen = stream.Read(blockBuffer, 0, putExtra.PartSize); + byte[] blockData = new byte[blockLen]; + Array.Copy(blockBuffer, blockData, blockLen); + blockDataDict.Add(blockIndex, blockData); + + if (blockDataDict.Count == putExtra.BlockUploadThreads) { - //check upload controller action before each chunk - while (true) - { - upCtrl = putExtra.UploadController(); - if (upCtrl == UploadControllerAction.Aborted) + processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, + encodedObjectName); + //check mkblk results + foreach (int blkIndex in blockMakeResults.Keys) + { + HttpResult mkblkRet = blockMakeResults[blkIndex]; + if (mkblkRet.Code == (int) HttpCode.FILE_NOT_EXIST) { - result.Code = (int)HttpCode.USER_CANCELED; - result.RefCode = (int)HttpCode.USER_CANCELED; - result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted\n", - DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); - manualResetEvent.Set(); - return result; + if (File.Exists(putExtra.ResumeRecordFile)) + { + File.Delete(putExtra.ResumeRecordFile); + } } - else if (upCtrl == UploadControllerAction.Suspended) + if (isResumeUpload && mkblkRet.Code == (int)HttpCode.FILE_NOT_EXIST) { - result.RefCode = (int)HttpCode.USER_PAUSED; - result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is paused\n", - DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); - manualResetEvent.WaitOne(1000); + stream.Seek(0, SeekOrigin.Begin); + return UploadStreamV2(stream, key, upToken, putExtra, null); } - else if (upCtrl == UploadControllerAction.Activated) + if (mkblkRet.Code != (int)HttpCode.OK) { - break; + result = mkblkRet; + manualResetEvent.Set(); + return result; } } - - long offset = blockIndex * putExtra.PartSize; - stream.Seek(offset, SeekOrigin.Begin); - int blockLen = stream.Read(blockBuffer, 0, putExtra.PartSize); - byte[] blockData = new byte[blockLen]; - Array.Copy(blockBuffer, blockData, blockLen); - blockDataDict.Add(blockIndex, blockData); - - if (blockDataDict.Count == putExtra.BlockUploadThreads) + blockDataDict.Clear(); + blockMakeResults.Clear(); + if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) { - - processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, - encodedObjectNameForV2); - //check mkblk results - foreach (int blkIndex in blockMakeResults.Keys) - { - HttpResult mkblkRet = blockMakeResults[blkIndex]; - if (mkblkRet.Code != 200) - { - result = mkblkRet; - manualResetEvent.Set(); - return result; - } - } - blockDataDict.Clear(); - blockMakeResults.Clear(); - if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) - { - ResumeHelper.Save(resumeInfo, putExtra.ResumeRecordFile); - } + ResumeHelper.Save(resumeInfo, 
putExtra.ResumeRecordFile); } } } + } - if (blockDataDict.Count > 0) + if (blockDataDict.Count > 0) + { + processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, + encodedObjectName); + //check mkblk results + foreach (int blkIndex in blockMakeResults.Keys) { - processMakeBlocks(blockDataDict, upToken, putExtra, resumeInfo, blockMakeResults, uploadedBytesDict, fileSize, - encodedObjectNameForV2); - //check mkblk results - foreach (int blkIndex in blockMakeResults.Keys) + HttpResult mkblkRet = blockMakeResults[blkIndex]; + + if (mkblkRet.Code == (int) HttpCode.FILE_NOT_EXIST) { - HttpResult mkblkRet = blockMakeResults[blkIndex]; - if (mkblkRet.Code != 200) + if (File.Exists(putExtra.ResumeRecordFile)) { - result = mkblkRet; - manualResetEvent.Set(); - return result; + File.Delete(putExtra.ResumeRecordFile); } } - blockDataDict.Clear(); - blockMakeResults.Clear(); - if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) - { - ResumeHelper.Save(resumeInfo, putExtra.ResumeRecordFile); - } - } - - if (upCtrl == UploadControllerAction.Activated) - { - HttpResult hr = new HttpResult();; - if (putExtra.Version == "v1") + if (isResumeUpload && mkblkRet.Code == (int)HttpCode.FILE_NOT_EXIST) { - hr = MakeFile(key, fileSize, key, upToken, putExtra, resumeInfo.Contexts); + stream.Seek(0, SeekOrigin.Begin); + return UploadStreamV2(stream, key, upToken, putExtra, null); } - else if (putExtra.Version == "v2") + if (mkblkRet.Code != (int)HttpCode.OK) { - hr = completeParts(key, resumeInfo, key, upToken, putExtra, encodedObjectNameForV2); - } else { - throw new Exception("Invalid Version, only supports v1 / v2"); + result = mkblkRet; + manualResetEvent.Set(); + return result; } + } + blockDataDict.Clear(); + blockMakeResults.Clear(); + if (!string.IsNullOrEmpty(putExtra.ResumeRecordFile)) + { + ResumeHelper.Save(resumeInfo, putExtra.ResumeRecordFile); + } + } - if (hr.Code != (int)HttpCode.OK) - { - result.Shadow(hr); - result.RefText += string.Format("[{0}] [ResumableUpload] Error: mkfile: code = {1}, text = {2}\n", - DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), hr.Code, hr.Text); - } + if (upCtrl == UploadControllerAction.Activated) + { + HttpResult hr = new HttpResult();; + hr = completeParts(key, resumeInfo, key, upToken, putExtra, encodedObjectName); + if (result.Code == (int) HttpCode.FILE_NOT_EXIST) + { if (File.Exists(putExtra.ResumeRecordFile)) { File.Delete(putExtra.ResumeRecordFile); } - result.Shadow(hr); - result.RefText += string.Format("[{0}] [ResumableUpload] Uploaded: \"{1}\" ==> \"{2}\"\n", - DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), putExtra.ResumeRecordFile, key); } - else + if (isResumeUpload && result.Code == (int)HttpCode.FILE_NOT_EXIST) { - result.Code = (int)HttpCode.USER_CANCELED; - result.RefCode = (int)HttpCode.USER_CANCELED; - result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted, mkfile\n", - DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + stream.Seek(0, SeekOrigin.Begin); + return UploadStreamV2(stream, key, upToken, putExtra, null); } - manualResetEvent.Set(); - return result; - } - catch (Exception ex) - { - Console.WriteLine(ex.StackTrace); - StringBuilder sb = new StringBuilder(); - sb.AppendFormat("[{0}] [ResumableUpload] Error: ", DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); - Exception e = ex; - while (e != null) + if (hr.Code != (int)HttpCode.OK) { - sb.Append(e.Message + " "); - e = e.InnerException; + result.Shadow(hr); + result.RefText += 
string.Format("[{0}] [ResumableUpload] Error: mkfile: code = {1}, text = {2}\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), hr.Code, hr.Text); } - sb.AppendLine(); - result.RefCode = (int)HttpCode.USER_UNDEF; - result.RefText += sb.ToString(); + if (File.Exists(putExtra.ResumeRecordFile)) + { + File.Delete(putExtra.ResumeRecordFile); + } + result.Shadow(hr); + result.RefText += string.Format("[{0}] [ResumableUpload] Uploaded: \"{1}\" ==> \"{2}\"\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff"), putExtra.ResumeRecordFile, key); + } + else + { + result.Code = (int)HttpCode.USER_CANCELED; + result.RefCode = (int)HttpCode.USER_CANCELED; + result.RefText += string.Format("[{0}] [ResumableUpload] Info: upload task is aborted, mkfile\n", + DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); } + + manualResetEvent.Set(); + } + catch (Exception ex) + { + Console.WriteLine(ex.StackTrace); + StringBuilder sb = new StringBuilder(); + sb.AppendFormat("[{0}] [ResumableUpload] Error: ", DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffff")); + Exception e = ex; + while (e != null) + { + sb.Append(e.Message + " "); + e = e.InnerException; + } + sb.AppendLine(); + + result.RefCode = (int)HttpCode.USER_UNDEF; + result.RefText += sb.ToString(); + } + + if (isResumeUpload && result.Code == (int)HttpCode.FILE_NOT_EXIST) + { + stream.Seek(0, SeekOrigin.Begin); + return UploadStreamV2(stream, key, upToken, putExtra, null); } return result; @@ -407,7 +605,7 @@ private void processMakeBlocks(Dictionary blockDataDict, string up /// /// 创建块(携带首片数据),v1检查CRC32,v2检查md5 /// - /// 创建分片上次的块请求 + /// 创建分片上传的块请求 private void MakeBlock(object resumeBlockerObj) { ResumeBlocker resumeBlocker = (ResumeBlocker)resumeBlockerObj; @@ -517,7 +715,7 @@ private void MakeBlock(object resumeBlockerObj) { //write the mkblk context resumeInfo.Contexts[blockIndex] = rc.Ctx; - resumeInfo.ExpiredAt = rc.ExpiredAt; + resumeInfo.ContextsExpiredAt[blockIndex] = rc.ExpiredAt; lock (progressLock) { uploadedBytesDict["UploadProgress"] += blockSize; @@ -525,6 +723,7 @@ private void MakeBlock(object resumeBlockerObj) putExtra.ProgressHandler(uploadedBytesDict["UploadProgress"], fileSize); } } + // TODO: unreachable? bug? else if (putExtra.Version == "v2") { result.RefText += string.Format("[{0}] JSON Decode Error: text = {1}", diff --git a/src/Qiniu/Storage/ResumeInfo.cs b/src/Qiniu/Storage/ResumeInfo.cs index 79b25f7..0a14167 100644 --- a/src/Qiniu/Storage/ResumeInfo.cs +++ b/src/Qiniu/Storage/ResumeInfo.cs @@ -25,6 +25,12 @@ public class ResumeInfo [JsonProperty("contexts")] public string[] Contexts { get; set; } + /// + /// 上下文信息过期列表,与 context 配合使用 + /// + [JsonProperty("contextsExpiredAt")] + public long[] ContextsExpiredAt { get; set; } + /// /// Ctx过期时间戳(单位秒) ///
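
The core of the first patch is how the object name is encoded for resumable upload v2 when no key is given: a null key must map to the literal "~" rather than an empty string, so the init request is accepted and the server names the object itself. Below is a minimal stand-alone sketch of that behavior, mirroring Base64.GetEncodedObjectName from the diff above; the UTF-8 encoding and '+'/'/' replacements are assumptions about UrlSafeBase64Encode, and the sample key is a placeholder.

using System;
using System.Text;

class EncodedObjectNameDemo
{
    // Mirrors Qiniu.Util.Base64.GetEncodedObjectName after this patch:
    // a null key yields "~"; any other key is URL-safe Base64 encoded.
    static string GetEncodedObjectName(string key)
    {
        if (key == null)
        {
            return "~";
        }
        string b64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(key));
        return b64.Replace('+', '-').Replace('/', '_');
    }

    static void Main()
    {
        Console.WriteLine(GetEncodedObjectName(null));        // "~"  -> server assigns the key
        Console.WriteLine(GetEncodedObjectName("dir/a.txt")); // "ZGlyL2EudHh0"
    }
}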
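
For reference, a minimal caller-side sketch of what the new tests exercise: a resumable v2 upload with key == null, after which the server assigns the object key (equal to the hash in UploadFileV2WithoutKeyTest above). Credentials, bucket, zone and the local file path are placeholders, and the usual Qiniu.Storage / Qiniu.Util / Qiniu.Http namespaces are assumed.

using System;
using Qiniu.Http;
using Qiniu.Storage;
using Qiniu.Util;

class UploadWithoutKeyExample
{
    static void Main()
    {
        // Placeholders: fill in real credentials, bucket and file path.
        Mac mac = new Mac("<AccessKey>", "<SecretKey>");

        PutPolicy putPolicy = new PutPolicy();
        putPolicy.Scope = "<Bucket>";
        putPolicy.SetExpires(3600);
        string token = Auth.CreateUploadToken(mac, putPolicy.ToJsonString());

        Config config = new Config();
        config.Zone = Zone.ZONE_CN_East;
        config.UseHttps = true;

        PutExtra extra = new PutExtra();
        extra.Version = "v2";                 // multipart upload v2
        extra.PartSize = 4 * 1024 * 1024;     // 4 MB parts

        ResumableUploader uploader = new ResumableUploader(config);

        // key == null: before this patch the v2 init request was built with an
        // empty object name and failed; now "~" is sent and the server names
        // the object by its hash.
        HttpResult result = uploader.UploadFile("<local file path>", null, token, extra);
        Console.WriteLine(result.ToString());
    }
}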