'Multiple large file upload to SP in angular
I am trying to upload multiple large files to SharePoint in my Angular project, and I've done it successfully. But my code uploads all the files at once to the server, which causes problems in the system. Now I am trying to rework the program to upload the files one at a time (sequentially), and I'm confused about how to do it properly. Can anyone help me understand it and do it the proper way? Part of my code is below.
upload.component.html
<div class="row p-3">
<div class="row mb-2">
<div class="col">
<input class="form-control" #selectedFile required type="file" id="formFile" (change)="onFileSelected($event)" [disabled]="uploading" accept="*/*" multiple>
</div>
<div class="col">
<button class="btn btn-primary" id="uploadFileBtn" type="button" (click)="onUploadFile()" [disabled]="fileSelected == false || uploading"><i
class="fas fa-upload"></i> Upload</button>
</div>
</div>
<div class="row">
<div class="col">
<span><strong>Status: </strong>
<span *ngIf="uploading"><i class="fas fa-spinner fa-pulse"></i></span>
{{progressText}}</span>
</div>
</div>
upload.component.ts
......
onFileSelected(event) {
this.files = event.srcElement.files;
console.log(event);
if (this.files) {
this.fileSelected = true;
}
this.progressText = "Click Upload button to start upload";
}
onUploadFile() {
this.uploading = true;
this.progressText = "Upload pending...";
if (!this.itemId) {
this.createSharepointFolder();
} else {
// this.uploadFile();
this.uploadMultipleFiles();
}
}
async uploadMultipleFiles() {
for (const file of this.files) {
console.log("Call uploadFile(files:File) to upload file one by one.");
console.log(file.name);
await this.uploadFile(file)
}
}
async uploadFile(currentFile: File) {
this.sessionUploadURL = '';
var file = currentFile;
console.log(file.name);
var fileNameArray = file.name.lastIndexOf('.');
var fileType = file.name.substring(fileNameArray + 1);
var fileName = file.name.substring(0, fileNameArray);
this.progressText = "Creating session...";
this.ms_graph.currentStatusMessage.subscribe(
status => {
this.progressText = status;
this.changeDetection.detectChanges();
});
this.ms_graph.uploadingStatus.subscribe(
status => {
this.uploading = status;
this.changeDetection.detectChanges();
});
this.ms_graph.uploadSuccessStatus.subscribe(
status => {
this.uploadSuccess = status;
if (this.uploadSuccess == true) {
this.toastr.success("File uploaded sucessfully", "Upload complete");
this.updatedParentRecordEvent.emit(this.parentRecord);
this.selectedFileField.nativeElement.value = ""; // Clear the value of the file select field
}
});
this.ms_graph.get_upload_session(fileType, fileName, this.itemId).subscribe(
(response: MSCreateUploadSession) => {
console.log(JSON.stringify(response));
this.sessionUploadURL = response.uploadUrl;
this.ms_graph.upload_chunks.call(this.ms_graph, file, this.sessionUploadURL);
},
(error) => {
console.error("Error get_upload_session() \n" + JSON.stringify(error));
throw (error);
}
);
this.fileSelected = false;
}
And upload_chunks method, readFragmentAsync method & upload_chunk method within upload.service.ts file.
upload.service.ts
/* function upload_chunks
After getting the uploadUrl, this function does the logic of chunking out
the fragments and sending the chunks to uploadChunk */
async upload_chunks(file: File, uploadUrl: string) {
console.log('upload_chunks() called');
this.changeUploadStatus(true);
this.changeUploadSuccessStatus(false);
this.changeStatusMessage("Uploading file...");
const ms_graph = this;
var reader = new FileReader();
// Variables for byte stream position
var position = 0;
var chunkLength = 320 * 1024;
console.log('File size is: ' + file.size);
var continueRead = true;
// var token = this.get_token();
while (continueRead) {
var chunk;
try {
continueRead = true;
//Try to read in the chunk
try {
let stopB = position + chunkLength;
console.log('Sending Asynchronous request to read in chunk bytes from position ' + position + ' to end ' + stopB)
chunk = await this.readFragmentAsync(file, position, stopB);
console.log("UploadChunks: Chunk read in of " + chunk.byteLength + " bytes.");
if (chunk.byteLength > 0) {
continueRead = true;
} else {
break;
}
console.log('Chunk bytes received = ' + chunk.byteLength);
} catch (e) {
console.log('Bytes Received from readFragmentAsync:: ' + e);
break;
}
// Try to upload the chunk.
try {
console.log('Request sent for uploadFragmentAsync');
// let res = MSCreateUploadSession;
let res;
res = await ms_graph.upload_chunk(chunk, uploadUrl, position, file.size);
let currentBytes = this.util.formatBytes(position);
let totalBytes = this.util.formatBytes(file.size);
// Check the response.
if (res[0] != 202 && res[0] != 201 && res[0] != 200)
throw ("Put operation did not return expected response");
if (res[0] === 201 || res[0] === 200) {
console.log("Reached last chunk of file. Status code is: " + res[0]);
continueRead = false;
}
else {
console.log("Continuing - Status Code is: " + res[0]);
position = Number(res[1].nextExpectedRanges[0].split('-')[0])
}
console.log('Response received from upload_chunk.');
this.changeStatusMessage(`Uploading file... (${currentBytes}/${totalBytes})`);
console.log(`Uploading file... (${currentBytes}/${totalBytes})`);
// console.log('Position is now ' + position);
} catch (e) {
console.log('Error occured when calling upload_chunk:' + e);
}
//
} catch (e) {
continueRead = false;
}
}
console.log("upload finished and worked!!");
this.changeStatusMessage("File uploaded sucessfully");
this.changeUploadStatus(false);
this.changeUploadSuccessStatus(true);
}
// Reads in the chunck and returns a promise.
readFragmentAsync(file, startB, stopB) {
let frag: string | ArrayBuffer | null;
const reader = new FileReader();
console.log('startBytes :' + startB + ' stopBytes :' + stopB)
var blob = file.slice(startB, stopB);
reader.readAsArrayBuffer(blob);
return new Promise((resolve, reject) => {
reader.onloadend = (event) => {
console.log("onloadend called " + reader.result?.toString);
if (reader.readyState == reader.DONE) {
frag = reader.result
resolve(frag);
}
};
})
}
// Upload each chunk using PUT
upload_chunk(chunk, uploadURL, position, totalLength) {//: Observable<any> {
let max = position + chunk.byteLength - 1;
let contentLength = position + chunk.byteLength;
console.log(chunk.byteLength);
return new Promise((resolve, reject) => {
console.log('uploadURL:: ' + uploadURL);
try {
console.log('Just before making the PUT call to uploadUrl.');
let crHeader = `bytes ${position}-${max}/${totalLength}`;
console.log('Content-Range header being set is : ' + crHeader);
request
.put(uploadURL)
.set({ 'Content-Range': crHeader})
// .set({ 'Authorization': 'Bearer ' + this.get_token() })
.send(chunk)
.end((err, res: any) => {
if (err) {
console.error(err);
reject(err);
return;
}
console.log(res.status);
console.log(res.body);
resolve([res.status, res.body]);
});
} catch (e) {
console.log('exception inside uploadFragmentAsync:: ' + e)
reject(e);
}
});
}
Solution 1:[1]
A perhaps easier approach would be to upload the files to Azure Blob Storage from Angular. Then, on the server side, you can access those files whenever you need them.
The backend API (C#) supports stream reading, byte[] and whole files.
Cost is not too large either, and if money is critical, you can easily delete the files after handling them on the backend.
The frontend API has an official npm package which you can use, and there is an updated guide in the Azure portal once you have created your first Azure Blob Storage account.
Also you have the benefit that the security of the file transfer is handled by the azure system.
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
Solution | Source |
---|---|
Solution 1 | Andlab |