mirror of
https://code.equilibrium.co.ao/ITO/doneit-web.git
synced 2026-04-19 04:57:52 +00:00
add file chunk upload, file validation, redirect to home page in case route doesn't exist, and refresh token interceptor
This commit is contained in:
@@ -0,0 +1,16 @@
|
||||
import { TestBed } from '@angular/core/testing';

import { ChunkService } from './chunk.service';

// Default Angular CLI smoke test: verifies ChunkService can be
// constructed through the root injector.
describe('ChunkService', () => {
  let service: ChunkService;

  beforeEach(() => {
    // Empty testing module — the service is providedIn: 'root', so the
    // injector can create it without any explicit providers.
    TestBed.configureTestingModule({});
    service = TestBed.inject(ChunkService);
  });

  it('should be created', () => {
    expect(service).toBeTruthy();
  });
});
|
||||
@@ -0,0 +1,140 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class ChunkService {
|
||||
|
||||
constructor() {
|
||||
window["ChunkService"] = this
|
||||
|
||||
// const a = this.generateChunkFile({} as any, {} as any)
|
||||
}
|
||||
|
||||
generateChunkFile(base64: string, chunkSizeInBytes: number): File[] {
|
||||
const byteCharacters = atob(base64); // Decode base64 data to binary
|
||||
const chunks: File[] = [];
|
||||
let start = 0;
|
||||
|
||||
function createChunk(): File {
|
||||
const chunkSize = Math.min(chunkSizeInBytes, byteCharacters.length - start);
|
||||
const chunkData = byteCharacters.slice(start, start + chunkSize);
|
||||
start += chunkSize;
|
||||
|
||||
// console.log({chunkData})
|
||||
|
||||
const chunkArray = new Uint8Array(chunkData.length);
|
||||
|
||||
|
||||
for (let i = 0; i < chunkData.length; i++) {
|
||||
chunkArray[i] = chunkData.charCodeAt(i);
|
||||
// console.log('chunkData.charCodeAt', chunkData.charCodeAt(i))
|
||||
}
|
||||
|
||||
// console.log({Uint8Array:"Uint8Array", chunkArray})
|
||||
|
||||
const blob = new Blob([chunkArray]);
|
||||
// console.log('blob')
|
||||
// console.log('File')
|
||||
return new File([blob], `chunk_${chunks.length + 1}`);
|
||||
}
|
||||
|
||||
while (start < byteCharacters.length) {
|
||||
chunks.push(createChunk());
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
generateChunkOfUint8Array(base64: string, chunkSizeInBytes: number): Uint8Array[] {
|
||||
const byteCharacters = atob(base64); // Decode base64 data to binary
|
||||
const chunks: Uint8Array[] = [];
|
||||
let start = 0;
|
||||
|
||||
function createChunk(): Uint8Array {
|
||||
const chunkSize = Math.min(chunkSizeInBytes, byteCharacters.length - start);
|
||||
const chunkData = byteCharacters.slice(start, start + chunkSize);
|
||||
start += chunkSize;
|
||||
|
||||
const chunkArray = new Uint8Array(chunkData.length);
|
||||
|
||||
|
||||
for (let i = 0; i < chunkData.length; i++) {
|
||||
chunkArray[i] = chunkData.charCodeAt(i);
|
||||
}
|
||||
|
||||
return chunkArray;
|
||||
}
|
||||
|
||||
while (start < byteCharacters.length) {
|
||||
chunks.push(createChunk());
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
async uploadChunk(file: File): Promise<any> {
|
||||
// Read and upload the file in chunks (as you've previously implemented)
|
||||
const chunkSize = 1024 * 500; // Adjust the chunk size as needed
|
||||
const chunks = [];
|
||||
let offset = 0;
|
||||
|
||||
let i = 0
|
||||
|
||||
while (offset < file.size) {
|
||||
console.log(offset)
|
||||
const chunk = file.slice(offset, offset + chunkSize);
|
||||
const reader = new FileReader();
|
||||
reader.onload = () => {
|
||||
chunks[i] = new Uint8Array(reader.result as ArrayBuffer)
|
||||
};
|
||||
reader.readAsArrayBuffer(chunk);
|
||||
offset += chunkSize;
|
||||
}
|
||||
|
||||
return chunks
|
||||
}
|
||||
|
||||
onFileSelect(event: any, chunkSizeInBytes):Promise<Blob[]> {
|
||||
const file:File = event.target.files[0];
|
||||
const filename = file.name;
|
||||
//const chunkSize = 1024 * 1024; // 1 MB chunks (adjust as needed)
|
||||
const chunkSize = chunkSizeInBytes
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// Read and upload chunks
|
||||
const fileReader = new FileReader();
|
||||
fileReader.onload = (e) => {
|
||||
const arrayBuffer = e.target.result as ArrayBuffer;
|
||||
const blob = new Blob([new Uint8Array(arrayBuffer)]);
|
||||
const totalChunks = Math.ceil(file.size / chunkSize);
|
||||
const chunks: Blob[] = []
|
||||
|
||||
for (let i = 1; i <= totalChunks; i++) {
|
||||
const start = (i - 1) * chunkSize;
|
||||
const end = i * chunkSize;
|
||||
const chunk = blob.slice(start, end);
|
||||
chunks.push(chunk)
|
||||
}
|
||||
|
||||
resolve(chunks)
|
||||
};
|
||||
|
||||
fileReader.readAsArrayBuffer(file);
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
generateChunkFromBase64(base64: string, chunkSizeInBytes: number) {
|
||||
const byteCharacters = atob(base64); // Decode base64 data to binary
|
||||
const chunkArray = [];
|
||||
|
||||
for (let offset = 0; offset < byteCharacters.length; offset += chunkSizeInBytes) {
|
||||
const chunkData = byteCharacters.slice(offset, offset + chunkSizeInBytes);
|
||||
chunkArray.push(chunkData);
|
||||
}
|
||||
|
||||
return chunkArray;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
import { TestBed } from '@angular/core/testing';

import { StreamService } from './stream.service';

// Default Angular CLI smoke test: verifies StreamService can be
// constructed through the root injector.
describe('StreamService', () => {
  let service: StreamService;

  beforeEach(() => {
    // Empty testing module — the service is providedIn: 'root'.
    // NOTE(review): StreamService injects HttpClient; this spec likely
    // needs HttpClientTestingModule here to pass — confirm when running.
    TestBed.configureTestingModule({});
    service = TestBed.inject(StreamService);
  });

  it('should be created', () => {
    expect(service).toBeTruthy();
  });
});
|
||||
@@ -0,0 +1,112 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
import { HttpClient, HttpHeaders, HttpEventType } from '@angular/common/http';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class StreamService {
|
||||
|
||||
constructor(
|
||||
private http: HttpClient,
|
||||
) {
|
||||
window["StreamService"] = this
|
||||
}
|
||||
|
||||
|
||||
async uploadFile() {
|
||||
const API_URL = 'http://localhost:3000/upload'; // Replace with your server URL
|
||||
const filePath = 'path/to/large-file.zip'; // Replace with the path to your file
|
||||
const fileName = 'my-file'; // Specify your desired filename
|
||||
const fileExtension = 'zip'; // Specify the file extension
|
||||
|
||||
const headers = new HttpHeaders()
|
||||
.append('X-File-Name', fileName)
|
||||
.append('X-File-Extension', fileExtension);
|
||||
|
||||
const file = await this.readFileInChunks(filePath);
|
||||
const chunkSize = 1024 * 1024; // 1 MB chunk size (adjust as needed)
|
||||
|
||||
for (let offset = 0; offset < file.length; offset += chunkSize) {
|
||||
const chunk = file.slice(offset, offset + chunkSize);
|
||||
// await this.uploadChunk(API_URL, chunk, headers);
|
||||
}
|
||||
|
||||
console.log('Upload completed.');
|
||||
}
|
||||
|
||||
async readFileInChunks(filePath: string): Promise<Uint8Array> {
|
||||
const response = await fetch(filePath);
|
||||
const reader = response.body.getReader();
|
||||
const chunks: Uint8Array[] = [];
|
||||
let done = false;
|
||||
|
||||
while (!done) {
|
||||
const { value, done: isDone } = await reader.read();
|
||||
if (!isDone) {
|
||||
chunks.push(value);
|
||||
}
|
||||
done = isDone;
|
||||
}
|
||||
|
||||
return new Uint8Array([].concat(...chunks.map((chunk) => Array.from(chunk))));
|
||||
}
|
||||
|
||||
async uploadChunk(url: string, chunks: Uint8Array[], fileName, fileExtension): Promise<void> {
|
||||
|
||||
let i = 1
|
||||
|
||||
console.log('123', chunks.length)
|
||||
for(const chunk of chunks) {
|
||||
try {
|
||||
|
||||
console.log("iterate")
|
||||
|
||||
const headers = new HttpHeaders()
|
||||
.append('X-File-Name', fileName)
|
||||
.append('X-File-Extension', fileExtension)
|
||||
.append('X-File-Content-Length', chunks.length.toString())
|
||||
.append('X-File-Index', i.toString())
|
||||
|
||||
await this.http.post('http://localhost:3001/upload', chunk.buffer, { headers, responseType: 'blob' }).toPromise();
|
||||
i++
|
||||
|
||||
} catch (error) {
|
||||
console.error('Upload error:', error);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
async uploadChunkNoLoop(url: string, chunk: Uint8Array, fileName, fileExtension, i, length): Promise<void> {
|
||||
|
||||
console.log("iterate")
|
||||
|
||||
const headers = new HttpHeaders()
|
||||
.append('X-File-Name', fileName)
|
||||
.append('X-File-Extension', fileExtension)
|
||||
.append('X-File-Content-Length', length)
|
||||
.append('X-File-Index', i.toString())
|
||||
|
||||
await this.http.post('http://localhost:3001/upload', chunk.buffer, { headers, responseType: 'blob' }).toPromise();
|
||||
|
||||
}
|
||||
|
||||
uploadChunk1(chunk: Blob, chunkNumber: number, totalChunks: number, filename: string) {
|
||||
|
||||
console.log(chunk)
|
||||
|
||||
const headers = new HttpHeaders()
|
||||
.append('X-File-Name', filename)
|
||||
.append('X-File-Content-Length', totalChunks.toString())
|
||||
.append('X-File-Index', chunkNumber.toString())
|
||||
|
||||
return this.http.post('http://localhost:3001/upload-chunk', Blob, { headers, responseType: 'blob' });
|
||||
}
|
||||
}
|
||||
|
||||
// const text = 'Hello, World00120301010asdf1002sdf 0fsdfasf0001230 12300!\n';
|
||||
// const base64 = btoa(text);
|
||||
|
||||
// let chunks = window["ChunkService"].generateChunkOfUint8Array(base64, 8)
|
||||
// window.StreamService.uploadChunk("", chunks, "peter12", "txt")
|
||||
Reference in New Issue
Block a user