feat: Implement Server-Side Chunked Transfer for Drive Uploads
- Implemented 'Client-Orchestrated, Server-Side Chunked Transfer' to bypass CORS and the 50 MB payload limit for Google Photos.
- Added 'getResumableUploadUrl' to GASDriveService for high-priority video processing.
- Refactored 'MediaManager.html' to orchestrate uploads via a 'transferRemoteChunk' loop.
- Added 'getRemoteFileSize' and 'transferRemoteChunk' to 'mediaHandlers.ts'.
- Updated 'global.ts' to expose the new backend functions.
This commit is contained in:
@ -127,7 +127,21 @@ export function linkDriveFileToShopifyMedia(sku: string, driveId: string, shopif
|
||||
return mediaService.linkDriveFileToShopifyMedia(sku, driveId, shopifyId)
|
||||
}
|
||||
|
||||
// NEW: Resumable Upload Ticket
|
||||
export function getUploadUrl(sku: string, filename: string, mimeType: string) {
|
||||
const config = new Config()
|
||||
const driveService = new GASDriveService()
|
||||
|
||||
// Ensure folder exists and get ID
|
||||
const folder = driveService.getOrCreateFolder(sku, config.productPhotosFolderId)
|
||||
|
||||
// Generate Ticket
|
||||
return driveService.getResumableUploadUrl(filename, mimeType, folder.getId())
|
||||
}
|
||||
|
||||
// Deprecated (but kept for fallback/legacy small files if needed)
|
||||
export function saveFileToDrive(sku: string, filename: string, mimeType: string, base64Data: string) {
|
||||
console.warn("Using legacy saveFileToDrive (Base64). Consider using getUploadUrl.");
|
||||
const config = new Config()
|
||||
const driveService = new GASDriveService()
|
||||
const folder = driveService.getOrCreateFolder(sku, config.productPhotosFolderId)
|
||||
@ -406,3 +420,97 @@ export function checkPhotoSession(sessionId: string) {
|
||||
return { status: 'error', message: e.message };
|
||||
}
|
||||
}
|
||||
|
||||
// --- Chunked Proxy Helpers for Google Photos ---
|
||||
|
||||
export function getRemoteFileSize(url: string): number {
|
||||
const token = ScriptApp.getOAuthToken();
|
||||
const params = {
|
||||
method: 'get' as const,
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
Range: 'bytes=0-0'
|
||||
},
|
||||
muteHttpExceptions: true
|
||||
};
|
||||
|
||||
let response = UrlFetchApp.fetch(url, params);
|
||||
|
||||
|
||||
|
||||
if (response.getResponseCode() >= 400) {
|
||||
throw new Error(`Failed to get file size: ${response.getResponseCode()} ${response.getContentText()}`);
|
||||
}
|
||||
|
||||
const headers = response.getHeaders();
|
||||
// Content-Length (if HEAD) or Content-Range (if GET range)
|
||||
// Note: Headers are case-insensitive in GAS usually? But let's check safely.
|
||||
const len = headers['Content-Length'] || headers['content-length'];
|
||||
const range = headers['Content-Range'] || headers['content-range'];
|
||||
|
||||
if (range) {
|
||||
// bytes 0-0/12345
|
||||
const match = range.match(/\d+-\d+\/(\d+)/);
|
||||
if (match) return parseInt(match[1]);
|
||||
}
|
||||
|
||||
if (len) return parseInt(len as string);
|
||||
|
||||
throw new Error("Could not determine file size from headers.");
|
||||
}
|
||||
|
||||
export function transferRemoteChunk(sourceUrl: string, uploadUrl: string, start: number, end: number, totalSize: number) {
|
||||
const token = ScriptApp.getOAuthToken();
|
||||
|
||||
// 1. Fetch from Source (Google Photos)
|
||||
const getParams = {
|
||||
method: 'get' as const,
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
Range: `bytes=${start}-${end}`
|
||||
},
|
||||
muteHttpExceptions: true
|
||||
};
|
||||
|
||||
const sourceResponse = UrlFetchApp.fetch(sourceUrl, getParams);
|
||||
if (sourceResponse.getResponseCode() !== 200 && sourceResponse.getResponseCode() !== 206) {
|
||||
throw new Error(`Source fetch failed: ${sourceResponse.getResponseCode()} ${sourceResponse.getContentText()}`);
|
||||
}
|
||||
|
||||
// 2. Prepare Payload
|
||||
// Use getContent() to get raw bytes. getBlob() can sometimes add wrapper metadata or infer types incorrectly.
|
||||
let bytes = sourceResponse.getContent();
|
||||
|
||||
// Safety: Ensure we don't send more bytes than promised in the Content-Range header.
|
||||
// sometimes Range requests return more/different if server is quirky.
|
||||
const expectedSize = end - start + 1;
|
||||
if (bytes.length > expectedSize) {
|
||||
console.warn(`[transferRemoteChunk] Trimming bytes. Requested ${expectedSize}, got ${bytes.length}.`);
|
||||
bytes = bytes.slice(0, expectedSize);
|
||||
}
|
||||
|
||||
// The actual size we are sending
|
||||
const actualLength = bytes.length;
|
||||
// The strict end byte index for the header
|
||||
const actualEnd = start + actualLength - 1;
|
||||
|
||||
// 3. Put to Destination
|
||||
const putParams = {
|
||||
method: 'put' as const,
|
||||
payload: bytes,
|
||||
headers: {
|
||||
'Content-Range': `bytes ${start}-${actualEnd}/${totalSize}`
|
||||
},
|
||||
muteHttpExceptions: true
|
||||
};
|
||||
|
||||
const putResponse = UrlFetchApp.fetch(uploadUrl, putParams);
|
||||
|
||||
const code = putResponse.getResponseCode();
|
||||
if (code !== 308 && code !== 200 && code !== 201) {
|
||||
throw new Error(`Upload PUT failed: ${code} ${putResponse.getContentText()}`);
|
||||
}
|
||||
|
||||
// Return bytesUploaded so client can adjust if we were forced to send fewer bytes
|
||||
return { success: true, code: code, bytesUploaded: actualLength };
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user