feat: Implement Server-Side Chunked Transfer for Drive Uploads

- Implemented 'Client-Orchestrated, Server-Side Chunked Transfer' to bypass CORS and 50MB limits for Google Photos.
- Added 'getResumableUploadUrl' to GASDriveService for high-priority video processing.
- Refactored 'MediaManager.html' to orchestrate uploads using 'transferRemoteChunk' loop.
- Added 'getRemoteFileSize' and 'transferRemoteChunk' to 'mediaHandlers.ts'.
- Updated 'global.ts' to expose new backend functions.
This commit is contained in:
Ben Miller
2025-12-29 22:08:21 -07:00
parent bade8a3020
commit ebc1a39ce3
4 changed files with 315 additions and 30 deletions

View File

@@ -1150,18 +1150,44 @@
},
// Uploads user-selected files to Drive.
// NOTE(review): this span is a rendered diff with removed and added lines
// interleaved (no +/- markers survived extraction). The FileReader/Base64
// path below appears to be the OLD implementation and the XHR resumable-PUT
// path the NEW one — confirm against the repository before treating this
// as runnable code.
handleFiles(fileList) {
// NOTE(review): old (removed) path starts here — read file as Base64 and
// push it through google.script.run in one call.
Array.from(fileList).forEach(file => {
const reader = new FileReader();
reader.onload = (e) => {
const data = e.target.result.split(',')[1]; // Base64
if (fileList.length === 0) return;
google.script.run
.withSuccessHandler(() => {
this.loadMedia();
})
.saveFileToDrive(state.sku, file.name, file.type, data);
};
reader.readAsDataURL(file);
// NOTE(review): new (added) path starts here — per-file resumable upload
// ticket from the server, then a direct browser PUT of the raw File blob.
let processed = 0;
const total = fileList.length;
ui.setLoadingState(true);
ui.logStatus('upload', `Starting upload of ${total} files...`, 'info');
Array.from(fileList).forEach(file => {
// Request Upload Ticket
google.script.run
.withSuccessHandler(uploadUrl => {
ui.logStatus('upload', `Uploading ${file.name}...`, 'info');
const xhr = new XMLHttpRequest();
xhr.open('PUT', uploadUrl, true);
xhr.onload = () => {
if (xhr.status === 200 || xhr.status === 201) {
ui.logStatus('upload', `Uploaded ${file.name}`, 'success');
// Count completions; refresh the media list only once all files land.
processed++;
if (processed === total) {
ui.logStatus('done', 'All uploads complete. Refreshing...', 'success');
this.loadMedia();
}
} else {
ui.logStatus('error', `Upload failed for ${file.name}: ${xhr.status}`, 'error');
}
};
xhr.onerror = () => {
ui.logStatus('error', `Network error uploading ${file.name}`, 'error');
};
// Determine mime from file object, default to octet-stream
xhr.setRequestHeader('Content-Type', file.type || 'application/octet-stream');
xhr.send(file);
})
.withFailureHandler(e => {
ui.logStatus('error', `Failed to initiate upload for ${file.name}: ${e.message}`, 'error');
})
.getUploadUrl(state.sku, file.name, file.type || 'application/octet-stream');
});
},
@@ -1212,34 +1238,139 @@
// Imports picked Google Photos items into Drive.
// NOTE(review): like the hunk above, this is a rendered diff — old and new
// lines are interleaved without +/- markers (e.g. the duplicate
// const/let `filename` declarations, the old success handler, and the
// stranded `.importFromPicker(...)` line). Literal `$(unknown)` tokens are
// extraction garbling of `${...}` interpolations (presumably
// `${filename}`) — TODO confirm against the repository.
processPhotoItems(items) {
let done = 0;
const total = items.length;
ui.logStatus('import', `Processing ${total} items from Google Photos...`, 'info');
items.forEach(item => {
console.log("[MediaManager] Processing Item:", JSON.stringify(item));
// The API returns nested 'mediaFile' object for actual file details
// Extract Info
const mediaFile = item.mediaFile || item;
const url = mediaFile.baseUrl || item.baseUrl;
// NOTE(review): the next two declarations are an old/new diff pair —
// `const filename` (old) vs `let filename` with a timestamp fallback (new).
const filename = mediaFile.filename || item.filename;
let filename = mediaFile.filename || item.filename || `photo_${Date.now()}.jpg`;
let mimeType = mediaFile.mimeType || item.mimeType;
console.log(`[MediaManager] Extracted: URL=${url ? 'Yes' : 'No'}, Mime=${mimeType}, Name=$(unknown)`);
// Force video mimeType if metadata indicates video (Critical for backend =dv param)
// Correction for Video Mime
if (item.mediaMetadata && item.mediaMetadata.video) {
console.log("[MediaManager] Metadata indicates VIDEO. Forcing video/mp4.");
mimeType = 'video/mp4';
if (!filename.endsWith('.mp4')) filename = filename.split('.')[0] + '.mp4';
}
// NOTE(review): old (removed) handler — single server-side importFromPicker
// call; its matching `.importFromPicker(...)` line appears stranded below.
google.script.run
.withSuccessHandler(() => {
done++;
if (done === items.length) {
ui.updatePhotoStatus("Done!");
controller.loadMedia();
setTimeout(() => ui.closePhotoSession(), 2000);
}
// Decide: Video vs Image URL parameter
let fetchUrl = url;
if (url.includes("googleusercontent.com")) {
// =dv for video download, =d for image download
if (mimeType.startsWith('video/')) {
if (!fetchUrl.includes('=dv')) fetchUrl += '=dv';
} else {
if (!fetchUrl.includes('=d')) fetchUrl += '=d';
}
}
// Helper: Upload Blob to Drive
const uploadBlob = (blob) => {
google.script.run
.withSuccessHandler(uploadUrl => {
const xhr = new XMLHttpRequest();
xhr.open('PUT', uploadUrl, true);
xhr.onload = () => {
if (xhr.status === 200 || xhr.status === 201) {
ui.logStatus('success', `Imported $(unknown)`, 'success');
done++;
if (done === total) {
ui.updatePhotoStatus("Done!");
controller.loadMedia();
setTimeout(() => ui.closePhotoSession(), 2000);
}
} else {
ui.logStatus('error', `Upload failed for $(unknown): ${xhr.status}`, 'error');
}
};
xhr.onerror = () => ui.logStatus('error', `Network error uploading $(unknown)`, 'error');
// Important: Use Blob's type
xhr.setRequestHeader('Content-Type', blob.type);
xhr.send(blob);
})
.withFailureHandler(e => ui.logStatus('error', `Ticket failed for $(unknown): ${e.message}`, 'error'))
.getUploadUrl(state.sku, filename, mimeType);
};
// 1. Try Client-Side Fetch (Direct Transfer)
console.log(`[MediaManager] Attempting client fetch for $(unknown)`);
fetch(fetchUrl, {
headers: { 'Authorization': `Bearer ${state.token}` }
})
.then(res => {
if (!res.ok) throw new Error(`Client fetch failed: ${res.status}`);
return res.blob();
})
// NOTE(review): stranded old (removed) line from the previous implementation.
.importFromPicker(state.sku, null, mimeType, filename, url);
.then(blob => {
console.log(`[MediaManager] Client fetch success. Size: ${blob.size}`);
// Fix blob type if needed
const finalBlob = blob.slice(0, blob.size, mimeType);
uploadBlob(finalBlob);
})
.catch(err => {
console.warn(`[MediaManager] Client fetch failed (${err.message}). Switching to Server-Side Transfer.`);
// 2. Fallback: Server-Side Transfer (Client Orchestrated)
// This bypasses CORS and keeps data cloud-side (Photos -> Server -> Drive)
const CHUNK_SIZE = 10 * 1024 * 1024; // 10MB chunks (Safe for Server Transfer)
// Step A: Get Total Size from Server
google.script.run
.withSuccessHandler(totalSize => {
console.log(`[MediaManager] Remote size: ${totalSize}`);
// Step B: Get Resumable Upload Ticket
google.script.run
.withSuccessHandler(uploadUrl => {
let start = 0;
// Sequential chunk loop driven by server round-trips: each success
// handler advances the cursor and re-invokes itself until EOF.
const transferNextChunk = () => {
if (start >= totalSize) {
// Done!
ui.logStatus('success', `Imported $(unknown)`, 'success');
done++;
if (done === items.length) {
ui.updatePhotoStatus("Done!");
controller.loadMedia();
setTimeout(() => ui.closePhotoSession(), 2000);
}
return;
}
const end = Math.min(start + CHUNK_SIZE - 1, totalSize - 1);
ui.logStatus('import', `Transferring chunk ${Math.round(start / 1024 / 1024)}MB / ${Math.round(totalSize / 1024 / 1024)}MB...`, 'info');
// Step C: Trigger Server-Side Transfer
google.script.run
.withSuccessHandler(result => {
// Result { success: true, code: 308/200, bytesUploaded: number }
if (result.bytesUploaded) {
start = start + result.bytesUploaded; // Advance by ACTUAL amount
} else {
// Fallback for old API if needed, or if exact
start = end + 1;
}
transferNextChunk(); // Recurse
})
.withFailureHandler(e => ui.logStatus('error', `Transfer failed: ${e.message}`, 'error'))
.transferRemoteChunk(fetchUrl, uploadUrl, start, end, totalSize);
};
// Start Loop
transferNextChunk();
})
.withFailureHandler(e => ui.logStatus('error', `Ticket failed: ${e.message}`, 'error'))
.getUploadUrl(state.sku, filename, mimeType);
})
.withFailureHandler(e => {
ui.logStatus('error', `Cannot transfer $(unknown): ${e.message}`, 'error');
})
.getRemoteFileSize(fetchUrl);
});
});
},

View File

@ -23,7 +23,7 @@ import { fillProductFromTemplate } from "./fillProductFromTemplate"
import { showSidebar, getQueueStatus, setQueueEnabled, deleteEdit, pushEdit } from "./sidebar"
import { checkRecentSales, reconcileSalesHandler } from "./salesSync"
import { installSalesSyncTrigger } from "./triggers"
import { showMediaManager, getSelectedProductInfo, getMediaForSku, saveFileToDrive, saveMediaChanges, getMediaDiagnostics, getPickerConfig, importFromPicker, debugScopes, createPhotoSession, checkPhotoSession, debugFolderAccess, linkDriveFileToShopifyMedia } from "./mediaHandlers"
import { showMediaManager, getSelectedProductInfo, getMediaForSku, saveFileToDrive, saveMediaChanges, getMediaDiagnostics, getPickerConfig, importFromPicker, debugScopes, createPhotoSession, checkPhotoSession, debugFolderAccess, linkDriveFileToShopifyMedia, getUploadUrl, getRemoteFileSize, transferRemoteChunk } from "./mediaHandlers"
import { runSystemDiagnostics } from "./verificationSuite"
// prettier-ignore
@ -65,3 +65,8 @@ import { runSystemDiagnostics } from "./verificationSuite"
// Expose server-side handlers on the Apps Script global object so that
// google.script.run (and clasp's bundled entry points) can reach them.
;(global as any).checkPhotoSession = checkPhotoSession
;(global as any).debugFolderAccess = debugFolderAccess
;(global as any).linkDriveFileToShopifyMedia = linkDriveFileToShopifyMedia
// New chunked-transfer endpoints for the MediaManager upload flow.
;(global as any).getUploadUrl = getUploadUrl
;(global as any).getRemoteFileSize = getRemoteFileSize
;(global as any).transferRemoteChunk = transferRemoteChunk

View File

@ -127,7 +127,21 @@ export function linkDriveFileToShopifyMedia(sku: string, driveId: string, shopif
return mediaService.linkDriveFileToShopifyMedia(sku, driveId, shopifyId)
}
// NEW: Resumable Upload Ticket
export function getUploadUrl(sku: string, filename: string, mimeType: string) {
const config = new Config()
const driveService = new GASDriveService()
// Ensure folder exists and get ID
const folder = driveService.getOrCreateFolder(sku, config.productPhotosFolderId)
// Generate Ticket
return driveService.getResumableUploadUrl(filename, mimeType, folder.getId())
}
// Deprecated (but kept for fallback/legacy small files if needed)
export function saveFileToDrive(sku: string, filename: string, mimeType: string, base64Data: string) {
console.warn("Using legacy saveFileToDrive (Base64). Consider using getUploadUrl.");
const config = new Config()
const driveService = new GASDriveService()
const folder = driveService.getOrCreateFolder(sku, config.productPhotosFolderId)
@ -406,3 +420,97 @@ export function checkPhotoSession(sessionId: string) {
return { status: 'error', message: e.message };
}
}
// --- Chunked Proxy Helpers for Google Photos ---
/**
 * Determines the total byte size of a remote file (e.g. a Google Photos
 * baseUrl) without downloading it, by issuing a one-byte ranged GET and
 * inspecting the response headers.
 *
 * @param url Remote file URL; fetched with the script's OAuth token.
 * @returns Total size in bytes.
 * @throws Error if the request fails (HTTP >= 400) or neither
 *         Content-Range nor Content-Length reveals the size.
 */
export function getRemoteFileSize(url: string): number {
  const response = UrlFetchApp.fetch(url, {
    method: 'get' as const,
    headers: {
      Authorization: `Bearer ${ScriptApp.getOAuthToken()}`,
      // Ask for a single byte; a range-aware server answers 206 with
      // "Content-Range: bytes 0-0/<total>".
      Range: 'bytes=0-0'
    },
    muteHttpExceptions: true
  });
  if (response.getResponseCode() >= 400) {
    throw new Error(`Failed to get file size: ${response.getResponseCode()} ${response.getContentText()}`);
  }
  // Apps Script does not normalize header casing, so probe both spellings.
  const headers = response.getHeaders();
  const contentRange = headers['Content-Range'] || headers['content-range'];
  if (contentRange) {
    // Header shape: "bytes 0-0/12345" — the total follows the slash.
    const match = contentRange.match(/\d+-\d+\/(\d+)/);
    if (match) return parseInt(match[1]);
  }
  // Servers that ignored the Range header reply 200 with the full length.
  const contentLength = headers['Content-Length'] || headers['content-length'];
  if (contentLength) return parseInt(contentLength as string);
  throw new Error("Could not determine file size from headers.");
}
/**
 * Server-side relay of one byte range: downloads bytes [start, end] of
 * sourceUrl (Google Photos) and PUTs them into an existing Drive
 * resumable-upload session, keeping the data cloud-side end to end.
 *
 * @returns { success, code, bytesUploaded } — code is 308 while the
 *          session is incomplete and 200/201 on the final chunk;
 *          bytesUploaded is the byte count actually forwarded, so the
 *          caller can advance its cursor by the real amount.
 * @throws Error if either the source fetch or the destination PUT fails.
 */
export function transferRemoteChunk(sourceUrl: string, uploadUrl: string, start: number, end: number, totalSize: number) {
  // --- 1. Pull the requested range from the source ---
  const sourceResponse = UrlFetchApp.fetch(sourceUrl, {
    method: 'get' as const,
    headers: {
      Authorization: `Bearer ${ScriptApp.getOAuthToken()}`,
      Range: `bytes=${start}-${end}`
    },
    muteHttpExceptions: true
  });
  // 206 = range honored; some servers answer 200 with the bytes anyway.
  if (sourceResponse.getResponseCode() !== 200 && sourceResponse.getResponseCode() !== 206) {
    throw new Error(`Source fetch failed: ${sourceResponse.getResponseCode()} ${sourceResponse.getContentText()}`);
  }

  // --- 2. Normalize the payload ---
  // getContent() yields raw bytes; getBlob() can add wrapper metadata or
  // re-infer the content type.
  let bytes = sourceResponse.getContent();
  const expectedSize = end - start + 1;
  // Never forward more bytes than the Content-Range header below promises,
  // even if a quirky server returned extra data for the range.
  if (bytes.length > expectedSize) {
    console.warn(`[transferRemoteChunk] Trimming bytes. Requested ${expectedSize}, got ${bytes.length}.`);
    bytes = bytes.slice(0, expectedSize);
  }
  const actualLength = bytes.length;          // bytes actually being sent
  const actualEnd = start + actualLength - 1; // inclusive end index for the header

  // --- 3. Push the chunk into the resumable session ---
  const putResponse = UrlFetchApp.fetch(uploadUrl, {
    method: 'put' as const,
    payload: bytes,
    headers: {
      'Content-Range': `bytes ${start}-${actualEnd}/${totalSize}`
    },
    muteHttpExceptions: true
  });
  const code = putResponse.getResponseCode();
  // 308 = "Resume Incomplete" (more chunks expected); 200/201 = finalized.
  if (code !== 308 && code !== 200 && code !== 201) {
    throw new Error(`Upload PUT failed: ${code} ${putResponse.getContentText()}`);
  }
  // Report the true byte count so the client can adjust its cursor.
  return { success: true, code: code, bytesUploaded: actualLength };
}

View File

@ -99,4 +99,45 @@ export class GASDriveService implements IDriveService {
return {}
}
}
/**
 * Opens a Drive v3 resumable-upload session for a new file in the given
 * folder and returns the session URI (delivered via the Location header)
 * that subsequent byte PUTs must target.
 *
 * @throws Error if the session cannot be initiated or the Location
 *         header is missing from a successful response.
 */
getResumableUploadUrl(filename: string, mimeType: string, folderId: string): string {
  // File metadata is created up-front; the bytes arrive later through the session.
  const fileMeta = {
    name: filename,
    mimeType: mimeType,
    parents: [folderId]
  };
  // v3 upload endpoint — cleaner for resumable sessions than v2.
  const response = UrlFetchApp.fetch(
    'https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable',
    {
      method: 'post' as const,
      contentType: 'application/json',
      headers: {
        Authorization: `Bearer ${ScriptApp.getOAuthToken()}`
      },
      payload: JSON.stringify(fileMeta),
      muteHttpExceptions: true
    }
  );
  if (response.getResponseCode() !== 200) {
    throw new Error(`Failed to initiate upload: ${response.getContentText()}`);
  }
  // Apps Script does not normalize header casing; check both spellings.
  const responseHeaders = response.getHeaders();
  const sessionUri = responseHeaders['Location'] || responseHeaders['location'];
  if (!sessionUri) {
    throw new Error("Resumable upload initiated but no Location header found.");
  }
  return sessionUri as string;
}
}