// Save workspace changes
import { useCallback, useReducer, useRef } from 'react'
|
||||
import { emitUploadEvent } from '../../lib/uploadAnalytics'
|
||||
import * as uploadEndpoints from '../../lib/uploadEndpoints'
|
||||
import { mapUploadErrorNotice, mapUploadResultNotice } from '../../lib/uploadNotices'
|
||||
|
||||
// ─── Constants ──────────────────────────────────────────────────────────────
|
||||
const DEFAULT_CHUNK_SIZE_BYTES = 5 * 1024 * 1024
|
||||
const DEFAULT_CHUNK_REQUEST_TIMEOUT_MS = 45000
|
||||
const MIN_CHUNK_SIZE_BYTES = 256 * 1024
|
||||
const POLL_INTERVAL_MS = 2000
|
||||
|
||||
// ─── State machine ───────────────────────────────────────────────────────────
|
||||
// Every state the upload machine can occupy. Each key maps to an identical
// string value so states can be compared and serialized directly.
export const machineStates = Object.fromEntries(
  [
    'idle',
    'initializing',
    'uploading',
    'finishing',
    'processing',
    'ready_to_publish',
    'publishing',
    'complete',
    'error',
    'cancelled',
  ].map((name) => [name, name])
)
|
||||
|
||||
// Initial reducer snapshot: idle machine, no session, no progress, no errors.
const initialMachineState = {
  // Current machine state (one of machineStates).
  state: machineStates.idle,
  // Overall progress percentage (0-100) across the whole flow.
  progress: 0,
  // Primary upload session id returned by the init endpoint.
  sessionId: null,
  // Token authorizing chunk/finish/status calls for the session.
  uploadToken: null,
  // Latest backend processing status string (e.g. 'processing', 'ready').
  processingStatus: null,
  // True while a cancel request is in flight.
  isCancelling: false,
  // Human-readable error message; empty string when there is no error.
  error: '',
  // Last user-level action: 'start' | 'publish' | 'schedule' | 'cancel'.
  lastAction: null,
  // Slug of the published artwork, when the publish response provides one.
  slug: null,
}
|
||||
|
||||
// Pure transition table: each action type maps to a function that produces the
// next state. Unknown action types leave the state untouched.
const machineTransitions = {
  INIT_START: (state) => ({ ...state, state: machineStates.initializing, progress: 0, error: '', isCancelling: false, lastAction: 'start' }),
  INIT_SUCCESS: (state, action) => ({ ...state, sessionId: action.sessionId, uploadToken: action.uploadToken, error: '' }),
  UPLOAD_START: (state) => ({ ...state, state: machineStates.uploading, progress: 1, error: '' }),
  UPLOAD_PROGRESS: (state, action) => ({ ...state, progress: Math.max(1, Math.min(95, action.progress)), error: '' }),
  FINISH_START: (state) => ({ ...state, state: machineStates.finishing, progress: Math.max(state.progress, 96), error: '' }),
  FINISH_SUCCESS: (state, action) => ({ ...state, state: machineStates.processing, progress: 100, processingStatus: action.processingStatus ?? 'processing', error: '' }),
  PROCESSING_STATUS: (state, action) => ({ ...state, processingStatus: action.processingStatus ?? state.processingStatus, error: '' }),
  READY_TO_PUBLISH: (state) => ({ ...state, state: machineStates.ready_to_publish, processingStatus: 'ready', error: '' }),
  PUBLISH_START: (state) => ({ ...state, state: machineStates.publishing, error: '', lastAction: 'publish' }),
  PUBLISH_SUCCESS: (state, action) => ({ ...state, state: machineStates.complete, error: '', slug: action.slug ?? state.slug }),
  SCHEDULED: (state, action) => ({ ...state, state: machineStates.complete, error: '', lastAction: 'schedule', slug: action.slug ?? state.slug }),
  CANCEL_START: (state) => ({ ...state, isCancelling: true, error: '', lastAction: 'cancel' }),
  CANCELLED: (state) => ({ ...state, state: machineStates.cancelled, isCancelling: false, error: '' }),
  ERROR: (state, action) => ({ ...state, state: machineStates.error, isCancelling: false, error: action.error || 'Upload failed.' }),
  RESET_MACHINE: () => ({ ...initialMachineState }),
}

/**
 * Reducer for the upload state machine.
 *
 * @param {object} state current machine state
 * @param {{ type: string }} action dispatched action
 * @returns {object} the next machine state (same reference when the action is unknown)
 */
function machineReducer(state, action) {
  // Own-property guard so inherited keys (e.g. 'toString') never match.
  if (!Object.prototype.hasOwnProperty.call(machineTransitions, action.type)) {
    return state
  }
  return machineTransitions[action.type](state, action)
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
/**
 * Converts a loaded/total byte pair into an integer percentage clamped to
 * the 0-100 range. Returns 0 when the total is missing or non-positive.
 *
 * @param {number} loaded bytes transferred so far
 * @param {number} total total bytes expected
 * @returns {number} rounded percentage in [0, 100]
 */
function toPercent(loaded, total) {
  const hasValidTotal = Number.isFinite(total) && total > 0
  if (!hasValidTotal) return 0
  const rounded = Math.round((loaded / total) * 100)
  if (rounded < 0) return 0
  if (rounded > 100) return 100
  return rounded
}
|
||||
|
||||
/**
 * Renders a byte count as a human-readable chunk size for user notices.
 * Values of one mebibyte or more are shown in MB (with one decimal unless the
 * value is a whole number of MB); smaller positive values are shown in KB,
 * never below "1 KB". Non-finite or non-positive input yields "0 KB".
 *
 * @param {number} bytes chunk size in bytes
 * @returns {string} formatted size, e.g. "5 MB", "2.5 MB", "512 KB"
 */
function formatChunkSize(bytes) {
  const KIB = 1024
  const MIB = KIB * KIB
  if (!Number.isFinite(bytes) || bytes <= 0) return '0 KB'
  if (bytes < MIB) {
    const kib = Math.round(bytes / KIB)
    return `${kib < 1 ? 1 : kib} KB`
  }
  const decimals = bytes % MIB === 0 ? 0 : 1
  return `${(bytes / MIB).toFixed(decimals)} MB`
}
|
||||
|
||||
/**
 * Reports whether an HTTP error is a 413 "Payload Too Large" response,
 * which signals that the chunk size should be reduced.
 *
 * @param {object} error request error, possibly carrying a `response.status`
 * @returns {boolean} true when the response status is 413
 */
function isRequestTooLarge(error) {
  const status = Number(error?.response?.status || 0)
  return status === 413
}
|
||||
|
||||
/**
 * Extracts a normalized (lower-cased) processing status from a backend
 * payload, preferring `processing_state` over `status`, and defaulting to
 * 'processing' when neither field is present.
 *
 * @param {object} payload status response body
 * @returns {string} lower-cased status, never empty
 */
function getProcessingValue(payload) {
  const raw = payload?.processing_state || payload?.status || ''
  const normalized = String(raw).toLowerCase()
  return normalized === '' ? 'processing' : normalized
}
|
||||
|
||||
// Backend statuses that mean processing has finished and publish may begin.
const READY_PUBLISH_STATUSES = new Set(['ready', 'processed', 'publish_ready', 'published', 'complete'])

/**
 * Checks whether a backend processing status means the artwork is ready to
 * publish. Comparison is case-insensitive; nullish input is never ready.
 *
 * @param {string|null|undefined} status raw status value
 * @returns {boolean}
 */
export function isReadyToPublishStatus(status) {
  const normalized = String(status || '').toLowerCase()
  return READY_PUBLISH_STATUSES.has(normalized)
}
|
||||
|
||||
// ─── Hook ─────────────────────────────────────────────────────────────────────
|
||||
/**
 * useUploadMachine
 *
 * Manages the full upload state machine lifecycle:
 *   init → chunk → finish → poll → publish
 *
 * @param {object} opts
 * @param {File|null} opts.primaryFile main file (the archive when isArchive, otherwise the image)
 * @param {File[]} [opts.screenshots] screenshot files; archive uploads require at least one
 * @param {number} [opts.selectedScreenshotIndex] index of the screenshot used as the preview
 * @param {boolean} opts.canStartUpload
 * @param {string} opts.primaryType 'image' | 'archive' | 'unknown'
 * @param {boolean} opts.isArchive
 * @param {number|null} [opts.initialDraftId] existing draft to reuse instead of creating one
 * @param {object} opts.metadata { title, description, tags, rightsAccepted, ... }
 * @param {number} [opts.chunkSize] preferred chunk size in bytes
 * @param {number} [opts.chunkRequestTimeoutMs] per-chunk request timeout in milliseconds
 * @param {function} [opts.onArtworkCreated] called with artworkId after draft creation
 * @param {function} [opts.onNotice] called with { type, message } notices for the UI
 */
export default function useUploadMachine({
  primaryFile,
  screenshots = [],
  selectedScreenshotIndex = 0,
  canStartUpload,
  primaryType,
  isArchive,
  initialDraftId = null,
  metadata,
  chunkSize = DEFAULT_CHUNK_SIZE_BYTES,
  chunkRequestTimeoutMs = DEFAULT_CHUNK_REQUEST_TIMEOUT_MS,
  onArtworkCreated,
  onNotice,
}) {
  const [machine, dispatchMachine] = useReducer(machineReducer, initialMachineState)

  // Active polling interval id (null when not polling).
  const pollingTimerRef = useRef(null)
  // Every in-flight AbortController, so cancel/reset can abort them all.
  const requestControllersRef = useRef(new Set())
  // Guards against double-submit of publish requests.
  const publishLockRef = useRef(false)
  // Session created for the archive file (archive uploads only).
  const archiveSessionRef = useRef({ sessionId: null, uploadToken: null })
  // Sessions created for non-preview screenshots (archive uploads only).
  const additionalScreenshotSessionsRef = useRef([])

  // Resolved artwork id (draft) created at the start of the upload.
  const resolvedArtworkIdRef = useRef(
    (() => {
      const parsed = Number(initialDraftId)
      return Number.isFinite(parsed) && parsed > 0 ? Math.floor(parsed) : null
    })()
  )

  // Sanitize caller-provided tuning values, falling back to defaults.
  const effectiveChunkSize = (() => {
    const parsed = Number(chunkSize)
    return Number.isFinite(parsed) && parsed > 0 ? Math.floor(parsed) : DEFAULT_CHUNK_SIZE_BYTES
  })()
  const effectiveChunkRequestTimeoutMs = (() => {
    const parsed = Number(chunkRequestTimeoutMs)
    return Number.isFinite(parsed) && parsed > 0 ? Math.floor(parsed) : DEFAULT_CHUNK_REQUEST_TIMEOUT_MS
  })()
  // Current chunk size; halved adaptively when the server responds with 413.
  const adaptiveChunkSizeRef = useRef(effectiveChunkSize)

  // ── Controller registry ────────────────────────────────────────────────────
  const registerController = useCallback(() => {
    const controller = new AbortController()
    requestControllersRef.current.add(controller)
    return controller
  }, [])

  const unregisterController = useCallback((controller) => {
    if (!controller) return
    requestControllersRef.current.delete(controller)
  }, [])

  const abortAllRequests = useCallback(() => {
    requestControllersRef.current.forEach((c) => c.abort())
    requestControllersRef.current.clear()
  }, [])

  const clearArchiveSession = useCallback(() => {
    archiveSessionRef.current = { sessionId: null, uploadToken: null }
  }, [])

  const setArchiveSession = useCallback((sessionId, uploadToken) => {
    archiveSessionRef.current = { sessionId, uploadToken }
  }, [])

  const clearAdditionalScreenshotSessions = useCallback(() => {
    additionalScreenshotSessionsRef.current = []
  }, [])

  const setAdditionalScreenshotSessions = useCallback((sessions) => {
    additionalScreenshotSessionsRef.current = Array.isArray(sessions) ? sessions : []
  }, [])

  // ── Polling ────────────────────────────────────────────────────────────────
  const clearPolling = useCallback(() => {
    if (pollingTimerRef.current) {
      window.clearInterval(pollingTimerRef.current)
      pollingTimerRef.current = null
    }
  }, [])

  // Fetches the backend processing status for a session. The token is sent
  // both as a header and a query param (backend accepts either).
  const fetchProcessingStatus = useCallback(async (sessionId, uploadToken, signal) => {
    const response = await window.axios.get(uploadEndpoints.status(sessionId), {
      signal,
      headers: uploadToken ? { 'X-Upload-Token': uploadToken } : undefined,
      params: uploadToken ? { upload_token: uploadToken } : undefined,
    })
    return response.data || {}
  }, [])

  // Single poll tick: fetch status, update the machine, and stop polling on
  // a terminal (ready or failed) status.
  const pollProcessing = useCallback(async (sessionId, uploadToken) => {
    if (!sessionId) return
    try {
      const statusController = registerController()
      let payload
      try {
        payload = await fetchProcessingStatus(sessionId, uploadToken, statusController.signal)
      } finally {
        // Always drop the controller, even when the status request rejects,
        // so the registry does not accumulate dead controllers.
        unregisterController(statusController)
      }

      const processingValue = getProcessingValue(payload)
      dispatchMachine({ type: 'PROCESSING_STATUS', processingStatus: processingValue })

      if (isReadyToPublishStatus(processingValue)) {
        dispatchMachine({ type: 'READY_TO_PUBLISH' })
        clearPolling()
      } else if (processingValue === 'rejected' || processingValue === 'error' || payload?.failure_reason) {
        const failureMessage = payload?.failure_reason || payload?.message || `Processing ended with status: ${processingValue}`
        dispatchMachine({ type: 'ERROR', error: failureMessage })
        onNotice?.({ type: 'error', message: failureMessage })
        clearPolling()
      }
    } catch (error) {
      // Aborts are expected during cancel/reset; everything else is surfaced.
      if (error?.name === 'CanceledError' || error?.code === 'ERR_CANCELED') return
      const notice = mapUploadErrorNotice(error, 'Processing status check failed.')
      dispatchMachine({ type: 'ERROR', error: notice.message })
      onNotice?.(notice)
      emitUploadEvent('upload_error', { stage: 'processing_poll', message: notice.message })
      clearPolling()
    }
  }, [fetchProcessingStatus, registerController, unregisterController, clearPolling, onNotice])

  // Starts interval polling, firing one immediate tick first.
  const startPolling = useCallback((sessionId, uploadToken) => {
    clearPolling()
    pollProcessing(sessionId, uploadToken)
    pollingTimerRef.current = window.setInterval(() => {
      pollProcessing(sessionId, uploadToken)
    }, POLL_INTERVAL_MS)
  }, [clearPolling, pollProcessing])

  // Creates a new upload session on the backend.
  // @returns {Promise<{sessionId: string, uploadToken: string}>}
  // @throws when the response payload is missing either field
  const initUploadSession = useCallback(async () => {
    const initController = registerController()
    try {
      const initResponse = await window.axios.post(
        uploadEndpoints.init(),
        { client: 'web' },
        { signal: initController.signal }
      )

      const sessionId = initResponse?.data?.session_id
      const uploadToken = initResponse?.data?.upload_token
      if (!sessionId || !uploadToken) {
        throw new Error('Upload session initialization returned an invalid payload.')
      }

      return { sessionId, uploadToken }
    } finally {
      unregisterController(initController)
    }
  }, [registerController, unregisterController])

  // Uploads one file in chunks, reporting combined progress across all files.
  // On HTTP 413 the chunk size is halved (down to MIN_CHUNK_SIZE_BYTES) and
  // the same offset is retried.
  // @returns {Promise<number>} uploadedBaseBytes + file size, for the next file
  const uploadSingleFile = useCallback(async (sessionId, uploadToken, file, uploadedBaseBytes, combinedTotalBytes) => {
    let uploadedForFile = 0
    const totalSize = file.size

    while (uploadedForFile < totalSize) {
      const activeChunkSize = Math.max(MIN_CHUNK_SIZE_BYTES, Number(adaptiveChunkSizeRef.current || effectiveChunkSize))
      const nextOffset = Math.min(uploadedForFile + activeChunkSize, totalSize)
      const blob = file.slice(uploadedForFile, nextOffset)

      const payload = new FormData()
      payload.append('session_id', sessionId)
      payload.append('offset', String(uploadedForFile))
      payload.append('chunk_size', String(blob.size))
      payload.append('total_size', String(totalSize))
      payload.append('upload_token', uploadToken)
      payload.append('chunk', blob)

      const chunkController = registerController()
      try {
        await window.axios.post(uploadEndpoints.chunk(), payload, {
          signal: chunkController.signal,
          timeout: effectiveChunkRequestTimeoutMs,
          headers: { 'X-Upload-Token': uploadToken },
        })
      } catch (error) {
        if (isRequestTooLarge(error) && activeChunkSize > MIN_CHUNK_SIZE_BYTES) {
          const nextChunkSize = Math.max(MIN_CHUNK_SIZE_BYTES, Math.floor(activeChunkSize / 2))
          if (nextChunkSize < activeChunkSize) {
            adaptiveChunkSizeRef.current = nextChunkSize
            onNotice?.({
              type: 'warning',
              message: `Server rejected ${formatChunkSize(activeChunkSize)} chunks. Retrying with ${formatChunkSize(nextChunkSize)} chunks.`,
            })
            // Retry the same offset with the smaller chunk size.
            continue
          }
        }
        throw error
      } finally {
        unregisterController(chunkController)
      }

      uploadedForFile = nextOffset
      const totalUploaded = uploadedBaseBytes + uploadedForFile
      // Progress is clamped to 1..95; finish/processing own the 96..100 band.
      const progress = Math.max(1, Math.min(95, toPercent(totalUploaded, combinedTotalBytes)))
      dispatchMachine({ type: 'UPLOAD_PROGRESS', progress })
    }

    return uploadedBaseBytes + totalSize
  }, [effectiveChunkRequestTimeoutMs, effectiveChunkSize, onNotice, registerController, unregisterController])

  // Best-effort server-side cancellation of one session; no-op without an id.
  const cancelUploadSession = useCallback(async (sessionId, uploadToken) => {
    if (!sessionId) return

    await window.axios.post(
      uploadEndpoints.cancel(),
      { session_id: sessionId, upload_token: uploadToken || undefined },
      { headers: uploadToken ? { 'X-Upload-Token': uploadToken } : undefined }
    )
  }, [])

  // ── Core upload flow ───────────────────────────────────────────────────────
  // Runs the full flow: draft creation → session init → chunked uploads
  // (preview, archive, extra screenshots) → finish → processing poll.
  // On failure, every opened session is cancelled best-effort.
  const runUploadFlow = useCallback(async () => {
    if (!primaryFile || !canStartUpload) return

    const normalizedScreenshotIndex = Number.isFinite(selectedScreenshotIndex)
      ? Math.max(0, Math.floor(selectedScreenshotIndex))
      : 0
    // Archives upload a screenshot as the preview; images ARE the preview.
    const previewFile = isArchive
      ? (screenshots[normalizedScreenshotIndex] || screenshots[0] || null)
      : primaryFile
    const archiveFile = isArchive ? primaryFile : null
    const additionalScreenshotFiles = isArchive
      ? screenshots.filter((file, index) => index !== normalizedScreenshotIndex && Boolean(file))
      : []
    if (!previewFile) {
      const message = isArchive
        ? 'Archive uploads require at least one screenshot before upload can start.'
        : 'A preview image is required before upload can start.'
      dispatchMachine({ type: 'ERROR', error: message })
      onNotice?.({ type: 'error', message })
      return
    }

    clearPolling()
    clearArchiveSession()
    clearAdditionalScreenshotSessions()
    dispatchMachine({ type: 'INIT_START' })
    emitUploadEvent('upload_start', {
      file_name: primaryFile.name,
      file_size: primaryFile.size,
      file_type: primaryType,
      is_archive: isArchive,
    })

    // Track every opened session locally so the catch block can cancel them
    // even if the refs were never populated.
    let activePrimarySessionId = null
    let activePrimaryUploadToken = null
    let activeArchiveSessionId = null
    let activeArchiveUploadToken = null
    let activeAdditionalScreenshotSessions = []

    try {
      // 1. Create or reuse the artwork draft
      let artworkIdForUpload = resolvedArtworkIdRef.current
      if (!artworkIdForUpload) {
        const titleSourceFile = archiveFile || primaryFile || previewFile
        const derivedTitle =
          String(metadata.title || '').trim() ||
          String(titleSourceFile?.name || '').replace(/\.[^.]+$/, '') ||
          'Untitled upload'

        const draftResponse = await window.axios.post('/api/artworks', {
          title: derivedTitle,
          description: String(metadata.description || '').trim() || null,
          category: metadata.subCategoryId || metadata.rootCategoryId || null,
          tags: Array.isArray(metadata.tags) ? metadata.tags.join(', ') : '',
          license: Boolean(metadata.rightsAccepted),
          is_mature: Boolean(metadata.isMature),
          group: String(metadata.group || '').trim() || null,
        })

        const draftIdCandidate = Number(draftResponse?.data?.artwork_id ?? draftResponse?.data?.id)
        if (!Number.isFinite(draftIdCandidate) || draftIdCandidate <= 0) {
          throw new Error('Unable to create upload draft before finishing upload.')
        }

        artworkIdForUpload = Math.floor(draftIdCandidate)
        resolvedArtworkIdRef.current = artworkIdForUpload
        onArtworkCreated?.(artworkIdForUpload)
      }

      // 2. Init upload session
      const { sessionId, uploadToken } = await initUploadSession()
      activePrimarySessionId = sessionId
      activePrimaryUploadToken = uploadToken

      dispatchMachine({ type: 'INIT_SUCCESS', sessionId, uploadToken })
      dispatchMachine({ type: 'UPLOAD_START' })

      // 3. Chunked upload
      const combinedTotalBytes = previewFile.size + (archiveFile?.size || 0) + additionalScreenshotFiles.reduce((sum, file) => sum + (file?.size || 0), 0)
      let uploadedBytes = await uploadSingleFile(sessionId, uploadToken, previewFile, 0, combinedTotalBytes)

      let archiveSessionId = null
      let archiveUploadToken = null

      if (archiveFile) {
        const archiveSession = await initUploadSession()
        archiveSessionId = archiveSession.sessionId
        archiveUploadToken = archiveSession.uploadToken
        activeArchiveSessionId = archiveSessionId
        activeArchiveUploadToken = archiveUploadToken
        setArchiveSession(archiveSessionId, archiveUploadToken)

        uploadedBytes = await uploadSingleFile(archiveSessionId, archiveUploadToken, archiveFile, uploadedBytes, combinedTotalBytes)
      }

      const additionalScreenshotSessions = []
      for (const screenshotFile of additionalScreenshotFiles) {
        const screenshotSession = await initUploadSession()
        additionalScreenshotSessions.push({
          sessionId: screenshotSession.sessionId,
          uploadToken: screenshotSession.uploadToken,
          fileName: String(screenshotFile?.name || ''),
        })
        activeAdditionalScreenshotSessions = additionalScreenshotSessions
        setAdditionalScreenshotSessions(additionalScreenshotSessions)

        uploadedBytes = await uploadSingleFile(
          screenshotSession.sessionId,
          screenshotSession.uploadToken,
          screenshotFile,
          uploadedBytes,
          combinedTotalBytes,
        )
      }

      // 4. Finish + start processing
      dispatchMachine({ type: 'FINISH_START' })

      const finishController = registerController()
      let finishResponse
      try {
        finishResponse = await window.axios.post(
          uploadEndpoints.finish(),
          {
            session_id: sessionId,
            upload_token: uploadToken,
            artwork_id: artworkIdForUpload,
            file_name: String(previewFile?.name || ''),
            archive_session_id: archiveSessionId,
            archive_file_name: archiveFile ? String(archiveFile?.name || '') : undefined,
            additional_screenshot_sessions: additionalScreenshotSessions.map((item) => ({
              session_id: item.sessionId,
              file_name: item.fileName,
            })),
          },
          { signal: finishController.signal, headers: { 'X-Upload-Token': uploadToken } }
        )
      } finally {
        // Unregister even on failure so aborted/failed controllers don't leak.
        unregisterController(finishController)
      }

      const finishStatus = getProcessingValue(finishResponse?.data || {})
      dispatchMachine({ type: 'FINISH_SUCCESS', processingStatus: finishStatus })

      const finishNotice = mapUploadResultNotice(finishResponse?.data || {}, {
        fallbackType: finishStatus === 'queued' ? 'warning' : 'success',
        fallbackMessage: finishStatus === 'queued'
          ? 'Upload received. Processing is queued.'
          : 'Upload completed successfully.',
      })
      onNotice?.(finishNotice)

      if (isReadyToPublishStatus(finishStatus)) {
        dispatchMachine({ type: 'READY_TO_PUBLISH' })
      } else {
        startPolling(sessionId, uploadToken)
      }

      emitUploadEvent('upload_complete', { session_id: sessionId, artwork_id: artworkIdForUpload })
    } catch (error) {
      if (error?.name === 'CanceledError' || error?.code === 'ERR_CANCELED') return

      // Best-effort cleanup of every session opened so far.
      await Promise.allSettled([
        cancelUploadSession(activePrimarySessionId, activePrimaryUploadToken),
        cancelUploadSession(activeArchiveSessionId, activeArchiveUploadToken),
        ...activeAdditionalScreenshotSessions.map((item) => cancelUploadSession(item.sessionId, item.uploadToken)),
      ])

      const notice = mapUploadErrorNotice(error, 'Upload failed.')
      dispatchMachine({ type: 'ERROR', error: notice.message })
      onNotice?.(notice)
      emitUploadEvent('upload_error', { stage: 'upload_flow', message: notice.message })
    }
  }, [
    primaryFile,
    screenshots,
    selectedScreenshotIndex,
    canStartUpload,
    primaryType,
    isArchive,
    metadata,
    clearPolling,
    clearArchiveSession,
    clearAdditionalScreenshotSessions,
    setAdditionalScreenshotSessions,
    startPolling,
    onArtworkCreated,
    onNotice,
    initUploadSession,
    uploadSingleFile,
    setArchiveSession,
    cancelUploadSession,
  ])

  // ── Cancel ─────────────────────────────────────────────────────────────────
  // Aborts all in-flight requests and cancels every known session server-side.
  const handleCancel = useCallback(async () => {
    dispatchMachine({ type: 'CANCEL_START' })
    clearPolling()
    abortAllRequests()

    try {
      const { sessionId, uploadToken } = machine
      const archiveSessionId = archiveSessionRef.current.sessionId
      const archiveUploadToken = archiveSessionRef.current.uploadToken
      const additionalScreenshotSessions = additionalScreenshotSessionsRef.current

      await Promise.allSettled([
        cancelUploadSession(sessionId, uploadToken),
        cancelUploadSession(archiveSessionId, archiveUploadToken),
        ...additionalScreenshotSessions.map((item) => cancelUploadSession(item.sessionId, item.uploadToken)),
      ])

      clearArchiveSession()
      clearAdditionalScreenshotSessions()
      dispatchMachine({ type: 'CANCELLED' })
      onNotice?.({ type: 'warning', message: 'Upload cancelled.' })
      emitUploadEvent('upload_cancel', { session_id: machine.sessionId || null })
    } catch (error) {
      const notice = mapUploadErrorNotice(error, 'Cancel failed.')
      dispatchMachine({ type: 'ERROR', error: notice.message })
      onNotice?.(notice)
      emitUploadEvent('upload_error', { stage: 'cancel', message: notice.message })
    }
  }, [machine, abortAllRequests, clearPolling, onNotice, cancelUploadSession, clearArchiveSession, clearAdditionalScreenshotSessions])

  // ── Publish ────────────────────────────────────────────────────────────────
  /**
   * handlePublish
   *
   * Publishes, schedules, or submits the artwork for review. Re-entrant calls
   * are suppressed via publishLockRef while a request is in flight.
   *
   * @param {boolean} canPublish
   * @param {{ mode?: 'now'|'schedule', publishAt?: string|null, timezone?: string, visibility?: string, action?: 'publish'|'submit_review' }} [opts]
   */
  const handlePublish = useCallback(async (canPublish, opts = {}) => {
    if (!canPublish || publishLockRef.current) return

    publishLockRef.current = true
    dispatchMachine({ type: 'PUBLISH_START' })

    const { mode = 'now', publishAt = null, timezone = null, visibility = 'public', action = 'publish' } = opts

    const resolvedCategoryId = metadata.subCategoryId || metadata.rootCategoryId || null

    // Builds the publish/schedule request body from the current metadata.
    const buildPayload = () => ({
      title: String(metadata.title || '').trim() || undefined,
      description: String(metadata.description || '').trim() || null,
      category: resolvedCategoryId ? String(resolvedCategoryId) : null,
      tags: Array.isArray(metadata.tags) ? metadata.tags : [],
      is_mature: Boolean(metadata.isMature),
      group: String(metadata.group || '').trim() || null,
      primary_author_user_id: metadata.primaryAuthorUserId ? Number(metadata.primaryAuthorUserId) : null,
      contributor_user_ids: Array.isArray(metadata.contributorUserIds) ? metadata.contributorUserIds.map((id) => Number(id)).filter((id) => Number.isFinite(id) && id > 0) : [],
      contributor_credits: Array.isArray(metadata.contributorUserIds)
        ? metadata.contributorUserIds
          .map((id) => Number(id))
          .filter((id) => Number.isFinite(id) && id > 0)
          .map((id) => {
            // Credit metadata may be keyed by number or string id.
            const creditMeta = metadata.contributorCredits?.[id] || metadata.contributorCredits?.[String(id)] || {}

            return {
              user_id: id,
              credit_role: typeof creditMeta.creditRole === 'string' && creditMeta.creditRole.trim() !== '' ? creditMeta.creditRole.trim() : null,
              is_primary: Boolean(creditMeta.isPrimary),
            }
          })
        : [],
      mode,
      ...(mode === 'schedule' && publishAt ? { publish_at: publishAt } : {}),
      ...(timezone ? { timezone } : {}),
      visibility,
    })

    try {
      // Prefer the resolved draft id; fall back to the initial draft id or,
      // as a last resort, the upload session id.
      const publishTargetId =
        resolvedArtworkIdRef.current || initialDraftId || machine.sessionId
      if (!publishTargetId) throw new Error('Missing publish id.')

      // When the resolved draft id is known, the endpoint id is its string
      // form; otherwise the raw fallback id is used as-is.
      const endpointId =
        resolvedArtworkIdRef.current && resolvedArtworkIdRef.current > 0
          ? String(resolvedArtworkIdRef.current)
          : publishTargetId

      const publishController = registerController()
      let publishRes
      try {
        publishRes = await window.axios.post(
          action === 'submit_review'
            ? uploadEndpoints.submitReview(endpointId)
            : uploadEndpoints.publish(endpointId),
          buildPayload(),
          { signal: publishController.signal }
        )
      } finally {
        // Unregister even on failure so the controller registry stays clean.
        unregisterController(publishController)
      }

      const publishedSlug = publishRes?.data?.slug ?? null
      dispatchMachine({ type: mode === 'schedule' ? 'SCHEDULED' : 'PUBLISH_SUCCESS', slug: publishedSlug })
      onNotice?.(mapUploadResultNotice(publishRes?.data || {}, {
        fallbackType: 'success',
        fallbackMessage: action === 'submit_review'
          ? 'Artwork submitted for group review.'
          : mode === 'schedule'
            ? 'Artwork scheduled successfully.'
            : 'Artwork published successfully.',
      }))
      emitUploadEvent(action === 'submit_review' ? 'upload_submit_review' : 'upload_publish', { id: publishTargetId, mode })
    } catch (error) {
      if (error?.name === 'CanceledError' || error?.code === 'ERR_CANCELED') return
      const notice = mapUploadErrorNotice(error, 'Publish failed.')
      dispatchMachine({ type: 'ERROR', error: notice.message })
      onNotice?.(notice)
      emitUploadEvent('upload_error', { stage: 'publish', message: notice.message })
    } finally {
      publishLockRef.current = false
    }
  }, [machine, initialDraftId, metadata, registerController, unregisterController, onNotice])

  // ── Reset ──────────────────────────────────────────────────────────────────
  // Returns the machine to its initial state, aborting any in-flight work.
  const resetMachine = useCallback(() => {
    clearPolling()
    abortAllRequests()
    resolvedArtworkIdRef.current = (() => {
      const parsed = Number(initialDraftId)
      return Number.isFinite(parsed) && parsed > 0 ? Math.floor(parsed) : null
    })()
    clearArchiveSession()
    clearAdditionalScreenshotSessions()
    publishLockRef.current = false
    dispatchMachine({ type: 'RESET_MACHINE' })
  }, [clearPolling, abortAllRequests, initialDraftId, clearArchiveSession, clearAdditionalScreenshotSessions])

  // ── Retry ──────────────────────────────────────────────────────────────────
  /**
   * handleRetry
   *
   * Re-attempts the last action. When the last action was a publish/schedule,
   * opts must be forwarded so scheduled-publish options are not lost on retry.
   *
   * @param {boolean} canPublish
   * @param {{ mode?: string, publishAt?: string|null, timezone?: string, visibility?: string }} [opts]
   */
  const handleRetry = useCallback((canPublish, opts = {}) => {
    clearPolling()
    abortAllRequests()
    if (machine.lastAction === 'publish') {
      handlePublish(canPublish, opts)
      return
    }
    runUploadFlow()
  }, [machine.lastAction, handlePublish, runUploadFlow, clearPolling, abortAllRequests])

  // ── Cleanup on unmount ─────────────────────────────────────────────────────
  // (callers should call resetMachine or abortAllRequests on unmount if needed)

  return {
    machine,
    dispatchMachine,
    resolvedArtworkId: resolvedArtworkIdRef.current,
    runUploadFlow,
    handleCancel,
    handlePublish,
    handleRetry,
    resetMachine,
    clearPolling,
    abortAllRequests,
    startPolling,
  }
}
|
||||
// Reference in New Issue
// Block a user