This repository has been archived on 2026-03-21. You can view files and clone it, but cannot push or open issues or pull requests.
payfrit-biz/api/setup/uploadSavedPage.cfm
John Mizerek ddaac523bf Add auto-cleanup of old temp extractions (>1 hour)
Security: Also added nginx rule on dev server to block CFM/PHP
execution in /temp/menu-import/ directory.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-13 07:40:10 -08:00

168 lines
6.2 KiB
Text

<cfsetting showdebugoutput="false">
<cfsetting enablecfoutputonly="true">
<!--- Emit pure JSON: clear any buffered output, set content type, and forbid caching. --->
<cfcontent type="application/json; charset=utf-8" reset="true">
<cfheader name="Cache-Control" value="no-store">
<!--- Response envelope. Bracket assignment keeps the upper-case key spelling
      (OK / MESSAGE / URL) intact in serializeJSON output on all CF engines. --->
<cfset response = {}>
<cfset response["OK"] = false>
<cfset response["MESSAGE"] = "">
<cfset response["URL"] = "">
<cftry>
<!--- Temp directory for extracted saved pages --->
<cfset tempBaseDir = expandPath("/temp/menu-import")>
<!--- Create temp directory if it doesn't exist --->
<cfif NOT directoryExists(tempBaseDir)>
<cfdirectory action="create" directory="#tempBaseDir#" mode="755">
</cfif>
<!--- Best-effort cleanup: delete extraction folders AND stray files older than 1 hour.
      Stray files (e.g. a ZIP left behind when a previous request failed between upload
      and delete) were previously never cleaned because only type="dir" was listed. --->
<cftry>
<cfset oneHourAgo = dateAdd("h", -1, now())>
<cfdirectory action="list" directory="#tempBaseDir#" name="oldFolders" type="dir">
<cfloop query="oldFolders">
<cfif oldFolders.dateLastModified LT oneHourAgo AND oldFolders.name NEQ "." AND oldFolders.name NEQ "..">
<cftry>
<cfdirectory action="delete" directory="#tempBaseDir#/#oldFolders.name#" recurse="true">
<cfcatch></cfcatch>
</cftry>
</cfif>
</cfloop>
<cfdirectory action="list" directory="#tempBaseDir#" name="oldFiles" type="file">
<cfloop query="oldFiles">
<cfif oldFiles.dateLastModified LT oneHourAgo>
<cftry>
<cffile action="delete" file="#tempBaseDir#/#oldFiles.name#">
<cfcatch></cfcatch>
</cftry>
</cfif>
</cfloop>
<cfcatch></cfcatch>
</cftry>
<!--- Check if ZIP file was uploaded --->
<cfif NOT structKeyExists(form, "zipFile") OR form.zipFile EQ "">
<cfset response["MESSAGE"] = "No ZIP file uploaded">
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfabort>
</cfif>
<!--- Generate unique folder name --->
<cfset uniqueId = lCase(replace(createUUID(), "-", "", "all"))>
<cfset extractDir = "#tempBaseDir#/#uniqueId#">
<!--- Upload the ZIP file --->
<cffile action="upload"
filefield="zipFile"
destination="#tempBaseDir#/"
nameconflict="makeunique"
mode="644"
result="uploadResult">
<!--- Validate it's a ZIP file (by extension; content is verified via cfzip list below) --->
<cfset uploadedFile = "#tempBaseDir#/#uploadResult.serverFile#">
<cfset fileExt = lCase(uploadResult.clientFileExt)>
<cfif fileExt NEQ "zip">
<cffile action="delete" file="#uploadedFile#">
<cfset response["MESSAGE"] = "Only ZIP files are accepted">
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfabort>
</cfif>
<!--- Validate the archive BEFORE extraction:
      1) a corrupt/non-ZIP payload renamed to .zip fails here with a clean error
         (and the upload is deleted instead of lingering on disk), and
      2) zip-slip entries ("../" or absolute paths) are rejected so extraction
         can never escape extractDir. --->
<cftry>
<cfzip action="list" file="#uploadedFile#" name="zipEntries">
<cfcatch type="any">
<cffile action="delete" file="#uploadedFile#">
<cfset response["MESSAGE"] = "Invalid or corrupt ZIP file">
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfabort>
</cfcatch>
</cftry>
<cfloop query="zipEntries">
<!--- Adobe CF splits entry paths across "directory" + "name" columns; Lucee puts
      the full path in "name". Combine defensively so traversal in either is caught. --->
<cfset entryPath = zipEntries.name>
<cfif listFindNoCase(zipEntries.columnList, "directory") AND len(zipEntries.directory)>
<cfset entryPath = zipEntries.directory & "/" & entryPath>
</cfif>
<cfset entryPath = replace(entryPath, "\", "/", "all")>
<cfif listFind(entryPath, "..", "/") OR left(entryPath, 1) EQ "/" OR find(":", entryPath)>
<cffile action="delete" file="#uploadedFile#">
<cfset response["MESSAGE"] = "ZIP contains unsafe file paths">
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfabort>
</cfif>
</cfloop>
<!--- Create extraction directory --->
<cfdirectory action="create" directory="#extractDir#" mode="755">
<!--- Extract the ZIP file --->
<cfzip action="unzip" file="#uploadedFile#" destination="#extractDir#" overwrite="true">
<!--- Delete the uploaded ZIP --->
<cffile action="delete" file="#uploadedFile#">
<!--- Find the main HTML file --->
<cfset htmlFile = "">
<cfset htmlFiles = []>
<!--- First, look for HTML files directly in the extract directory --->
<cfdirectory action="list" directory="#extractDir#" name="topFiles" filter="*.htm*" type="file">
<cfloop query="topFiles">
<cfset arrayAppend(htmlFiles, { "name": topFiles.name, "path": "#extractDir#/#topFiles.name#", "depth": 0 })>
</cfloop>
<!--- Also check one level deep (common for "Save Page As" which creates folder_files alongside .html) --->
<cfdirectory action="list" directory="#extractDir#" name="subDirs" type="dir">
<cfloop query="subDirs">
<cfif subDirs.name NEQ "." AND subDirs.name NEQ "..">
<cfset subDirPath = "#extractDir#/#subDirs.name#">
<cfdirectory action="list" directory="#subDirPath#" name="subFiles" filter="*.htm*" type="file">
<cfloop query="subFiles">
<cfset arrayAppend(htmlFiles, { "name": subFiles.name, "path": "#subDirPath#/#subFiles.name#", "depth": 1 })>
</cfloop>
</cfif>
</cfloop>
<!--- No HTML at all: clean up the extraction and report --->
<cfif arrayLen(htmlFiles) EQ 0>
<cfdirectory action="delete" directory="#extractDir#" recurse="true">
<cfset response["MESSAGE"] = "No HTML files found in ZIP">
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfabort>
</cfif>
<!--- Priority: index.html at top level, then any index.html, then top-level html, then first found --->
<cfset foundFile = false>
<cfset htmlFile = {}>
<cfloop array="#htmlFiles#" index="hf">
<cfif lCase(hf.name) EQ "index.html" AND hf.depth EQ 0>
<cfset htmlFile = hf>
<cfset foundFile = true>
<cfbreak>
</cfif>
</cfloop>
<cfif NOT foundFile>
<cfloop array="#htmlFiles#" index="hf">
<cfif lCase(hf.name) EQ "index.html">
<cfset htmlFile = hf>
<cfset foundFile = true>
<cfbreak>
</cfif>
</cfloop>
</cfif>
<cfif NOT foundFile>
<cfloop array="#htmlFiles#" index="hf">
<cfif hf.depth EQ 0>
<cfset htmlFile = hf>
<cfset foundFile = true>
<cfbreak>
</cfif>
</cfloop>
</cfif>
<cfif NOT foundFile>
<cfset htmlFile = htmlFiles[1]>
</cfif>
<!--- Build the URL path relative to the extraction folder --->
<cfset relativePath = replace(htmlFile.path, extractDir, "")>
<cfset relativePath = replace(relativePath, "\", "/", "all")>
<cfif left(relativePath, 1) NEQ "/">
<cfset relativePath = "/" & relativePath>
</cfif>
<!--- Determine the server hostname for the URL --->
<cfset serverHost = cgi.HTTP_HOST>
<cfset protocol = cgi.HTTPS EQ "on" ? "https" : "http">
<cfset response["OK"] = true>
<cfset response["MESSAGE"] = "ZIP extracted successfully">
<cfset response["URL"] = "#protocol#://#serverHost#/temp/menu-import/#uniqueId##relativePath#">
<cfset response["FOLDER"] = uniqueId>
<cfset response["FILE"] = htmlFile.name>
<cfset response["FILE_COUNT"] = arrayLen(htmlFiles)>
<cfoutput>#serializeJSON(response)#</cfoutput>
<cfcatch type="any">
<cfset response["OK"] = false>
<cfset response["MESSAGE"] = "Error: #cfcatch.message#">
<cfif len(cfcatch.detail)>
<cfset response["MESSAGE"] = response["MESSAGE"] & " - #cfcatch.detail#">
</cfif>
<cfoutput>#serializeJSON(response)#</cfoutput>
</cfcatch>
</cftry>