// Jenkins Pipeline: copy the newest build zip straight out of an upstream job's
// workspace on the SAME Windows agent, upload it to Google Drive with rclone, and
// enforce retention entirely in PowerShell by keeping only the N newest zips,
// sorted by the YYYYMMDD-HHMMSS timestamp embedded in each filename.
//
// Requirements on the Windows agent:
//   - rclone installed and on PATH
//   - rclone.conf present in this job's workspace root (see RCLONE_CONFIG)
//   - Pipeline Utility Steps plugin (provides readJSON)
//
// NOTE(interpolation): every ${env.NAME} below is expanded by GROOVY before the
// script text reaches PowerShell. PowerShell's own variables are written as \$var
// inside triple-quoted Groovy strings so they survive Groovy interpolation.
// (The previous revision used ${env:NAME}, which Groovy parses as a labeled
// expression rather than passing $env:NAME through to PowerShell, and \\\\ which
// emitted doubled backslashes into Windows paths.)
pipeline {
    // Adjust to your Windows agent label if different
    agent { label 'Win10-BuildMachine' }

    environment {
        // rclone remote name and target folder on Google Drive
        GDRIVE_REMOTE = 'AzaionGoogleDrive:AzaionSuiteBuilds'
        // Workspace-relative staging directory that holds the copied zip before upload
        TMP_UPLOAD_DIR = 'temp_upload'
        // rclone.conf is expected in the workspace root; adjust if stored elsewhere
        RCLONE_CONFIG = 'rclone.conf'
        // Full path of the upstream job's workspace on this same agent — adjust if needed
        UPSTREAM_WORKSPACE = 'C:\\Jenkins\\workspace\\AzaionSuite'
        // Wildcard, relative to the upstream workspace, matching the timestamped zip
        // e.g. suite\AzaionSuite.1.4.5-YYYYMMDD-HHMMSS.zip
        UPSTREAM_ZIP_PATH_RELATIVE = 'suite\\*.zip'
        // How many of the newest zips to keep on Google Drive (env vars are strings;
        // the retention script casts this back to [int])
        FILES_TO_KEEP = '3'
    }

    stages {
        stage('Initialize') {
            steps {
                echo 'Initializing workspace on Windows agent...'
                // Create the staging directory if missing, then empty it so a
                // leftover zip from a previous run can never be re-uploaded.
                powershell """
                    New-Item -ItemType Directory -Force -Path "${env.WORKSPACE}\\${env.TMP_UPLOAD_DIR}"
                    Remove-Item -Recurse -Force "${env.WORKSPACE}\\${env.TMP_UPLOAD_DIR}\\*" -ErrorAction SilentlyContinue
                """
            }
        }

        stage('Copy and Upload Build') {
            steps {
                script {
                    echo 'Starting Copy and Upload Build stage...'

                    // --- Locate the zip in the upstream workspace ---------------------
                    def searchPattern = "${env.UPSTREAM_WORKSPACE}\\${env.UPSTREAM_ZIP_PATH_RELATIVE}"
                    def upstreamZipFullPath = null
                    try {
                        echo "Searching for zip file: ${searchPattern}"
                        // -ErrorAction Stop turns "no match" into an exception that the
                        // Groovy try/catch can see instead of an empty stdout.
                        upstreamZipFullPath = powershell(
                            script: "Get-ChildItem -Path \"${searchPattern}\" -ErrorAction Stop | Select-Object -First 1 -ExpandProperty FullName",
                            returnStdout: true
                        ).trim()
                    } catch (e) {
                        echo "Error finding upstream file: ${e.message}"
                        error("Failed to find the zip file in the upstream workspace '${searchPattern}'.")
                    }
                    if (!upstreamZipFullPath) {
                        error("No *.zip file found at '${searchPattern}'. Cannot proceed.")
                    }
                    echo "Found upstream file: ${upstreamZipFullPath}"

                    // Filename is everything after the last backslash; it is reused
                    // verbatim as the object name on Google Drive.
                    def zipFilename       = upstreamZipFullPath.substring(upstreamZipFullPath.lastIndexOf('\\') + 1)
                    def copiedZipFilePath = "${env.WORKSPACE}\\${env.TMP_UPLOAD_DIR}\\${zipFilename}"
                    echo "Derived filename: ${zipFilename}"
                    echo "Staging path in this workspace: ${copiedZipFilePath}"

                    // --- List zips already on Google Drive (duplicate check) ----------
                    // Best-effort: any failure here degrades to "assume nothing exists"
                    // so the pipeline still attempts the upload rather than aborting.
                    def existingZipFiles = []
                    try {
                        echo "Checking for existing zip files on ${env.GDRIVE_REMOTE}..."
                        def listOutput = powershell(
                            script: "rclone --config \"${env.WORKSPACE}\\${env.RCLONE_CONFIG}\" lsjson \"${env.GDRIVE_REMOTE}\"",
                            returnStdout: true
                        ).trim() ?: '[]'
                        if (listOutput.startsWith('[') && listOutput.endsWith(']')) {
                            // readJSON is provided by the Pipeline Utility Steps plugin
                            existingZipFiles = readJSON(text: listOutput)
                                .findAll { it.Name?.endsWith('.zip') }
                                .collect { it.Name }
                            echo "Existing zip files on Google Drive: ${existingZipFiles}"
                        } else {
                            echo "Warning: rclone lsjson did not return a JSON array. Output: ${listOutput}"
                        }
                    } catch (e) {
                        echo "An error occurred while checking existing files on Google Drive: ${e}"
                    }

                    if (existingZipFiles.contains(zipFilename)) {
                        // Nothing was staged yet, so no cleanup is needed on this path.
                        echo "Skipping upload: ${zipFilename} already exists on Google Drive."
                        return
                    }

                    // --- Copy into the staging dir, then upload -----------------------
                    echo "Zip ${zipFilename} does not exist on Google Drive. Copying and uploading."
                    try {
                        powershell "Copy-Item -Path \"${upstreamZipFullPath}\" -Destination \"${copiedZipFilePath}\" -Force"
                        echo "Uploading '${zipFilename}' to ${env.GDRIVE_REMOTE}..."
                        powershell "rclone --config \"${env.WORKSPACE}\\${env.RCLONE_CONFIG}\" copy \"${copiedZipFilePath}\" \"${env.GDRIVE_REMOTE}\""
                        echo "Finished uploading ${zipFilename}."
                    } catch (e) {
                        error("Failed to copy or upload build: ${e.message}")
                    } finally {
                        // Remove the staged copy whether or not the upload succeeded.
                        echo "Cleaning up staged zip file: ${copiedZipFilePath}"
                        powershell "Remove-Item -Force \"${copiedZipFilePath}\" -ErrorAction SilentlyContinue"
                    }
                }
            }
        }

        stage('Retention on Google Drive') {
            steps {
                script {
                    echo 'Starting Google Drive retention process (using PowerShell)...'
                    // Everything below runs in one PowerShell process. \$var is a
                    // PowerShell variable (escaped from Groovy); ${env.NAME} was
                    // already expanded by Groovy into a literal value.
                    powershell """
                        \$rcloneRemote = "${env.GDRIVE_REMOTE}"
                        \$rcloneConfig = "${env.WORKSPACE}\\${env.RCLONE_CONFIG}"
                        \$filesToKeep  = [int]"${env.FILES_TO_KEEP}"

                        # List remote files as JSON. ConvertFrom-Json throws on invalid
                        # input, which fails this step (and the stage) — intended.
                        \$listOutput = rclone --config "\$rcloneConfig" lsjson "\$rcloneRemote" | Out-String
                        \$allFiles   = \$listOutput | ConvertFrom-Json
                        \$zipFiles   = \$allFiles | Where-Object { \$_.Name -ne \$null -and \$_.Name.EndsWith('.zip') }
                        Write-Host "Found \$(\$zipFiles.Count) total ZIP files on Google Drive."

                        # Sort newest-first by the YYYYMMDD-HHMMSS timestamp embedded in
                        # the filename (not by the remote modification time).
                        Write-Host "Sorting ZIP files by filename timestamp (YYYYMMDD-HHMMSS)..."
                        \$timestampRegex = '.*-(\\d{8}-\\d{6})\\.zip'
                        \$sortedZipFiles = \$zipFiles | Sort-Object -Descending {
                            # Capture the name first: inside the catch block \$_ is the
                            # ErrorRecord, not the file object being sorted.
                            \$name  = \$_.Name
                            \$match = [regex]::Match(\$name, \$timestampRegex)
                            if (\$match.Success -and \$match.Groups.Count -gt 1) {
                                try {
                                    [DateTime]::ParseExact(\$match.Groups[1].Value, 'yyyyMMdd-HHmmss', \$null)
                                } catch {
                                    Write-Host "Warning: could not parse timestamp from '\$name': \$(\$_.Exception.Message)"
                                    [DateTime]::MinValue   # unparseable -> sorts last, i.e. oldest
                                }
                            } else {
                                Write-Host "Warning: filename '\$name' does not match the timestamp regex."
                                [DateTime]::MinValue       # non-matching -> sorts last, i.e. oldest
                            }
                        }

                        Write-Host "ZIP files sorted by filename timestamp (newest first):"
                        \$sortedZipFiles | ForEach-Object { Write-Host \$_.Name }

                        if (\$sortedZipFiles.Count -gt \$filesToKeep) {
                            # Everything past the first \$filesToKeep entries is deleted.
                            \$filesToDelete = \$sortedZipFiles | Select-Object -Skip \$filesToKeep
                            Write-Host "Applying retention: keeping \$filesToKeep newest files, deleting \$(\$filesToDelete.Count) older files."
                            foreach (\$oldZip in \$filesToDelete) {
                                Write-Host "Deleting old ZIP from Google Drive: \$(\$oldZip.Name)"
                                # --drive-use-trash=false deletes permanently (skips Drive trash)
                                rclone --config "\$rcloneConfig" delete "\$rcloneRemote/\$(\$oldZip.Name)" --drive-use-trash=false
                            }
                        } else {
                            Write-Host "Retention check: found \$(\$sortedZipFiles.Count) ZIP files, which is not more than \$filesToKeep. No files deleted."
                        }
                    """
                }
            }
        }
    }

    post {
        always {
            echo 'Executing post-build cleanup...'
            // Remove the whole staging directory; SilentlyContinue keeps cleanup
            // from failing the build if the directory is already gone.
            powershell """
                Write-Host "Cleaning up temporary upload directory: ${env.WORKSPACE}\\${env.TMP_UPLOAD_DIR}"
                Remove-Item -Recurse -Force "${env.WORKSPACE}\\${env.TMP_UPLOAD_DIR}" -ErrorAction SilentlyContinue
            """
        }
        success {
            echo 'Pipeline finished successfully.'
        }
        failure {
            echo 'Pipeline failed. Check logs for details.'
        }
    }
}