diff --git a/build/jenkins/GDriveUpload b/build/jenkins/GDriveUpload index 0c1c158..f4cf1b4 100644 --- a/build/jenkins/GDriveUpload +++ b/build/jenkins/GDriveUpload @@ -1,310 +1,134 @@ -// Jenkins Pipeline script to directly copy the zip file from an upstream job's workspace, -// derive build identifier from the zip filename, and upload to Google Drive with retention. -// Retention is based on the timestamp embedded in the filename and performed entirely in PowerShell. -// Designed to run on a Windows agent. -// Assumes rclone is installed and its directory is added to the system's PATH on the Windows agent. - +// This pipeline job is triggered by the main AzaionSuite build pipeline. +// It copies build artifacts, zips them, and then triggers the Google Drive upload pipeline. pipeline { - // Agent should be your Windows VM agent - agent { label 'Win10-BuildMachine' } // Replace with your Windows agent label if different - // Removed parameters block as we don't need upstream job name/build number parameters anymore + agent { label 'Win10-BuildMachine' } + + tools { + // Git tool might be needed if this Jenkinsfile is in SCM + git 'Default' + // dotnetsdk is not needed here as we only process artifacts + } environment { - GDRIVE_REMOTE = 'AzaionGoogleDrive:AzaionSuiteBuilds' // Your rclone remote name and path - // Use a relative path within the workspace for the temporary directory on Windows - // This temporary directory will hold the copied zip file before upload - TMP_UPLOAD_DIR = 'temp_upload' - // Path to rclone.conf on the Windows agent. - // Adjust this path if your rclone.conf is located elsewhere on the Windows VM. - // Using a relative path from the workspace root is often best practice. 
- RCLONE_CONFIG = 'rclone.conf' // Assuming rclone.conf is in the workspace root + // 7-Zip path (assuming default installation) + SEVEN_ZIP_PATH = "C:/Program Files/7-Zip" // Adjust if 7-Zip is installed elsewhere - // Define the FULL path to the upstream job's workspace on the SAME agent - // This assumes both jobs run on the same agent and you know the workspace path structure - UPSTREAM_WORKSPACE = 'C:\\Jenkins\\workspace\\AzaionSuite' // **Adjust this path if necessary** + // Set the PATH environment variable including 7-Zip + PATH = "${SEVEN_ZIP_PATH};${env.PATH}" // Add 7-Zip to PATH - // Define the path to the zip file relative to the upstream workspace - // Based on your description: C:\Jenkins\workspace\AzaionSuite\suite\AzaionSuite.1.4.5-YYYYMMDD-HHMMSS.zip - // We'll use a wildcard to find the specific timestamped zip - UPSTREAM_ZIP_PATH_RELATIVE = 'suite\\*.zip' // **Reverted filter to only look for .zip** - - // Number of latest files to keep on Google Drive - FILES_TO_KEEP = 3 + // Define the name of your existing Google Drive upload pipeline job + // ** IMPORTANT: Replace 'YourGoogleDriveUploadPipelineName' with the actual name ** + GOOGLE_DRIVE_UPLOAD_JOB_NAME = 'YourGoogleDriveUploadPipelineName' // <<== UPDATE THIS } stages { - stage('Initialize') { + stage('Copy Build Artifacts') { steps { - echo "Initializing workspace on Windows agent..." - - // Use standard Windows PowerShell for directory creation and cleanup - // Ensure paths are quoted for safety - powershell """ - # Create temporary upload directory - New-Item -ItemType Directory -Force -Path "${env:WORKSPACE}\\\\${env:TMP_UPLOAD_DIR}" - - # Clean up previous temporary files in the upload directory - Remove-Item -Recurse -Force "${env:WORKSPACE}\\\\${env:TMP_UPLOAD_DIR}\\\\*" -ErrorAction SilentlyContinue - """ + script { + echo "Starting 'Copy Build Artifacts' stage." 
+ // Copy artifacts from the upstream build job + // Replace 'AzaionSuite' with the actual name of your main build pipeline job + copyArtifacts( + projectName: 'AzaionSuite', // <<== UPDATE THIS if your main build job name is different + selector: lastSuccessful(), // Copy from the last successful build + // Specify which files to copy. These should match the files produced by the build. + // Assuming artifacts are in the 'suite' directory relative to the upstream workspace root. + filter: 'suite/AzaionSuite*.exe, suite/AzaionSuite*.bin', + flatten: true // Strip the 'suite/' path so the copied files land in this job's workspace root, where the later findFiles globs look for them. + ) + echo "Artifacts copied successfully." + } } } - // Removed the 'Copy Last Stable Artifact' stage - - stage('Copy and Upload Build') { // Combined stage for copying, finding name, and conditional upload + stage('Archive Copied Artifacts (7-Zip)') { steps { - script { // Wrap steps in a script block - echo "Starting Copy and Upload Build stage..." + script { + echo "Starting 'Archive Copied Artifacts (7-Zip)' stage." + // The copied artifacts are now in the root of this job's workspace. + def artifactsDirectory = '.' 
// Artifacts are in the current workspace root + def version = '1.0.0' // Default version + def filesToArchive = [] + def exeFound = false - def upstreamZipFullPath = null - def copiedZipFilename = null - def copiedZipFilePath = null - def expectedZipFilenameOnDrive = null - - // --- Find the full path of the zip file in the upstream workspace --- - try { - echo "Attempting to find the zip file in upstream workspace: ${env.UPSTREAM_WORKSPACE}\\\\${env.UPSTREAM_ZIP_PATH_RELATIVE}" - // Use PowerShell to find the first file matching the pattern (.zip only) - // Use -ErrorAction Stop to ensure the error is caught by Groovy try/catch - // Ensure paths are quoted for safety - upstreamZipFullPath = powershell(script: "Get-ChildItem -Path \"${env:UPSTREAM_WORKSPACE}\\\\${env:UPSTREAM_ZIP_PATH_RELATIVE}\" -ErrorAction Stop | Select-Object -First 1 -ExpandProperty FullName", returnStdout: true).trim() - - if (upstreamZipFullPath) { - echo "Found upstream file: ${upstreamZipFullPath}" - // Extract just the filename from the full path - copiedZipFilename = upstreamZipFullPath.substring(upstreamZipFullPath.lastIndexOf('\\') + 1) - echo "Derived filename: ${copiedZipFilename}" - // DEBUG: Show the derived filename explicitly - echo "DEBUG: Derived copiedZipFilename = '${copiedZipFilename}'" - - // --- Removed Workaround: Remove trailing .zip if it's .zip.zip --- - copiedZipFilePath = "${env:WORKSPACE}\\\\${env:TMP_UPLOAD_DIR}\\\\${copiedZipFilename}" // Path where it will be copied in THIS workspace - expectedZipFilenameOnDrive = copiedZipFilename // The name on Google Drive is the same as the original filename + // Find all relevant files in the current workspace root + def foundFiles = findFiles(glob: 'AzaionSuite*.exe') + findFiles(glob: 'AzaionSuite*.bin') + filesToArchive = foundFiles.collect { it.path } // Get list of paths relative to the workspace root + // --- Version Extraction (only from .exe if present) --- + def exeFiles = findFiles(glob: 'AzaionSuite*.exe') + if 
(exeFiles.size() > 0) { + exeFound = true + def exeFilePath = exeFiles[0].path // Use path from the first found exe + // Regex to find version like 1.2.3 or 1.2.3-4 followed by .exe + // Match against the full path (which is relative to workspace root here) + def matcher = (exeFilePath =~ /AzaionSuite(\d+\.\d+\.\d+(-\d+)?)\.exe/) + if (matcher.find()) { + version = matcher.group(1) + echo "Found version for archive: ${version}" } else { - // Updated error message to reflect looking for .zip only - error("No *.zip file found in the upstream workspace '${env.UPSTREAM_WORKSPACE}\\\\${env.UPSTREAM_ZIP_PATH_RELATIVE}'. Cannot proceed.") - return // Exit script block safely - } - } catch (e) { - echo "Error finding upstream file: ${e.message}" - error("Failed to find the zip file in the upstream workspace.") - return // Exit script block safely - } - // --- End Find upstream zip file name --- - - echo "Target copy path in this workspace: ${copiedZipFilePath}" - echo "Expected zip filename on Google Drive: ${expectedZipFilenameOnDrive}" - - - // --- Get list of existing zip files on Google Drive --- - def existingZipFiles = [] - try { - echo "Checking for existing zip files on Google Drive: ${GDRIVE_REMOTE}..." - // Use standard Windows PowerShell to execute rclone (now in PATH) - // Ensure config path and remote path are quoted, and use backslashes for Windows paths - def rcloneListCommand = """ - rclone --config "${env:WORKSPACE}\\\\${RCLONE_CONFIG}" lsjson "${GDRIVE_REMOTE}" - """ - def rcloneListOutput = powershell(script: rcloneListCommand, returnStdout: true).trim() ?: '[]' - - if (!rcloneListOutput.startsWith('[') || !rcloneListOutput.endsWith(']')) { - echo "Warning: rclone lsjson did not return a valid JSON array for existing files check. 
Output: ${rcloneListOutput}" - // Continue, but assume no existing files to be safe - existingZipFiles = [] - } else { - def allFilesJson = readJSON text: rcloneListOutput - // Filter for zip files and ensure Name exists - // Removed workaround application here - existingZipFiles = allFilesJson.findAll { it.Name?.endsWith(".zip") }.collect { it.Name } - // DEBUG: Print the exact list of existing zip files found (without workaround) - echo "DEBUG: Exact list of existing zip files on Google Drive: ${existingZipFiles}" - } - } catch (e) { - echo "An error occurred while checking existing files on Google Drive: ${e}" - // Continue, but assume no existing files to be safe - existingZipFiles = [] - } - // --- End getting existing files list --- - - // DEBUG: Print the exact expected zip filename being checked - echo "DEBUG: Checking for existence of expected zip file: ${expectedZipFilenameOnDrive}" - - // --- Manual Check if the zip file already exists on Google Drive --- - def fileExistsOnDrive = false - for (existingFile in existingZipFiles) { - // Compare the original filename to the list from Google Drive (without workaround) - if (existingFile == expectedZipFilenameOnDrive) { - fileExistsOnDrive = true - break // Found a match, no need to check further - } - } - // --- End Manual Check --- - - if (!fileExistsOnDrive) { // Use the result of the manual check - // If we reach here, the zip file does NOT exist on Google Drive, so proceed with copying and uploading - - echo "Zip file ${expectedZipFilenameOnDrive} does not exist on Google Drive. Proceeding with copying and uploading." - - try { - // --- Copy the zip file from the upstream workspace --- - // Use the original upstreamZipFullPath for the source path - echo "Copying zip file from '${upstreamZipFullPath}' to '${copiedZipFilePath}'..." 
- // DEBUG: Show source and destination paths for Copy-Item - echo "DEBUG: Copy-Item Source: '${upstreamZipFullPath}'" - echo "DEBUG: Copy-Item Destination: '${copiedZipFilePath}'" - // Use standard Windows PowerShell Copy-Item - // Ensure paths are quoted for safety and use backslashes - powershell "Copy-Item -Path \"${upstreamZipFullPath}\" -Destination \"${copiedZipFilePath}\" -Force" - echo "Successfully copied zip file." - - // --- Upload the copied ZIP archive to Google Drive --- - // Use the original filename for the upload source path - echo "Starting upload of '${copiedZipFilename}' to ${GDRIVE_REMOTE}..." - // DEBUG: Show source path for rclone copy - echo "DEBUG: rclone copy Source: '${copiedZipFilePath}'" - // Use standard Windows PowerShell to execute rclone (now in PATH) - powershell """ - rclone --config "${env:WORKSPACE}\\\\${RCLONE_CONFIG}" copy \"${copiedZipFilePath}\" \"${GDRIVE_REMOTE}\" - """ - echo "Finished uploading ${copiedZipFilename}." - - } catch (e) { - echo "ERROR processing build (copy/upload): ${e}" - // Consider adding a flag here to mark that at least one build failed processing - // This could be checked in a later post block to decide if the overall build should be marked as unstable - error("Failed to copy or upload build: ${e.message}") // Fail the stage on error - } finally { - // Clean up the copied zip file after upload attempt - // Use standard Windows PowerShell Remove-Item - echo "Cleaning up copied zip file: ${copiedZipFilePath}" - powershell "Remove-Item -Force \"${copiedZipFilePath}\" -ErrorAction SilentlyContinue" + echo "Warning: Could not extract version from '${exeFiles[0].name}'. Using default: ${version}" } } else { - // If the file DOES exist on Google Drive, print the skipping message - echo "Skipping upload: ${expectedZipFilenameOnDrive} already exists on Google Drive." - // No file was copied in this case, so no cleanup needed in finally + echo "Warning: No executable found to extract version for archive. 
Using default: ${version}" + } - } // end script - } // end steps - } // end stage - stage('Retention on Google Drive') { - steps { - script { // Wrap script logic in a script block - echo "Starting Google Drive retention process (using PowerShell)..." + // --- Zipping Logic --- + if (filesToArchive.size() > 0) { + // Get current date and time in yyyyMMdd-HHmmss format using PowerShell (.NET date format specifiers are case-sensitive: 'YYYY' is not a valid year token) + // Using a separate bat call to ensure output is captured cleanly; the leading '@' keeps cmd from echoing the command into the captured stdout + def timestamp = bat( - // Ensure rclone is installed and in PATH on the Windows agent, - // and the Jenkins agent user has read access to '${env:WORKSPACE}\\\\${RCLONE_CONFIG}'. + script: '@powershell -Command "Get-Date -Format yyyyMMdd-HHmmss"', + returnStdout: true + ).trim() // Trim to remove potential newline characters - // PowerShell script block for retention logic - powershell """ - \$rcloneRemote = "${env:GDRIVE_REMOTE}" - \$rcloneConfig = "${env:WORKSPACE}\\\\${env:RCLONE_CONFIG}" - \$filesToKeep = ${env.FILES_TO_KEEP} + def zipFilename = "AzaionSuite.${version}-${timestamp}.zip" + // 7-Zip command requires quoting paths with spaces. + // We provide full paths relative to the workspace root. + def filesListString = filesToArchive.collect { "\"${it}\"" }.join(' ') // Quote each file path and join - # Get list of files from Google Drive as JSON - \$rcloneListOutput = rclone --config "\$rcloneConfig" lsjson "\$rcloneRemote" | Out-String + echo "Creating zip archive: ${zipFilename} using 7-Zip." + echo "Files to include (full paths): ${filesListString}" - # Parse JSON output - # ConvertFrom-Json will throw an error if the input is not valid JSON, - # which will cause the PowerShell step and the stage to fail. 
- \$allFilesJson = \$rcloneListOutput | ConvertFrom-Json + // Construct the full 7z command string in Groovy + def sevenZipCommand = "7z a -tzip \"${zipFilename}\" ${filesListString}" - # Filter for zip files - \$zipFiles = \$allFilesJson | Where-Object { \$_.Name -ne \$null -and \$_.Name.EndsWith(".zip") } + // Execute the constructed command string using a single bat step + bat """ + @echo off + echo Zipping files with 7-Zip... + ${sevenZipCommand} + if %errorlevel% neq 0 ( + echo Error creating zip archive with 7-Zip. 7z exit code: %errorlevel% + exit /b %errorlevel% + ) + echo Zip archive created successfully by 7-Zip. + """ - Write-Host "Found \$(\$zipFiles.Count) total ZIP files on Google Drive." + // Archive the created zip file using Jenkins built-in step + // This makes the zip available for the downstream Google Drive upload job + archiveArtifacts artifacts: "${zipFilename}", fingerprint: true + echo "Archive step completed." - # --- Sorting of ZIP Files by Filename Timestamp (in PowerShell) --- - Write-Host "Sorting ZIP files on Google Drive by filename timestamp (YYYYMMDD-HHMMSS)..." 
- - # Regex to extract the timestamp from the filename - \$timestampRegex = '.*-(\\d{8}-\\d{6})\\.zip' - - # Sort the files by extracting the timestamp and converting to DateTime for accurate sorting - # Sort-Object -Descending ensures newest are first - \$sortedZipFiles = \$zipFiles | Sort-Object -Descending { - \$name = \$_.Name - \$match = [regex]::Match(\$name, \$timestampRegex) - if (\$match.Success -and \$match.Groups.Count -gt 1) { - \$timestampStr = \$match.Groups[1].Value - # Attempt to parse the timestamp string into a DateTime object - try { - [DateTime]::ParseExact(\$timestampStr, "yyyyMMdd-HHmmss", \$null) - } catch { - Write-Host "Warning: Could not parse timestamp from filename '\$name': \$(\$_.Exception.Message)" - # Handle parsing errors - treat as the oldest possible date (e.g., 1/1/0001) - # This ensures unparseable dates are placed at the end (oldest) - [DateTime]::MinValue - } - } else { - Write-Host "Warning: Filename '\$name' does not match timestamp regex." - # Handle non-matching filenames - treat as the oldest possible date - [DateTime]::MinValue - } - } - # --- End Sorting --- - - # DEBUG: Print the sorted list by filename timestamp with each file on a new line - Write-Host "DEBUG: ZIP files on Google Drive sorted by filename timestamp (newest first):" - \$sortedZipFiles | ForEach-Object { Write-Host \$_.Name } - - - # Keep the latest N files, identify the rest for deletion - if (\$sortedZipFiles.Count -gt \$filesToKeep) { - # Select the files to delete (from index FILES_TO_KEEP to the end) - \$filesToDelete = \$sortedZipFiles | Select-Object -Skip \$filesToKeep - - Write-Host "Applying retention: Keeping \$filesToKeep newest files, deleting \$(\$filesToDelete.Count) older files." 
- # DEBUG: Print the list of files identified for deletion - Write-Host "DEBUG: Files identified for deletion:" - \$filesToDelete | ForEach-Object { Write-Host \$_.Name } - - - # Loop through files to delete and execute rclone delete - foreach (\$oldZipInfo in \$filesToDelete) { - \$oldZipName = \$oldZipInfo.Name - Write-Host "Deleting old ZIP from Google Drive: \$oldZipName" - # Ensure filenames are quoted for safety, especially if they contain spaces - # Use errorHanding: 'ignore' if you want to continue even if a delete fails - rclone --config "\$rcloneConfig" delete "\$rcloneRemote/\$oldZipName" --drive-use-trash=false - } - } else { - Write-Host "Retention check: Found \$(\$sortedZipFiles.Count) ZIP files, which is not more than \$filesToKeep. No files deleted." - } - """ // End PowerShell script block - } // end script - } // end steps - } // end stage - - } // End of main 'stages' block - - post { - always { - script { // Wrap steps in a script block - echo "Executing post-build cleanup..." - // Use standard Windows PowerShell Remove-Item for cleanup - // Quote TMP_UPLOAD_DIR path for safety and use backslashes for Windows paths - powershell """ - echo 'Cleaning up temporary upload directory: ${env:WORKSPACE}\\\\${env:TMP_UPLOAD_DIR}' - Remove-Item -Recurse -Force "${env:WORKSPACE}\\\\${env:TMP_UPLOAD_DIR}" -ErrorAction SilentlyContinue - """ - // Removed cleanup for copied artifacts directory as files are cleaned up individually now. - } // end script + } else { + error "No files (.exe or .bin) found in the copied artifacts to archive. Cannot create zip." + } + } + } } - success { - script { // Wrap steps in a script block - echo "Pipeline finished successfully." - // Add any success-specific notifications or actions here - } // end script - } - failure { - script { // Wrap steps in a script block - echo "Pipeline failed. Check logs for details." 
- // Add any failure-specific notifications or actions here - } // end script - } - } // End of 'post' block -} // End of 'pipeline' block + stage('Trigger Google Drive Upload') { + steps { + script { + echo "Triggering Google Drive Upload pipeline: ${env.GOOGLE_DRIVE_UPLOAD_JOB_NAME}" + // Trigger the Google Drive upload pipeline + // This assumes the Google Drive job is configured to copy artifacts + // from THIS job (the one creating the zip). + build job: env.GOOGLE_DRIVE_UPLOAD_JOB_NAME + } + } + } + } + // No post section needed for this job, post actions will be handled by the triggered job +}