The next adaptation was to take the existing HTML report, convert it into a CSV, and upload it to a central location. This CSV would then be manually copied into the master Excel document.
==============================================================
# Load the Veeam Backup & Replication snap-in so the VBR cmdlets are available.
Add-PSSnapin VeeamPsSnapIn

# Report configuration: recipient address, current time, a ddMMyy date stamp
# for file names, and the start of the 24-hour backup window.
$email = "someone@somewhere.com"
$now = Get-Date
$date = Get-Date -Format ddMMyy
$bckuwindow = $now.AddHours(-24)

# Collect every Veeam job, sorted alphabetically for a stable report order.
$jobs = Get-VBRJob | Sort-Object -Property Name

# Accumulator for the per-job status rows built below.
$report = @()
# Build one Name/Status row per job. The decision order matters and is kept
# exactly as before: running > finished inside the 24h window > has a next
# run scheduled > disabled > not scheduled > unknown.
foreach ($job in $jobs) {
    $jobName    = $job.Name
    $lastRun    = $job.ScheduleOptions.LatestRunlocal
    $isRunning  = $job.IsRunning
    $lastState  = $job.GetLastState()    # fetched but not used below; kept for parity with the original
    $lastResult = $job.GetLastResult()
    $nextRun    = $job.ScheduleOptions.NextRun
    $isEnabled  = $job.IsScheduleEnabled

    if ($isRunning -eq $True) {
        $status = "Job Running"
    }
    elseif ($lastRun -gt $bckuwindow) {
        # Job completed within the backup window — report its result and when.
        $status = "$($lastResult) on $($lastRun.ToShortDateString()) at $($lastRun.ToShortTimeString())"
    }
    elseif ($nextRun -ne "") {
        # No recent run, but a future run is scheduled — report when it fires.
        $nextRunDate = [datetime]$nextRun
        $status = "Next Run $($nextRunDate.DayOfWeek) $($nextRunDate.Day) at $($nextRunDate.ToShortTimeString())"
    }
    elseif ($isEnabled -eq $false) {
        $status = "Job Disabled"
    }
    elseif ($nextRun -eq "") {
        $status = "Job Not Scheduled"
    }
    else {
        $status = "Error Loading Job Details – Investigate"
    }

    $report += New-Object psobject -Property @{ Name = $jobName; Status = $status }
}
# Detect jobs added or removed since the previous run by diffing the current
# job names against the list persisted by the last execution.
$Currentcheckfile = "C:\PSLogs\Job_List\Job_List_Current.txt"
$checkjobname = $($jobs).Name

# BUG FIX: on a first run the check file does not exist, so Get-Content (and
# the later Clear-Content) would fail. Create an empty file up front.
if (-not (Test-Path -Path $Currentcheckfile)) {
    New-Item -Path $Currentcheckfile -ItemType File -Force | Out-Null
}

# Job list from the previous run ($null when the file was empty / just created).
$checkimport = Get-Content -Path $Currentcheckfile

# Persist the current job list for the next run.
Clear-Content -Path $Currentcheckfile
Add-Content -Value $checkjobname -Path $Currentcheckfile

# "<=" = present now but not before (added); "=>" = present before but gone (removed).
# BUG FIX: Compare-Object throws when -DifferenceObject is null, which is
# exactly the first-run case — skip the comparison and report no changes.
if ($null -ne $checkimport) {
    $compare = Compare-Object -ReferenceObject (Get-Content -Path $Currentcheckfile) -DifferenceObject $checkimport
}
else {
    $compare = @()
}
# Translate each Compare-Object result into a human-readable change row.
# A "<=" side indicator means the job exists only in the current list (added);
# "=>" means it exists only in the previous list (removed).
$comparejoblist = foreach ($comparejob in $compare) {
    if ($comparejob.SideIndicator -eq "<=") {
        $newjoblist = @{
            'Status:' = 'Job Added'
            'Name:'   = $comparejob.InputObject
        }
    }
    elseif ($comparejob.SideIndicator -eq "=>") {
        $newjoblist = @{
            'Status:' = 'Job Removed'
            'Name:'   = $comparejob.InputObject
        }
    }
    # Emit the row; foreach captures every emitted object into $comparejoblist.
    New-Object -TypeName PSObject -Property $newjoblist
}
# HTML fragment listing job additions/removals, appended below the main table.
$comparehtml = $comparejoblist | ConvertTo-Html -Title "Backup Log $Date" -PreContent "<br>"

# Inline CSS applied to the final HTML report.
$style = "<style>BODY{font-family: Arial; font-size: 10pt;}"
$style = $style + "TABLE{border: 1px solid black; border-collapse: collapse;}"
$style = $style + "TH{border: 1px solid black; background: #dddddd; padding: 5px; }"
$style = $style + "TD{border: 1px solid black; padding: 5px; }"
$style = $style + "</style> <center>"

# BUG FIX: Export-Csv writes the file but emits NOTHING to the pipeline, so
# the original one-liner ($report | Export-Csv ... | ConvertTo-Html) produced
# HTML with no job table at all. Write the CSV and build the HTML separately.
# -NoTypeInformation drops the "#TYPE" header row so the CSV pastes cleanly
# into the master Excel document.
$report | Export-Csv -Path "C:\PSLogs\$env:COMPUTERNAME – $date.csv" -NoTypeInformation
$finalhtml = $report | ConvertTo-Html -Title "Backup Log $Date" -Head $style -PostContent $comparehtml
# --- Upload configuration ---
$Dir     = 'C:\PSLogs\'    # directory scanned for finished CSV reports
$saveDir = 'C:\CSVLogs\'   # local archive for reports that uploaded successfully

# FTP server parameters.
# NOTE(review): credentials are hardcoded in plain text — move them to a
# secured credential store before using this outside a lab.
$ftp  = 'ftp://ftp.somewhere.com/'
$user = 'username'
$pass = 'password'

# WebClient used for the FTP transfers, with the credentials attached.
$webclient = New-Object System.Net.WebClient
$webclient.Credentials = New-Object System.Net.NetworkCredential($user, $pass)
#Search for reports in directory
# Upload each settled CSV report via FTP, archive it locally on success,
# then delete the original so it is not uploaded twice.
foreach ($item in (Get-ChildItem -Path $Dir -Filter "*.csv"))
{
    # Assume the upload will succeed until the catch block says otherwise.
    $onNetwork = "1"

    # BUG FIX: the original converted both times with ToFileTime() and compared
    # the difference against "2". FILETIME is measured in 100-nanosecond ticks,
    # so that threshold was ~200ns — effectively always true, and the intended
    # 2-second settle delay (to let large reports finish writing to disk)
    # never actually applied. Compute the file's age in seconds instead.
    $fileAgeSeconds = ((Get-Date) - $item.CreationTime).TotalSeconds

    if ($fileAgeSeconds -gt 2) {
        # Attempt the FTP upload; on failure, flag it and leave the file in
        # place so the next run retries it.
        try {
            $uri = New-Object System.Uri($ftp + $item.Name)
            $webclient.UploadFile($uri, $item.FullName)
        } catch [Exception] {
            $onNetwork = "0"
            Write-Host $_.Exception.Message
        }

        # Only archive and remove the report once the upload succeeded.
        if ($onNetwork -eq "1") {
            "Copying $item…"
            # Join-Path builds an explicit destination path instead of relying
            # on $saveDir$item string coercion (FileInfo stringification varies
            # between PowerShell versions).
            Copy-Item -Path $item.FullName -Destination (Join-Path -Path $saveDir -ChildPath $item.Name)
            "Deleting $item…"
            Remove-Item $item.FullName
        }
    }
}
