Back up SharePoint Sites with PowerShell

This will back up your SharePoint sites to a given path and delete any backups older than 3 days.

<#
  Backs up every SharePoint site collection in the farm to $backupPath
  and deletes any .bak files in that folder older than three days.
#>
param(
    # Destination folder for the .bak files. The $(throw ...) default makes
    # the parameter effectively mandatory: the original $("message") form
    # silently assigned the message STRING as the backup path instead of failing.
    $backupPath = $(throw "You must enter a location to place the backup files")
)

# On PowerShell v2+ reuse the same thread per pipeline; the SharePoint
# snap-in misbehaves under the default thread model.
$ver = $host | select version
if ($ver.Version.Major -gt 1)  {$Host.Runspace.ThreadOptions = "ReuseThread"}
Add-PsSnapin Microsoft.SharePoint.PowerShell

# Date stamp appended to each backup file name, e.g. 20240101.
$fileDate = Get-Date -Format yyyyMMdd

# Back up every site collection in every web application.
# Join-Path replaces raw string concatenation so $backupPath no longer
# needs a trailing backslash to produce a valid path.
Get-SPWebApplication | Get-SPSite | ForEach-Object {
    $fileName = $_.Url.Replace("http://", "").Replace("/", "-") + "_" + $fileDate + ".bak"
    $filePath = Join-Path $backupPath $fileName
    Backup-SPSite -Identity $_.Url -Path $filePath
}

# Prune backups older than three days.
$cutoff = (Get-Date).AddDays(-3)
Get-ChildItem -Path $backupPath -Filter *.bak |
    Where-Object { $_.CreationTime -lt $cutoff } |
    ForEach-Object { Remove-Item $_.FullName -Force }

Access is Denied error when Crawling Sharepoint 2010

When trying to get search working correctly, the full crawl on our SharePoint 2010 server kept coming back with “Access is denied. Check that the Default Content Access Account has access to this content, or add a crawl rule to crawl this content”.  This error is due to MS implementing a loopback check to prevent reflection attacks.  To disable the loopback check, do the following:

  1. Open Regedit
  2. Navigate to HKLM\SYSTEM\CurrentControlSet\Control\Lsa
  3. Create a new DWord (32-bit) value called DisableLoopbackCheck and set its value to 1
  4. Kick off a crawl
  5. Profit $$$

Get items from a SharePoint list

/// <summary>
/// Reads the "ServerName" field of every item returned by a CAML query
/// against the "Servers" list in the root web of a fixed site collection.
/// </summary>
/// <returns>An ArrayList of server name strings.</returns>
private ArrayList GetServerList()
{
    ArrayList serverNames = new ArrayList();
    // Site collection is addressed by its fixed GUID.
    // (Smart quotes from the original published snippet replaced with
    // real double quotes so the code compiles.)
    Guid siteId = new Guid("46089E30-FCD1-45C9-9437-454754C5058D");

    // SPSite and SPWeb implement IDisposable and must be disposed to
    // avoid leaking SharePoint COM resources; the original never did.
    using (SPSite siteCollection = new SPSite(siteId))
    using (SPWeb rootWeb = siteCollection.OpenWeb()) // root web, same as AllWebs[""]
    {
        SPList serverList = rootWeb.Lists["Servers"];

        SPQuery query = new SPQuery();
        // NOTE(review): the original CAML markup was stripped when this
        // snippet was published ("" + "True" is what survived). "True"
        // alone is not valid CAML — reconstruct the intended <Where>
        // clause (likely an <Eq>/<BeginsWith> filter) before relying on it.
        query.Query = "True";

        SPListItemCollection items = serverList.GetItems(query);
        foreach (SPListItem item in items)
        {
            serverNames.Add(item["ServerName"].ToString());
        }
    }
    return serverNames;
}