Thursday, September 27, 2018

Adding Robots.txt to all Site Collections
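The script below builds a robots.txt file with a set of Disallow entries and uploads it to the root web of every site collection in a web application, so that search engine crawlers skip the listed pages and the _layouts folders.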


# Load the SharePoint snap-in if the script is not run from the SharePoint Management Shell
if ((Get-PSSnapin -Name Microsoft.SharePoint.PowerShell -ErrorAction SilentlyContinue) -eq $null)
{
    Add-PSSnapin Microsoft.SharePoint.PowerShell
}

# Use any site collection to get a reference to its web application
$site = Get-SPSite http://abc.com
$spWebApp = $site.WebApplication

# Pages and system folders that search engine crawlers should skip
$disallowEntries = @(
    "Disallow: /en/pages/bill-pay.aspx",
    "Disallow: /en/pages/customer-services.aspx",
    "Disallow: /es/Paginas/bill-pay.aspx",
    "Disallow: /es/Paginas/customer-services.aspx",
    "Disallow: /_layouts",
    "Disallow: /en/_layouts",
    "Disallow: /es/_layouts",
    "Disallow: /PressReleases/_layouts",
    "Disallow: /Search/_layouts"
)

foreach ($siteCollection in $spWebApp.Sites)
{
    # Build a fresh robots.txt on the local disk
    New-Item c:\robots.txt -ItemType File -Force | Out-Null
    Add-Content -Value "User-agent: *" -Path c:\robots.txt
    foreach ($entry in $disallowEntries)
    {
        Add-Content -Value $entry -Path c:\robots.txt
    }

    # Upload the file to the root web of the site collection, overwriting any existing copy
    $fileBytes = [System.IO.File]::ReadAllBytes("c:\robots.txt")
    $siteCollection.RootWeb.Files.Add("robots.txt", $fileBytes, $true) | Out-Null

    # Release the SPSite object returned by the enumerator
    $siteCollection.Dispose()
}

$site.Dispose()
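
Once the script has run, it is worth confirming that each site collection actually serves the file at its root. The loop below is only a verification sketch, not part of the original post: it assumes anonymous access is enabled on the site collections and that Invoke-WebRequest (PowerShell 3.0 or later) is available on the server.

# Verification sketch: request /robots.txt from each site collection
# (assumes anonymous access and Invoke-WebRequest, PowerShell 3.0+)
$spWebApp = Get-SPWebApplication http://abc.com
foreach ($siteCollection in $spWebApp.Sites)
{
    $robotsUrl = $siteCollection.Url.TrimEnd('/') + "/robots.txt"
    try
    {
        $response = Invoke-WebRequest -Uri $robotsUrl -UseBasicParsing
        Write-Host "$robotsUrl -> HTTP $($response.StatusCode)"
    }
    catch
    {
        Write-Host "$robotsUrl -> request failed: $($_.Exception.Message)"
    }

    # Release the SPSite object returned by the enumerator
    $siteCollection.Dispose()
}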


