# ********************************** RemoveDuplicateLinks.ps1 **********************************
#New version of the detect and removal script:
# You put the affected site collections into a textfile -> sites.txt <- which is stored in the same path as the ps script
# It will check all sites of each site collection of the sites.txt file to see if there are duplicates in the QuickLaunch and/or TopNavigation (NEW!)
# Logging to a logfile commented out as it was not efficient (filling up disk space) …
# Tool will still output everything to the cmd prompt.
# THIS CODE-SAMPLE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR
# FITNESS FOR A PARTICULAR PURPOSE.
#
# This sample is not supported under any Microsoft standard support program or service.
# The script is provided AS IS without warranty of any kind. Microsoft further disclaims all
# implied warranties including, without limitation, any implied warranties of merchantability
# or of fitness for a particular purpose. The entire risk arising out of the use or performance
# of the sample and documentation remains with you. In no event shall Microsoft, its authors,
# or anyone else involved in the creation, production, or delivery of the script be liable for
# any damages whatsoever (including, without limitation, damages for loss of business profits,
# business interruption, loss of business information, or other pecuniary loss) arising out of
# the use of or inability to use the sample or documentation, even if Microsoft has been advised
# of the possibility of such damages.
################################################################################################
# Load the SharePoint cmdlets (Get-SPSite/Get-SPWeb). SilentlyContinue so the
# script keeps running when the snap-in is already loaded in this session.
Add-PSSnapin Microsoft.SharePoint.PowerShell -ErrorAction SilentlyContinue
function tryDeleteNode
{
    # Deletes $node from $nodeCollection when a node with the same Title AND the
    # same Url was already recorded in $dictionary; otherwise records it.
    #
    # Parameters:
    #   $node           - navigation node with Title, Url and Children properties
    #   $dictionary     - hashtable mapping Title -> Url of nodes seen so far
    #   $nodeCollection - the collection $node belongs to (used for Delete)
    #
    # Side effects: sets $global:didDelete = $true when a node is removed, so the
    # caller's do/while loop knows to rescan. Logs progress to stdout.
    # NOTE(review): the log line reads $site.Url from script scope — assumes the
    # caller has set $site before invoking this function; TODO confirm.
    param
    (
        $node,$dictionary,$nodeCollection
    )
    $title = $node.Title
    # Hashtable.ContainsKey($null) throws ArgumentNullException — skip nodes
    # that have no title rather than crashing the whole scan.
    if([string]::IsNullOrEmpty($title))
    {
        echo " -> Node without a title encountered, Skipping..."
        return
    }
    if(!$dictionary.ContainsKey($title))
    {
        # First occurrence of this title: remember its Url for later comparison.
        $dictionary.Add($title,$node.Url)
    }
    else
    {
        # -eq on strings is case-insensitive in PowerShell, matching SharePoint URLs.
        if($dictionary[$title] -eq $node.Url)
        {
            if($node.Children.Count -eq 0)
            {
                echo " -> Deleting Duplicate Node: $title"
                $nodeCollection.Delete($node)
                $global:didDelete= $true
                $temp = (get-date).ToString() +";"+ ($site.Url) +";"+ ($title)
                echo "$temp"
            }
            else
            {
                # Deleting a node with children would drop its subtree — leave it.
                echo " -> Dupe Node $title has children, Skipping..."
            }
        }
        else
        {
            # Same title but different target: not a true duplicate.
            echo " -> Duplicate title $title found, but mismatched link, Skipping..."
        }
    }
}
function deleteNodesRecurse
{
    # De-duplicates a navigation node collection two levels deep: for each
    # top-level node, its children are scanned first, then the node itself.
    # Actual deletion/bookkeeping is delegated to tryDeleteNode.
    #
    # Parameters:
    #   $navNodes - node collection to scan; defaults to script-scope
    #               $quickLaunch so existing parameterless calls keep working.
    param
    (
        $navNodes = $quickLaunch
    )
    $nodes = @{}
    # Iterate over snapshots (@(...)): tryDeleteNode may call Delete() on the
    # live collection, and enumerating a collection while removing from it can
    # throw "collection was modified".
    foreach($node in @($navNodes))
    {
        $childNodes = @{}
        foreach($child in @($node.Children))
        {
            tryDeleteNode -node $child -dictionary $childNodes -nodeCollection $node.Children
        }
        tryDeleteNode -node $node -dictionary $nodes -nodeCollection $navNodes
    }
}
function deleteGlobalNodesRecurse
{
    # Scans the publishing web's global (top) navigation — script-scope
    # $gnavNodes — and removes duplicate leaf nodes via tryDeleteNode.
    # Each top-level node's children are de-duplicated first, then the
    # top-level nodes themselves.
    $seenTopLevel = @{}
    foreach($topNode in $gnavNodes)
    {
        $seenChildren = @{}
        foreach($childNode in $topNode.Children)
        {
            tryDeleteNode -node $childNode -dictionary $seenChildren -nodeCollection $topNode.Children
        }
        tryDeleteNode -node $topNode -dictionary $seenTopLevel -nodeCollection $gnavNodes
    }
}
#$affectedSiteCollections = get-content sites.txt
#foreach ($sitecoll in $affectedSiteCollections)
#{
# $sitecoll = Get-SPSite "http://isgdev5:43007"
# write-host "SiteCollection: " $sitecoll.URL
# foreach ($site in $sitecoll.AllWebs)
# {
# Single-site mode: operate on one hard-coded web (the commented-out scaffolding
# above restores the original multi-site-collection loop driven by sites.txt).
$site = Get-SPWeb "http://portal.ibsplc.com/crgltr"
if ($site -eq $null)
{
    write-host "Could not open site, aborting."
    return
}
try
{
    write-host " -> Site: " $site.URL
    # Repeat the scan until a full pass deletes nothing: each delete sets
    # $global:didDelete and may have shifted the collections under us.
    do
    {
        $quickLaunch = $site.Navigation.QuickLaunch
        $global:didDelete = $false
        deleteNodesRecurse
        $pub = [Microsoft.SharePoint.Publishing.PublishingWeb]::GetPublishingWeb($site)
        # Guard: not every SPWeb is a publishing web with global navigation.
        if ($pub -ne $null)
        {
            $gnavNodes = $pub.Navigation.GlobalNavigationNodes
            deleteGlobalNodesRecurse
        }
    }
    while($global:didDelete)
}
finally
{
    # Always release the SPWeb handle, even if node deletion throws midway.
    $site.Dispose()
}
#}
# $sitecoll.Dispose()
#}