# Must be included with 'json.ps1'
function find_hash_in_rdf([String] $url, [String] $basename) {
    # Downloads an RDF/XML metadata file from $url and returns the formatted
    # SHA-256 digest of the entry whose 'about' attribute matches $basename.
    # Returns $null when the URL cannot be downloaded.
    $xml = $null
    try {
        # Download and parse RDF XML file.
        # DownloadData + Get-Encoding is used instead of DownloadString so the
        # response bytes are decoded with the charset the server declared,
        # avoiding mojibake in the parsed XML.
        $wc = New-Object Net.Webclient
        $wc.Headers.Add('Referer', (strip_filename $url))
        $wc.Headers.Add('User-Agent', (Get-UserAgent))
        $data = $wc.DownloadData($url)
        [xml]$xml = (Get-Encoding($wc)).GetString($data)
    } catch [system.net.webexception] {
        write-host -f darkred $_
        write-host -f darkred "URL $url is not valid"
        return $null
    }

    # Find the content entry for the requested file.
    $digest = $xml.RDF.Content | Where-Object { [String]$_.about -eq $basename }

    return format_hash $digest.sha256
}
@@ -35,7 +36,8 @@ function find_hash_in_textfile([String] $url, [Hashtable] $substitutions, [Strin
3536 $wc = New-Object Net.Webclient
3637 $wc.Headers.Add (' Referer' , (strip_filename $url ))
3738 $wc.Headers.Add (' User-Agent' , (Get-UserAgent ))
38- $hashfile = $wc.downloadstring ($url )
39+ $data = $wc.DownloadData ($url )
40+ $hashfile = (Get-Encoding ($wc )).GetString($data )
3941 } catch [system.net.webexception ] {
4042 write-host -f darkred $_
4143 write-host -f darkred " URL $url is not valid"
@@ -88,7 +90,8 @@ function find_hash_in_json([String] $url, [Hashtable] $substitutions, [String] $
8890 $wc = New-Object Net.Webclient
8991 $wc.Headers.Add (' Referer' , (strip_filename $url ))
9092 $wc.Headers.Add (' User-Agent' , (Get-UserAgent ))
91- $json = $wc.downloadstring ($url )
93+ $data = $wc.DownloadData ($url )
94+ $json = (Get-Encoding ($wc )).GetString($data )
9295 } catch [system.net.webexception ] {
9396 write-host -f darkred $_
9497 write-host -f darkred " URL $url is not valid"
@@ -108,7 +111,8 @@ function find_hash_in_xml([String] $url, [Hashtable] $substitutions, [String] $x
108111 $wc = New-Object Net.Webclient
109112 $wc.Headers.Add (' Referer' , (strip_filename $url ))
110113 $wc.Headers.Add (' User-Agent' , (Get-UserAgent ))
111- $xml = [xml ]$wc.downloadstring ($url )
114+ $data = $wc.DownloadData ($url )
115+ $xml = [xml ]((Get-Encoding ($wc )).GetString($data ))
112116 } catch [system.net.webexception ] {
113117 write-host -f darkred $_
114118 write-host -f darkred " URL $url is not valid"
0 commit comments