Bug 1532902 - Check https certificates during update verify, r=sfraser
author: Nick Thomas <nthomas@mozilla.com>
Wed, 06 Mar 2019 08:53:41 +0000
changeset 520445 332c017e85ec70c2f26c9a5bc3df39c6abebe3ca
parent 520444 4348e60c81ed02630b24c9dfa4b49cf5d0a2b696
child 520446 2632c70628d7f70dae6425e894cbaf9b9dc87e5f
push id: 10862
push user: ffxbld-merge
push date: Mon, 11 Mar 2019 13:01:11 +0000
treeherder: mozilla-beta@a2e7f5c935da [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: sfraser
bugs: 1532902
milestone: 67.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1532902 - Check https certificates during update verify, r=sfraser Differential Revision: https://phabricator.services.mozilla.com/D22267
tools/update-verify/release/common/cached_download.sh
tools/update-verify/release/common/download_mars.sh
tools/update-verify/release/get-update-xml.sh
tools/update-verify/release/test-mar-url.sh
tools/update-verify/release/updates/verify.sh
--- a/tools/update-verify/release/common/cached_download.sh
+++ b/tools/update-verify/release/common/cached_download.sh
@@ -24,17 +24,17 @@ cached_download () {
 
     if fgrep -x "${url}" "${cache_dir}/urls.list" >/dev/null; then
         echo "Retrieving '${url}' from cache..."
         local line_number="$(fgrep -nx  "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
         cp "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache" "${output_file}"
     else
         echo "Downloading '${url}' and placing in cache..."
         rm -f "${output_file}"
-        $retry wget -O "${output_file}" --progress=dot:mega --server-response --no-check-certificate "${url}" 2>&1
+        $retry wget -O "${output_file}" --progress=dot:mega --server-response "${url}" 2>&1
         local exit_code=$?
         if [ "${exit_code}" == 0 ]; then
             echo "${url}" >> "${cache_dir}/urls.list"
             local line_number="$(fgrep -nx  "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
             cp "${output_file}" "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache"
         else
             return "${exit_code}"
         fi
--- a/tools/update-verify/release/common/download_mars.sh
+++ b/tools/update-verify/release/common/download_mars.sh
@@ -67,17 +67,17 @@ download_mars () {
       fi
 
       command=`echo $line | sed -e 's/^.*<patch //' -e 's:/>.*$::' -e 's:\&amp;:\&:g'`
       eval "export $command"
 
       if [ "$test_only" == "1" ]
       then
         echo "Testing $URL"
-        curl -k -s -I -L $URL
+        curl -s -I -L $URL
         return
       else
         cached_download "update/${patch_type}.mar" "${URL}"
       fi
       if [ "$?" != 0 ]; then
         echo "Could not download $patch_type!"
         echo "from: $URL"
       fi
--- a/tools/update-verify/release/get-update-xml.sh
+++ b/tools/update-verify/release/get-update-xml.sh
@@ -1,16 +1,16 @@
 #!/bin/bash
 
 update_xml_url="${1}"
 patch_types="${2}"
 update_xml="$(mktemp -t update.xml.XXXXXXXXXX)"
 update_xml_headers="$(mktemp -t update.xml.headers.XXXXXXXXXX)"
 update_xml_debug="$(mktemp -t update.xml.debug.XXXXXXXXXX)"
-curl --retry 50 --retry-max-time 300 -k -s -D "${update_xml_headers}" -L -v "${update_xml_url}" > "${update_xml}" 2>"${update_xml_debug}"
+curl --retry 50 --retry-max-time 300 -s -D "${update_xml_headers}" -L -v "${update_xml_url}" > "${update_xml}" 2>"${update_xml_debug}"
 update_xml_curl_exit_code=$?
 if [ "${update_xml_curl_exit_code}" == 0 ]
 then
     update_xml_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${update_xml_headers}" | tail -1)"
     [ -n "${update_xml_actual_url}" ] && update_xml_url_with_redirects="${update_xml_url} => ${update_xml_actual_url}" || update_xml_url_with_redirects="${update_xml_url}"
     echo "$(date):  Downloaded update.xml file from ${update_xml_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
     for patch_type in ${patch_types//,/ }
     do  
--- a/tools/update-verify/release/test-mar-url.sh
+++ b/tools/update-verify/release/test-mar-url.sh
@@ -1,23 +1,23 @@
 #!/bin/bash
 mar_url="${1}"
 mar_required_size="${2}"
 
 mar_headers_file="$(mktemp -t mar_headers.XXXXXXXXXX)"
 mar_headers_debug_file="$(mktemp -t mar_headers_debug.XXXXXXXXXX)"
-curl --retry 50 --retry-max-time 300 -k -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+curl --retry 50 --retry-max-time 300 -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
 mar_file_curl_exit_code=$?
 
 # Bug 894368 - HTTP 408's are not handled by the "curl --retry" mechanism; in this case retry in bash
 attempts=1
 while [ "$((++attempts))" -lt 50 ] && grep 'HTTP/1\.1 408 Request Timeout' "${mar_headers_file}" &>/dev/null
 do
     sleep 1
-    curl --retry 50 --retry-max-time 300 -k -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+    curl --retry 50 --retry-max-time 300 -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
     mar_file_curl_exit_code=$?
 done
 
 # check file size matches what was written in update.xml
 # strip out dos line returns from header if they occur
 # note: below, using $(printf '\r') for Darwin compatibility, rather than simple '\r'
 # (i.e. shell interprets '\r' rather than sed interpretting '\r')
 mar_actual_size="$(sed -e "s/$(printf '\r')//" -n -e 's/^Content-Range: bytes 0-2\///ip' "${mar_headers_file}" | tail -1)"
--- a/tools/update-verify/release/updates/verify.sh
+++ b/tools/update-verify/release/updates/verify.sh
@@ -153,17 +153,17 @@ do
         fi
         if [ "$err" != "0" ]; then
           echo "FAIL: [${release} ${locale} ${patch_type}] download_mars returned non-zero exit code: ${err}"
           continue
         fi
       else
         mkdir -p updates/${update_path}/complete
         mkdir -p updates/${update_path}/partial
-        $retry wget --no-check-certificate -q -O ${patch_type} updates/${update_path}/${patch_type}/update.xml "${aus_server}/update/3/${update_path}/update.xml?force=1"
+        $retry wget -q -O ${patch_type} updates/${update_path}/${patch_type}/update.xml "${aus_server}/update/3/${update_path}/update.xml?force=1"
 
       fi
       if [ "$runmode" == "$COMPLETE" ]
       then
         if [ -z "$from" ] || [ -z "$to" ]
         then
           continue
         fi