- #!/usr/bin/env luajit
-
- local now = os.time()
-
- local APT = require 'apt-panopticommon'
- local D = APT.D
- local I = APT.I
- local T = APT.T
- local W = APT.W
- local E = APT.E
- local C = APT.C
- local arg, sendArgs = APT.parseArgs({...})
- APT.html = true
-
-
- local defaultURL = {scheme = "http"}
- local releases = {"jessie", "ascii", "beowulf", "ceres"}
- local releaseFiles =
- {
- -- Release file.
- "Release", -- 3.7 MB
- "Release.gpg", --
- -- "InRelease", -- 3.7 MB
- -- "main/binary-all/Packages.xz", -- 2.6 GB for all that changed recently.
- -- Contents files. -- 3.3 GB
- -- "main/Contents-all.xz",
- -- "main/Contents-amd64.xz",
- -- "main/Contents-arm64.xz",
- -- "-security/main/Contents-all.xz",
- -- "-security/main/Contents-amd64.xz",
- -- "-security/main/Contents-arm64.xz",
- }
- local notExist =
- {
- "ceres-security" -- This will never exist, it's our code name for the testing suite.
- }
- local referenceDebs =
- {
- -- Debian package.
- "merged/pool/DEBIAN/main/d/debian-keyring/debian-keyring_2019.02.25_all.deb",
- -- Debian security package. NOTE this one should always be redirected?
- "merged/pool/DEBIAN-SECURITY/updates/main/a/apt/apt-transport-https_1.4.10_amd64.deb",
- }
- local referenceDevs =
- {
- -- Devuan package. NOTE this one should not get redirected, but that's more a warning than an error.
- "merged/pool/DEVUAN/main/d/devuan-keyring/devuan-keyring_2017.10.03_all.deb",
- }
-
- local curlStatus =
- {
- [1 ] = "Unsupported protocol. This build of curl has no support for this protocol.",
- [2 ] = "Failed to initialize.",
- [3 ] = "URL malformed. The syntax was not correct.",
- [4 ] = "A feature or option that was needed to perform the desired request was not enabled or was explicitly disabled at build-time. To make curl able to do this, you probably need another build of libcurl!",
- [5 ] = "Couldn't resolve proxy. The given proxy host could not be resolved.",
- [6 ] = "Couldn't resolve host. The given remote host was not resolved.",
- [7 ] = "Failed to connect to host.",
- [8 ] = "Weird server reply. The server sent data curl couldn't parse.",
- [9 ] = "FTP access denied. The server denied login or denied access to the particular resource or directory you wanted to reach. Most often you tried to change to a directory that doesn't exist on the server.",
- [10] = "While waiting for the server to connect back when an active FTP session is used, an error code was sent over the control connection or similar.",
- [11] = "FTP weird PASS reply. Curl couldn't parse the reply sent to the PASS request.",
- [12] = "During an active FTP session while waiting for the server to connect, the CURLOPT_ACCEPTTIMEOUT_MS (or the internal default) timeout expired.",
- [13] = "FTP weird PASV reply. Curl couldn't parse the reply sent to the PASV request.",
- [14] = "FTP weird 227 format. Curl couldn't parse the 227-line the server sent.",
- [15] = "FTP can't get host. Couldn't resolve the host IP we got in the 227-line.",
- [16] = "A problem was detected in the HTTP2 framing layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
- [17] = "FTP couldn't set binary. Couldn't change transfer method to binary.",
- [18] = "Partial file. Only a part of the file was transferred.",
- [19] = "FTP couldn't download/access the given file, the RETR (or similar) command failed.",
-
- [21] = "FTP quote error. A quote command returned error from the server.",
- [22] = "HTTP page not retrieved. The requested url was not found or returned another error with the HTTP error code being 400 or above. This return code only appears if -f, --fail is used.",
- [23] = "Write error. Curl couldn't write data to a local filesystem or similar.",
-
- [25] = "FTP couldn't STOR file. The server denied the STOR operation, used for FTP uploading.",
- [26] = "Read error. Various reading problems.",
- [27] = "Out of memory. A memory allocation request failed.",
- [28] = "Operation timeout. The specified time-out period was reached according to the conditions.",
-
- [30] = "FTP PORT failed. The PORT command failed. Not all FTP servers support the PORT command, try doing a transfer using PASV instead!",
- [31] = "FTP couldn't use REST. The REST command failed. This command is used for resumed FTP transfers.",
-
- [33] = "HTTP range error. The range \"command\" didn't work.",
- [34] = "HTTP post error. Internal post-request generation error.",
- [35] = "SSL connect error. The SSL handshaking failed.",
- [36] = "FTP bad download resume. Couldn't continue an earlier aborted download.",
- [37] = "FILE couldn't read file. Failed to open the file. Permissions?",
- [38] = "LDAP cannot bind. LDAP bind operation failed.",
- [39] = "LDAP search failed.",
-
- [41] = "Function not found. A required LDAP function was not found.",
- [42] = "Aborted by callback. An application told curl to abort the operation.",
- [43] = "Internal error. A function was called with a bad parameter.",
-
- [45] = "Interface error. A specified outgoing interface could not be used.",
-
- [47] = "Too many redirects. When following redirects, curl hit the maximum amount.",
- [48] = "Unknown option specified to libcurl. This indicates that you passed a weird option to curl that was passed on to libcurl and rejected. Read up in the manual!",
- [49] = "Malformed telnet option.",
-
- [51] = "The peer's SSL certificate or SSH MD5 fingerprint was not OK.",
- [52] = "The server didn't reply anything, which here is considered an error.",
- [53] = "SSL crypto engine not found.",
- [54] = "Cannot set SSL crypto engine as default.",
- [55] = "Failed sending network data.",
- [56] = "Failure in receiving network data.",
-
- [58] = "Problem with the local certificate.",
- [59] = "Couldn't use specified SSL cipher.",
- [60] = "Peer certificate cannot be authenticated with known CA certificates.",
- [61] = "Unrecognized transfer encoding.",
- [62] = "Invalid LDAP URL.",
- [63] = "Maximum file size exceeded.",
- [64] = "Requested FTP SSL level failed.",
- [65] = "Sending the data requires a rewind that failed.",
- [66] = "Failed to initialise SSL Engine.",
- [67] = "The user name, password, or similar was not accepted and curl failed to log in.",
- [68] = "File not found on TFTP server.",
- [69] = "Permission problem on TFTP server.",
- [70] = "Out of disk space on TFTP server.",
- [71] = "Illegal TFTP operation.",
- [72] = "Unknown TFTP transfer ID.",
- [73] = "File already exists (TFTP).",
- [74] = "No such user (TFTP).",
- [75] = "Character conversion failed.",
- [76] = "Character conversion functions required.",
- [77] = "Problem with reading the SSL CA cert (path? access rights?).",
- [78] = "The resource referenced in the URL does not exist.",
- [79] = "An unspecified error occurred during the SSH session.",
- [80] = "Failed to shut down the SSL connection.",
- [81] = "Socket is not ready for send/recv wait till it's ready and try again. This return code is only returned from curl_easy_recv and curl_easy_send.",
- [82] = "Could not load CRL file, missing or wrong format (added in 7.19.0).",
- [83] = "Issuer check failed (added in 7.19.0).",
- [84] = "The FTP PRET command failed",
- [85] = "RTSP: mismatch of CSeq numbers",
- [86] = "RTSP: mismatch of Session Identifiers",
- [87] = "unable to parse FTP file list",
- [88] = "FTP chunk callback reported error",
- [89] = "No connection available, the session will be queued",
- [90] = "SSL public key does not match pinned public key",
- [91] = "Status returned failure when asked with CURLOPT_SSL_VERIFYSTATUS.",
- [92] = "Stream error in the HTTP/2 framing layer.",
- [93] = "An API function was called from inside a callback.",
- [94] = "An authentication function returned an error.",
- [95] = "A problem was detected in the HTTP/3 layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
- }
-
-
- local socket = require 'socket'
- local ftp = require 'socket.ftp'
- local http = require 'socket.http'
- local url = require 'socket.url'
- local ltn12 = require 'ltn12'  -- Needed for the sink used by nlst() below.
-
-
- local ip = ""
- local cor = nil
- local Updating = false
-
- local downloadLock = "flock -n results/curl-"
- local arw = ' <font color="magenta"><b>-></b></font> '
-
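- -- Returns true unless the leading release name in r is one we know will never exist (see notExist above).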
- local repoExists = function (r)
- r = r:match("([%a-]*)")
- if nil == r then return false end
- for k, v in pairs(notExist) do
- if v == r then return false end
- end
- return true
- end
-
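- -- Cache of DNS lookups, and a recursive gatherer that runs dig for A / AAAA / CNAME / SRV records,
- -- follows CNAMEs, and also collects the IPs of every mirror that should be behind the DNS round robin.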
- local IP = {}
- gatherIPs = function (host)
- if nil == IP[host] then
- local IPs
- -- Takes about 30 seconds to look up the lot.
- -- I tested using dig's -f option, it didn't seem much faster.
- -- The sort -r assumes that deb.devuan.org is the first alphabetically.
- local dig = io.popen('dig +keepopen +noall +nottlid +answer ' .. host .. ' A ' .. host .. ' AAAA ' .. host .. ' CNAME ' .. host .. ' SRV | sort -r | uniq')
- repeat
- IPs = dig:read("*l")
- if nil ~= IPs then
- for k, t, v in IPs:gmatch("([%w_%-%.]*)%.%s*IN%s*(%a*)%s*(.*)") do
- if "." == v:sub(-1, -1) then v = v:sub(1, -2) end
- if nil == IP[k] then IP[k] = {} end
- IP[k][v] = t
- D(" DNS record " .. host .. " == " .. k .. " type " .. t .. " -> " .. v)
- if t == "CNAME" then
- gatherIPs(v)
- IP[k][v] = IP[v]
- elseif t == "SRV" then
- print("SRV record found, now what do we do?")
- end
- end
- end
- until nil == IPs
- end
-
- -- If this is the DNS-RR domain name, gather the IPs for the mirrors that mirror_list.txt says should be in it.
- if host == APT.options.roundRobin.value then
- for k, m in pairs(APT.mirrors) do
- if "yes" == m.DNSRR then
- gatherIPs(m.FQDN)
- IP[host][m.FQDN] = IP[m.FQDN]
- end
- end
- end
- return IP[host]
- end
-
- -- Returns FTP directory listing
- local nlst = function (u)
- local t = {}
- local p = url.parse(u)
- p.command = "nlst"
- p.sink = ltn12.sink.table(t)
- local r, e = ftp.get(p)
- return r and table.concat(t), e
- end
-
- local timeouts = 0;
- local totalTimeouts = 0
- local spcd = ' '
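- -- HEAD test a single URL against a specific host (using curl's --connect-to when testing an IP or a redirect target).
- -- Follows redirects by recursing with r + 1, retries on timeouts, and enforces the redirect / retry / timeout
- -- limits before classifying the response code.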
- checkHEAD = function (host, URL, r, retry, sanity)
- if nil == r then r = 0 end
- if nil == retry then retry = 0 end
- if true == sanity then sanity = 'URLSanity' else sanity = '' end
- local check = "HEAD testing file"
- local PU = url.parse(URL, defaultURL)
- local pu = url.parse(PU.scheme .. "://" .. host, defaultURL)
- local fname = host .. "_" .. PU.host .. "_" .. PU.path:gsub("/", "_") .. ".log.txt"
- local hdr = ""
- local IP = ""
- if pu.host ~= PU.host then
- if "http" == PU.scheme then
- hdr = '-H "Host: ' .. host .. '"'
- end
- IP = '--connect-to "' .. pu.host .. '::' .. PU.host .. ':"'
- fname = host .. "_" .. pu.host .. '_' .. PU.host .. "_" .. PU.path:gsub("/", "_") .. ".txt"
- end
- os.execute('rm -f results/HEADERS_' .. fname .. ' 2>/dev/null; rm -f results/STATUS_' .. fname .. ' 2>/dev/null; touch results/STATUS_' .. fname)
- if not APT.testing(PU.scheme, host) and APT.redir then I(spcd .. string.upper(PU.scheme) .. " not supported, not tested. " .. URL, host); return end
- if 0 < r then
- check = "Redirecting to"
- end
- if 0 < retry then
- os.execute("sleep " .. math.random(1, 3))
- check = "Retry " .. retry .. " " .. check
- end
- if 2 <= timeouts then
- E(spcd .. spcd .. "too many timeouts! " .. check .. " " .. host .. arw .. URL, PU.scheme, "", host)
- return
- end
- if APT.options.timeouts.value <= (totalTimeouts) then
- E(spcd .. spcd .. "Way too many timeouts!", PU.scheme, "", host)
- return
- end
- if 20 <= r then
- E(spcd .. spcd .. "too many redirects! " .. check .. " " .. host .. arw .. URL, PU.scheme, "", host)
- return
- end
- if APT.options.retries.value <= retry then
- E(spcd .. spcd .. "too many retries! " .. check .. " " .. host .. arw .. URL, PU.scheme, "", host)
- return
- end
- if "https" == PU.scheme and APT.options.roundRobin.value == host then
- I(spcd .. "Not testing " .. APT.lnk(URL) .. ", mirrors won't have the correct HTTPS certificate for the round robin.", host)
- return
- else
- I(spcd .. check .. " " .. APT.lnk(URL), host)
- end
-
- --[[ Using curl command line -
- -I - HEAD
- --connect-to domain:port:IP:port - connect to IP, but use SNI from URL.
- -H / --header "" - add extra headers.
- -L - DO follow redirects.
- --max-redirs n - set maximum redirects, default is 50, -1 = unlimited.
- -4 or -6 - Only use IPv4 or IPv6
- --retry n - maximum retries, default is 0, no retries.
- -o file - write to file instead of stdout.
- --path-as-is - https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html might be useful for URLSanity.
- -s silent - don't output progress or error messages.
- --connect-timeout n - timeout in seconds.
- Should return with error code 28 on a timeout?
- -D file - write the received headers to a file. This includes the status code and string.
- ]]
- local status = APT.exe(
- 'curl -I --retry 0 -s --path-as-is --connect-timeout ' .. APT.options.timeout.value .. ' --max-redirs 0 ' .. APT.IPv46 .. ' ' ..
- IP .. ' ' .. '-o /dev/null -D results/"HEADERS_' .. fname .. '" ' ..
- hdr .. ' -w "#%{http_code} %{ssl_verify_result} %{url_effective}\\n" ' .. PU.scheme .. '://' .. host .. PU.path .. ' >>results/"STATUS_' .. fname .. '"'
- ):Nice():log():Do().status
- if 0 < r then
- APT.tested(PU.scheme, 'Redirects', host)
- else
- APT.tested(PU.scheme, '', host)
- end
- local code = "???"
- local cstr = ""
- local location = nil
- local tmot = 1
- while not APT.checkFile('results/STATUS_' .. fname) do
- D(spcd .. spcd .. 'Waiting for results/STATUS_' .. fname .. ' file.')
- os.execute('sleep ' .. tmot)
- tmot = tmot * 2
- if 8 < tmot then
- T(spcd .. spcd .. "TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1 .. ' ' .. APT.lnk(URL), PU.scheme, sanity, host)
- timeouts = timeouts + 1
- checkHEAD(host, URL, r, retry + 1, '' ~= sanity)
- os.execute('cat results/"HEADERS_' .. fname .. '" >>results/"STATUS_' .. fname .. '" 2>/dev/null; rm -f results/"HEADERS_' .. fname .. '" 2>/dev/null')
- return
- end
- end
- os.execute('cat results/"HEADERS_' .. fname .. '" >>results/"STATUS_' .. fname .. '" 2>/dev/null; rm -f results/"HEADERS_' .. fname .. '" 2>/dev/null')
- if 0 ~= status then
- local msg = curlStatus[status]
- if nil == msg then msg = "UNKNOWN CURL STATUS CODE!" end
- if (28 == status) or (7 == status) then
- T(spcd .. spcd .. "TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1 .. ' ' .. APT.lnk(URL), PU.scheme, sanity, host)
- timeouts = timeouts + 1
- else
- E(spcd .. spcd .. "The curl command returned an error code of " .. status .. " - " .. msg .. ' for ' .. APT.lnk(URL), PU.scheme, sanity, host)
- end
- if 60 == status then return end -- Certificate is invalid, don't bother retrying.
- checkHEAD(host, URL, r, retry + 1, '' ~= sanity)
- return
- end
- local rfile, e = io.open("results/STATUS_" .. fname, "r")
- if nil == rfile then W("opening results/STATUS_" .. fname .. " file - " .. e) else
- for line in rfile:lines("*l") do
- if "#" == line:sub(1, 1) then
- code = line:sub(2, 4)
- if ("https" == PU.scheme) and ("0" ~= line:sub(6, 6)) then
- os.execute('cp results/STATUS_' .. fname .. ' results/STATUS_' .. fname .. '_SAVED')
- if '' ~= sanity then
- E(spcd .. spcd .. "The certificate is invalid.", PU.scheme, sanity, host)
- else
- E(spcd .. spcd .. "The certificate is invalid.", PU.scheme, "https", host)
- end
- end
- elseif "http" == line:sub(1, 4):lower() then
- -- -2 coz the headers file gets a \r at the end.
- cstr = line:sub(14, -2)
- elseif "location" == line:sub(1, 8):lower() then
- location = line:sub(11, -2)
- end
- end
- if '???' == code then
- W(spcd .. spcd .. 'Could not find response code. ' .. APT.lnk(URL), PU.scheme, sanity, host)
- end
- end
- os.execute('cat results/STATUS_' .. fname .. ' >> results/curl_HEAD_' .. fname .. '; rm -f results/STATUS_' .. fname .. ' 2>/dev/null')
-
- if ("4" == tostring(code):sub(1, 1)) or ("5" == tostring(code):sub(1, 1)) then
- E(spcd .. spcd .. code .. " " .. cstr .. ". " .. check .. " " .. APT.lnk(URL), PU.scheme, sanity, host)
- else
- if not APT.testing(PU.scheme, host) then
- I(spcd .. spcd .. "Not supported, but works " .. PU.scheme .. " " .. APT.lnk(URL), PU.scheme, "", host)
- end
- I(spcd .. spcd .. code .. " " .. cstr .. ". " .. check .. " " .. APT.lnk(URL), host)
- -- timeouts = timeouts - 1 -- Backoff the timeouts count if we managed to get through.
- if nil ~= location then
- pu = url.parse(location, defaultURL)
- if (pu.host == APT.options.roundRobin.value) and (nil ~= PU.path:find('merged/pool/DEVUAN/')) then
- E('DEVUAN packages must not be redirected to ' .. APT.options.roundRobin.value .. ' - ' .. APT.lnk(URL) .. arw .. APT.lnk(location), PU.scheme, 'Redirects', host)
- end
- if APT.testing("Protocol") then
- if ('http' == location:sub(1, 4)) and (pu.scheme ~= PU.scheme) then -- Sometimes a location sans scheme is returned, this is not a protocol change.
- if APT.options.roundRobin.value == host then -- Coz HTTPS shouldn't happen via the round robin.
- E(spcd .. spcd .. "Protocol changed during redirect! " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), PU.scheme, "Protocol", host)
- end
- W(spcd .. spcd .. "Protocol changed during redirect! " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), PU.scheme, "Protocol", host)
- else
- end
- APT.tested(PU.scheme, 'Protocol', host)
- end
-
- if location == URL then
- E(spcd .. spcd .. "Redirect loop! " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), PU.scheme, "", host)
- elseif nil == pu.host then
- I(spcd .. spcd .. "Relative redirect. " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), host)
- if 1 <= APT.options.bandwidth.value then checkHEAD(host, PU.scheme .. "://" .. PU.host .. location, r + 1, retry, '' ~= sanity) end
- elseif (PU.host == pu.host) or (host == pu.host) then
- if PU.host ~= host then
- local t = pu.host
- pu.host = PU.host
- location = url.build(pu)
- pu.host = t
- end
- I(spcd .. spcd .. "Redirect to same host. " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), host)
- if 1 <= APT.options.bandwidth.value then checkHEAD(host, location, r + 1, retry, '' ~= sanity) end
- else
- I(spcd .. spcd .. "Redirect to different host. " .. check .. " " .. APT.lnk(URL) .. arw .. APT.lnk(location), host)
- if 1 <= APT.options.bandwidth.value then
- --[[ The hard part here is that we end up throwing ALL of the test files at the redirected location.
- Not good for deb.debian.org, which we should only be throwing .debs at.
- What we do is loop through the DNS entries, and only test the specific protocol & file being tested here.
- ]]
- local u = pu.host .. "/" .. pu.path
- local file = pu.path:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
- local path = pu.path:sub(2, -1 -(#file))
- local check = u:gsub("/", "_")
- local extraArgs = sendArgs .. ' -o -r '
- if 'https' == pu.scheme then extraArgs = extraArgs .. ' --tests=-http' end
- if 'http' == pu.scheme then extraArgs = extraArgs .. ' --tests=-https' end
- local pth = path:match('^(.*/pool/).*$')
- if nil ~= pth then table.insert(APT.results[PU.scheme].redirects, pu.host .. "/" .. pth) else E(spcd .. spcd .. 'Odd redirect path ' .. path) end
- I(spcd .. spcd .. "Now checking redirected host " .. u .. ' for ' .. APT.lnk(URL) .. arw .. APT.lnk(location), host)
- APT.exe(downloadLock .. "REDIR-" .. check .. ".log.txt" .. " ./apt-panopticon.lua " .. extraArgs .. ' ' .. pu.host .. "/" .. path .. " " .. file):Nice():log():fork()
- D(spcd .. 'logging to ' .. APT.logName(pu.host, nil, file)[2])
- APT.tested(PU.scheme, 'Redirects', host)
- end
- end
- elseif nil ~= PU.path:find('merged/pool/DEBIAN-SECURITY/') then
- W('DEBIAN-SECURITY packages must be redirected to a Debian mirror - ' .. APT.lnk(URL) .. arw .. APT.lnk(location), PU.scheme, 'Redirects', host)
- end
- end
- end
-
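- -- Run checkHEAD for one URL, optionally repeat it with extra slashes stuffed into the path for the URLSanity test,
- -- give the download coroutine a chance to run, and return true once the total timeout limit has been hit.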
- local checkTimeouts = function(host, scheme, URL)
- totalTimeouts = totalTimeouts + timeouts; timeouts = 0
- checkHEAD(host, scheme .. "://" .. URL)
- if (1 <= APT.options.bandwidth.value) and APT.testing("URLSanity") then
- URL = URL:gsub("/", "///")
- URL = URL:gsub("///", "/", 1)
- checkHEAD(host, scheme .. "://" .. URL, 0, 0, true)
- APT.tested(scheme, 'URLSanity', host)
- end
- if nil ~= cor then
- D('*>* About to resume coroutine after checkHEAD(' .. host .. ' , ' .. scheme .. ' :// ' .. URL .. ')')
- local ok, message = coroutine.resume(cor)
- if not ok then cor = nil; print(message) end
- end
- if APT.options.timeouts.value <= (totalTimeouts) then
- E("Way too many timeouts!", scheme, "URLSanity", host)
- return true
- end
- return false
- end
-
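- -- HEAD test either one specific file, or the reference packages and Release files, over http and https for one host / IP.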
- local checkFiles = function (host, ip, path, file)
- timeouts = 0
- if nil == path then path = "" end
- if nil ~= file then
- if "redir" == ip then ip = host end
- if checkTimeouts(host, "http", ip .. path .. "/" .. file) then return end
- if checkTimeouts(host, "https", ip .. path .. "/" .. file) then return end
- else
- I(" HEAD testing files for " .. host .. arw .. ip .. " " .. path, host)
- if 1 <= APT.options.bandwidth.value then
- -- Do these first, coz they are likely to fork off a different server.
- for i, s in pairs(referenceDebs) do
- if checkTimeouts(host, "http", ip .. path .. "/" .. s) then return end
- if checkTimeouts(host, "https", ip .. path .. "/" .. s) then return end
- end
- end
- for i, s in pairs(releases) do
- for j, k in pairs(releaseFiles) do
- if repoExists(s .. k) then
- if checkTimeouts(host, "http", ip .. path .. "/merged/dists/" .. s .. '/' .. k) then return end
- if 1 <= APT.options.bandwidth.value then
- if checkTimeouts(host, "https", ip .. path .. "/merged/dists/" .. s .. '/' .. k) then return end
- else
- break
- end
- end
- if 2 >= APT.options.bandwidth.value then break end
- end
- if 2 >= APT.options.bandwidth.value then break end
- end
- if 1 <= APT.options.bandwidth.value then
- for i, s in pairs(referenceDevs) do
- if checkTimeouts(host, "http", ip .. path .. "/" .. s) then return end
- if checkTimeouts(host, "https", ip .. path .. "/" .. s) then return end
- end
- end
- end
- end
-
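- -- Entry point for testing a mirror. Given an explicit IP it checks files against that IP directly, otherwise it
- -- forks a child apt-panopticon.lua instance to test the mirror on its own.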
- checkHost = function (orig, host, path, ip, file)
- if nil == host then host = orig end
- if nil == path then path = "" end
- if nil == file then file = "" end
- local ph = url.parse("http://" .. host)
- if (nil ~= ip) and ("redir" ~= ip) then
- local po = url.parse("http://" .. orig)
- if "" ~= file then
- D("checking redirected file " .. po.host .. " " .. file)
- checkFiles(po.host, ip, path, file)
- else
- checkFiles(po.host, ip, path)
- end
- else
- if orig == host then
- I("Testing mirror " .. orig .. " " .. file)
- APT.exe("./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file):Nice():log():fork()
- D('logging to ' .. APT.logName(ph.host, nil, file)[2])
- else D("checkHost " .. orig .. arw .. host) end
- end
- end
-
-
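- -- Add one dists file to the curl config file, moving any copy left from a previous run out of the way first.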
- local addDownload = function(host, URL, f, r, k)
- local file = k:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
- if APT.checkFile("results/" .. host .. "/merged/dists/" .. r .. '/' .. k) then
- -- Curls "check timestamp and overwrite file" stuff sucks.
- -- -R means the destination file gets the timestamp of the remote file.
- -- Can only do ONE timestamp check per command.
- -- This doesn't work either. All downloads get all these headers. Pffft
- -- local status, ts = APT.execute('TZ="GMT" ls -l --time-style="+%a, %d %b %Y %T %Z" results/' .. host .. "/merged/dists/" .. r .. '/' .. k .. ' | cut -d " " -f 6-11')
- -- f:write('header "If-Modified-Since: ' .. ts:sub(2, -2) .. '"\n')
- -- Curl will DELETE the existing file if the timestamp fails to download a new one, unless we change directory first,
- -- which wont work with multiple files in multiple directories. WTF?
- --TODO - change tactic, do a HEAD if-modified test first before adding the file to the list to download.
- os.execute(" mv results/" .. host .. "/merged/dists/" .. r .. '/' .. k ..
- " results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old")
- end
-
- D('Downloading http://' .. host .. URL .. '/merged/dists/' .. r .. '/' .. k)
- f:write('url "' .. 'http://' .. host .. URL .. '/merged/dists/' .. r .. '/' .. k .. '"\n')
- f:write('output "results/' .. host .. '/merged/dists/' .. r .. '/' .. k .. '"\n')
- end
-
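- -- After a download batch - restore the .old copy if nothing new arrived, then unpack any .gz / .xz files.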
- local postDownload = function(host, r, k)
- local file = k:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
- if nil == file then file = k end
- os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old ]" ..
- " && [ ! -f results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. " ]; then cp -a" ..
- " results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. ".old" ..
- " results/" .. host .. "/merged/dists/" .. r .. '/' .. k .. "; fi")
- if APT.checkFile('results/' .. host .. '/merged/dists/' .. r .. '/' .. k) then
- if ".gz" == k:sub(-3, -1) then APT.exe("gzip -dfk results/" .. host .. "/merged/dists/" .. r .. '/' .. k):Nice():noErr():Do() end
- if ".xz" == k:sub(-3, -1) then APT.exe("xz -dfk results/" .. host .. "/merged/dists/" .. r .. '/' .. k):Nice():noErr():Do() end
- end
- end
-
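- -- Base curl command for batch downloads, and the downloads() function that writes the -K config file listing the
- -- URLs for one host, then forks curl under flock so only one batch per host and stage runs at a time.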
- local download = "curl" ..
- " --connect-timeout " .. APT.options.timeout.value ..
- " --create-dirs -f -L" ..
- " --fail-early" ..
- " --max-time " .. APT.options.maxtime.value ..
- APT.IPv46 .. ' ' ..
- " --retry " .. APT.options.retries.value ..
- " -R -v -z 'results/stamp.old' --stderr results/"
- local downloads = function(host, URL, meta, release, list)
- if nil == URL then URL = "" end
- local files = 'curl-' .. meta .. '-' .. host .. '.files.txt'
- local lock = meta .. "-" .. host .. ".log.txt"
- local log = "curl-" .. meta .. "-" .. host .. ".log.txt"
- local cm = downloadLock .. lock .. " " .. download .. log .. " -K results/" .. files
- if APT.testing("IPv4") and (not APT.testing("IPv6")) then cm = cm .. ' -4' end
- if (not APT.testing("IPv4")) and APT.testing("IPv6") then cm = cm .. ' -6' end
- local f, e = io.open("results/curl-" .. meta .. '-' .. host .. ".files.txt", "a+")
- if nil == f then C("opening curl downloads list file - " .. e); return end
-
- if nil ~= list then
- if "" ~= list then
- if nil ~= release then
- for l in list:gmatch("\n*([^\n]+)\n*") do
- addDownload(host, URL, f, release, "/" .. l)
- end
- else
- I('Downloading ' .. APT.lnk('http://' .. host .. URL .. '/merged/' .. list))
- f:write('url "' .. 'http://' .. host .. URL .. '/merged/' .. list .. '"\n')
- f:write('output "results/' .. host .. '/merged/' .. list .. '"\n')
- end
- f:close()
- return
- end
- else
- for i, s in pairs(releases) do
- for j, k in pairs(releaseFiles) do
- if repoExists(s .. k) then
- addDownload(host, URL, f, s, k)
- end
- end
- end
- end
- f:close()
- APT.exe(cm):Nice():log():fork()
- D('logging to <a href="' .. log .. '">' .. log .. '</a>, with <a href="' .. files .. '">these files</a>')
- end
-
-
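- -- Sanity check one mirror_list.txt entry - strip trailing spaces / slashes and make sure FQDN and BaseURL agree
- -- with what url.parse() makes of them.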
- local validateURL = function(m)
- if " " == m.BaseURL:sub(-1, -1) then
- W("space at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
- m.BaseURL = m.BaseURL:sub(1, -2)
- end
- if "/" == m.BaseURL:sub(-1, -1) then
- W("slash at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
- m.BaseURL = m.BaseURL:sub(1, -2)
- end
- local p = url.parse("http://" .. m.BaseURL)
- if nil == p.path then p.path = '' end
- if nil ~= p.port then p.authority = p.host .. ':' .. p.port end
- if nil == m.FQDN then W("Something wrong in FQDN from mirror_list.txt! nil", "", "", p.authority) else
- if m.FQDN ~= p.authority then W("Something wrong in FQDN from mirror_list.txt! " .. m.FQDN, "", "", p.authority) end
- end
- if nil == m.BaseURL then W("Something wrong in BaseURL from mirror_list.txt! nil", "", "", p.authority) else
- if m.BaseURL ~= (p.authority .. p.path) then W("Something wrong in BaseURL from mirror_list.txt! " .. m.BaseURL, "", "", p.authority) end
- end
- if (nil ~= p.query) or (nil ~= p.fragment) or (nil ~= p.params) then W("Something wrong in BaseURL from mirror_list.txt, should be nothing after the path! " .. m.BaseURL, "", "", p.authority) end
- if (nil ~= p.user) or (nil ~= p.userinfo) or (nil ~= p.password) then W("Something wrong in BaseURL from mirror_list.txt, should be no credentials! " .. m.BaseURL, "", "", p.authority) end
- m.FQDN = p.authority
- m.BaseURL = p.authority .. p.path
- return m
- end
-
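- -- Download and parse mirror_list.txt from the reference site, optionally add the round robin as a pseudo mirror,
- -- and save the lot as results/mirrors.lua for the forked testers to load.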
- local getMirrors = function ()
- local mirrors = {}
- local host = ""
- local m = {}
- local active = true
- local URL = 'http://' .. APT.options.referenceSite.value .. '/mirror_list.txt'
- I('Downloading and parsing http://' .. APT.options.referenceSite.value .. '/mirror_list.txt')
- local p, c, h = http.request(URL)
- if nil == p then E(c .. " fetching " .. URL) else
-
- for l in p:gmatch("\n*([^\n]+)\n*") do
- local t, d = l:match("(%a*):%s*(.*)")
- d = string.lower(d)
- if "FQDN" == t then
- if "" ~= host then
- mirrors[host] = validateURL(m)
- m = {}
- active = true
- end
- host = d
- m[t] = d
- elseif "Protocols" == t then
- local prot = {}
- for w in d:gmatch("(%w+)") do
- if APT.search(APT.protocols, w:lower()) then prot[w] = true end
- end
- m[t] = prot
- elseif "Active" == t and nil == d:sub(1, 3):find("yes", 1, true) then
- W("Mirror " .. host .. " is not active - " .. d, "", "", host)
- active = false
- m[t] = d
- -- TODO - Should do some more input validation on everything.
- elseif "Rate" == t then
- local time, unit = d:match('(%d+) *(%a+)')
- time = tonumber(time)
- unit = unit:sub(1, 1)
- m[t] = time .. ' ' .. unit
- if 'm' == unit then
- time = time * 60
- elseif 'h' == unit then
- time = time * 60 * 60
- else
- C('Unknown Rate for mirror ' .. host)
- end
- m['Updated'] = time
- else
- m[t] = d
- end
- end
- if "" ~= host --[[and active]] then
- mirrors[host] = validateURL(m)
- end
- end
- if APT.testing("DNSRR") then
- mirrors[APT.options.roundRobin.value] = { ["Protocols"] = { ["http"] = true; }; ['Updated'] = 300; ['DNSRR'] = false;
- ["FQDN"] = APT.options.roundRobin.value; ["Active"] = 'yes'; ["BaseURL"] = APT.options.roundRobin.value; }
- end
- local file, e = io.open("results/mirrors.lua", "w+")
- if nil == file then C("opening mirrors file - " .. e) else
- file:write(APT.dumpTable(mirrors, "mirrors") .. "\nreturn mirrors\n")
- file:close()
- end
- local count = 0
- for _ in pairs(mirrors) do count = count + 1 end  -- mirrors is keyed by host name, so #mirrors would always be 0.
- if 42 < count then print(count .. ' is too many mirrors!') ; os.exit(false) end
- return mirrors
- end
-
-
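- -- On the reference host, promote the NEW_*.txt.tmp files to their final names; the other testers wait on these.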
- local postParse = function(host, list)
- if APT.options.referenceSite.value == host then
- if nil ~= list then
- local sem = 'results/NEW_' .. list.out .. '_%s.txt'
- for i, n in pairs(releases) do
- local f = sem:format(n)
- if APT.checkFile(f .. '.tmp') then
- os.execute('mv ' .. f .. '.tmp ' .. f)
- else
- os.execute('touch ' .. f)
- end
- end
- end
- end
- end
-
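- -- Last pipeline stage - check the size and SHA256 of each sample .deb we downloaded against the values parsed
- -- from the Packages files, then delete the .deb.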
- local parseDebs = function(host)
- for i, n in pairs(releases) do
- local inFile = 'results/NEW_debs_' .. n .. '.txt'
- local nfile, e = io.open(inFile, "r")
- if nil == nfile then W("opening " .. inFile .. " file - " .. e) else
- for l in nfile:lines() do
- local v, p, sz, sha = l:match(' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) |')
- if nil ~= p then
- if APT.checkFile('results/' .. host .. "/merged/" .. p) then
- local fsz = APT.exe('ls -l results/' .. host .. "/merged/" .. p .. ' | cut -d " " -f 5-5'):Do().result
- if APT.testing("Integrity") then
- if sz ~= fsz:sub(2, -2) then -- The sub bit is to slice off the EOLs at each end.
- E('Package size mismatch - results/' .. host .. "/merged/" .. p .. ' should be ' .. sz .. ', but is ' .. fsz:sub(2, -2) .. '.', 'http', 'Integrity', host)
- else
- local fsha = APT.exe('sha256sum results/' .. host .. "/merged/" .. p .. ' | cut -d " " -f 1'):log():Do().result
- if sha ~= fsha:sub(2, -2) then E('Package SHA256 sum mismatch - results/' .. host .. "/merged/" .. p, 'http', 'Integrity', host) end
- -- TODO - maybe check the PGP key, though packages are mostly not signed.
- end
- APT.tested('http', 'Integrity', host)
- end
- if APT.testing("Updated") then
- if sz ~= fsz:sub(2, -2) then
- E('Package size mismatch for ' .. host .. "/merged/" .. p, 'http', 'Updated', host)
- end
- APT.tested('http', 'Updated', host)
- end
- os.execute('rm -f results/' .. host .. "/merged/" .. p)
- else
- if Updating then
- W('Not yet able to download, awaiting update for ' .. host .. "/merged/" .. p, 'http', 'Updated', host)
- else
- E('Failed to download ' .. host .. "/merged/" .. p, 'http', 'Updated', host)
- end
- end
- end
- end
- end
- end
- return nil
- end
-
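- -- Middle pipeline stage - unpack the downloaded Packages files; on the reference host parse and sort them, diff
- -- against the previous run, and pick a recently changed .deb per pool as the sample packages to download next.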
- local parsePackages = function(host)
- local list = {inf = 'Packages', parser = parseDebs, out = 'debs', files = {}, nextf = ''}
- for i, n in pairs(releases) do
- local inFile = 'results/NEW_' .. list.inf .. '_' .. n .. '.txt'
- local outFile = 'results/NEW_' .. list.out .. '_' .. n .. '.txt'
- if APT.options.referenceSite.value == host then
- outFile = outFile .. '.tmp'
- end
- local dFile, e = io.open(inFile, "r")
- if nil == dFile then W("opening " .. inFile .. " file - " .. e) else
- for l in dFile:lines() do
- postDownload(host, n, l)
-
- l = '/' .. l
- local file = l:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
- local dir = l:sub(1, 0 - (#file + 1))
-
- if "Packages." == file:sub(1, 9) then
- -- TODO - compare the SHA256 sums in pkgmaster's Release for both the packed and unpacked versions.
- -- Also note that this might get only a partial download due to maxtime.
- if APT.options.referenceSite.value == host then
- local Pp, e = io.open('results/' .. host .. '/merged/dists/'.. n .. dir .. 'Packages.parsed', "w+")
- if nil == Pp then W('opening results/' .. host .. '/merged/dists/'.. n .. dir .. 'Packages.parsed' .. ' file - ' .. e) else
- local pp = {}
- for l in io.lines('results/' .. host .. '/merged/dists/'.. n .. dir .. 'Packages') do
- if "Package: " == l:sub(1, 9) then
- if 0 ~= #pp then
- for i = 1, 5 do
- if nil == pp[i] then print(host .. " " .. n .. " " .. dir .. " " .. i) else Pp:write(pp[i] .. " | ") end
- end
- Pp:write("\n")
- end
- pp = {}
- pp[1] = l:sub(10, -1)
- elseif "Version: " == l:sub(1, 9) then
- pp[2] = l:sub(10, -1)
- elseif "Filename: " == l:sub(1, 10) then
- pp[3] = l:sub(11, -1)
- elseif "Size: " == l:sub(1, 6) then
- pp[4] = l:sub(7, -1)
- elseif "SHA256: " == l:sub(1, 8) then
- pp[5] = l:sub(9, -1)
- end
- end
- Pp:close()
- os.execute('sort results/' .. host .. '/merged/dists/'.. n .. dir .. 'Packages.parsed >results/' .. host .. '/merged/dists/'.. n .. dir .. 'Packages_parsed-sorted')
- if APT.checkFile('Packages/' .. n .. dir .. 'Packages_parsed-sorted') then
- os.execute('diff -U 0 Packages/' .. n .. dir .. 'Packages_parsed-sorted ' ..
- 'results/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. dir .. 'Packages_parsed-sorted ' ..
- ' | grep -E "^-" | grep -Ev "^\\+\\+\\+|^---" >>results/OLD_' .. list.out .. '_' .. n .. '.txt')
- os.execute('diff -U 0 Packages/' .. n .. dir .. 'Packages_parsed-sorted ' ..
- 'results/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. dir .. 'Packages_parsed-sorted ' ..
- ' | grep -E "^\\+" | grep -Ev "^\\+\\+\\+|^---" >>results/NEW_' .. list.out .. '_TMP_' .. n .. '.txt')
- for i, s in pairs(referenceDebs) do
- if 0 == APT.exe('grep -q "' .. s:sub(8, -1) .. '" results/OLD_' .. list.out .. '_' .. n .. '.txt'):log():Do().status then
- print('Reference package is out of date (' .. n .. ') - ' .. s)
- end
- end
- for i, s in pairs(referenceDevs) do
- if 0 == APT.exe('grep -q "' .. s:sub(8, -1) .. '" results/OLD_' .. list.out .. '_' .. n .. '.txt'):log():Do().status then
- print('Reference package is out of date (' .. n .. ') - ' .. s)
- end
- end
- else
- W("Can't find file Packages/" .. n .. dir .. "Packages_parsed-sorted")
- end
- os.execute('mkdir -p Packages/' .. n .. dir)
- os.execute('mv -f results/' .. APT.options.referenceSite.value .. '/merged/dists/' .. n .. dir .. 'Packages_parsed-sorted Packages/' .. n .. dir .. 'Packages_parsed-sorted')
- end
- else
- end
- os.execute('rm -fr results/' .. host .. '/merged/dists/' .. n .. dir .. ' 2>/dev/null')
- end
- end
-
- if APT.checkFile('results/NEW_' .. list.out .. '_TMP_' .. n .. '.txt') then
- -- Sort by size.
- os.execute('sort -b -k 9,9 -n results/NEW_' .. list.out .. '_TMP_' .. n .. '.txt >results/NEW_' .. list.out .. '_' .. n .. '.sorted.txt')
- os.execute('grep -s " | pool/DEBIAN/" results/NEW_' .. list.out .. '_' .. n .. '.sorted.txt 2>/dev/null | head -n 1 >' .. outFile)
- os.execute('grep -s " | pool/DEVUAN/" results/NEW_' .. list.out .. '_' .. n .. '.sorted.txt 2>/dev/null | head -n 1 >>' .. outFile)
- os.execute('grep -s " | pool/DEBIAN-SECURITY/" results/NEW_' .. list.out .. '_' .. n .. '.sorted.txt 2>/dev/null | head -n 1 >>' .. outFile)
- os.execute('rm -f results/NEW_' .. list.out .. '_TMP_' .. n .. '.txt')
- end
-
- end
-
- local nfile, e = io.open(outFile, "r")
- if nil ~= nfile then
- -- for l in nfile:lines() do
- local l = nfile:read('*l')
- if nil ~= l then
- local p = l:match('(pool/.*%.deb)')
- if nil ~= p then
- table.insert(list.files, p)
- end
- end
- -- end
- end
- end
-
- postParse(host, list)
- return list
- end
-
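- -- First pipeline stage after the Release downloads - GPG verify Release.gpg, compare the Release date against the
- -- mirror's advertised update Rate, and queue the changed Packages.xz files for the next stage.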
- local parseRelease = function(host)
- local list = {inf = 'Release', parser = parsePackages, out = 'Packages', files = {}, nextf = 'debs'}
- local updated = false
- local now = tonumber(os.date('%s'))
- for i, n in pairs(releases) do
- for l, o in pairs(releaseFiles) do
- if repoExists(n .. o) then
- postDownload(host, n, o)
- if (".gpg" == o:sub(-4, -1)) and (APT.checkFile('results/' .. host .. '/merged/dists/' .. n .. '/' .. o)) then
- if APT.testing("Integrity") then
- local status = APT.exe( "gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. n .. '/' .. o ..
- " results/" .. host .. "/merged/dists/" .. n .. '/' .. o:sub(1, -5)):Nice():noErr():log():Do().status
- if 0 ~= status then E("GPG check failed for " .. host .. "/merged/dists/" .. n .. '/' .. o, "http", "Integrity", host) end
- -- TODO - should check the PGP sig of InRelease as well.
- APT.tested('http', 'Integrity', host)
- end
- os.execute('rm results/' .. host .. '/merged/dists/' .. n .. '/' .. o)
- end
- end
- end
-
- local fR = 'results/' .. host .. '/merged/dists/' .. n .. '/Release'
- local fRp = APT.options.referenceSite.value .. '/merged/dists/' .. n .. '/Release.SORTED'
- if APT.checkFile(fR) then
- os.execute('sort -k 3 ' .. fR .. ' >' .. fR .. '.SORTED')
- local outFile = 'results/NEW_' .. list.out .. '_' .. n .. '.txt'
- if APT.checkFile('results_old/' .. fRp) then
- if APT.options.referenceSite.value == host then
- outFile = outFile .. '.tmp'
- os.execute('diff -U 0 results_old/' .. fRp .. ' ' ..
- 'results/' .. fRp .. ' ' ..
- '| grep -v "@@" | grep "^+" | grep "Packages.xz$" | cut -c 77- >' .. outFile)
- -- TODO - Maybe check the date in Release, though since they are updated daily, is there any point? Perhaps it's for checking amprolla got run?
- -- Also check if that date is in the future, apt recently got a check for that, though not sure why.
- os.execute('rm -f ' .. fR .. ' 2>/dev/null; ')
- else
- -- TODO - compare to the pkgmaster copy.
- if APT.testing('Updated') then
- while not APT.checkFile('results/' .. fRp) do
- D('*<* About to yield coroutine while waiting on - not APT.checkFile(results/' .. fRp .. ')')
- coroutine.yield()
- D('*>* Resumed coroutine while waiting on - not APT.checkFile(results/' .. fRp .. ')')
- end
- local pkt = tonumber(APT.exe([[TZ="GMT" date -d "$(grep '^Date:' results/]] .. fRp .. [[ | cut -d ' ' -f 2-)" '+%s']]):Do().result:sub(2, -2))
- local new = tonumber(APT.exe([[TZ="GMT" date -d "$(grep '^Date:' ]] .. fR .. [[.SORTED | cut -d ' ' -f 2-)" '+%s']]):Do().result:sub(2, -2))
- local upd = pkt + APT.mirrors[host].Updated
- local updd = pkt + (APT.mirrors[host].Updated * 1.5) -- Give the mirror time to actually do the update.
- if pkt > new then
- D( 'pkt is ' .. os.date('!%F %T', pkt) .. ' new is ' .. os.date('!%F %T', new) ..
- ' upd is ' .. os.date('!%F %T', upd) .. ' updd is ' .. os.date('!%F %T', updd) ..
- ' now is ' .. os.date('!%F %T', now) .. ' Updated is ' .. APT.mirrors[host].Updated)
- if updd >= now then
- W('Release ' .. n .. ' not updated yet, should update @ ' .. os.date('!%F %T', upd) .. ', and was last updated @ ' .. os.date('!%F %T', new), 'http', 'Updated', host)
- Updating = true
- else
- E('Release ' .. n .. ' not updated, should have updated @ ' .. os.date('!%F %T', upd) .. ', but was last updated @ ' .. os.date('!%F %T', new), 'http', 'Updated', host)
- end
- updated = false
- else
- updated = true
- end
- APT.tested('http', 'Updated', host)
- end
- end
-
- -- TODO - if it's not Integrity and not reference, then just do a HEAD check and compare file times?
- -- TODO - like we do with debs, pick just the smallest Packages.xz that has changed.
- -- Though we are sorting Release by name, so we can do the diff with the one from results_old, so we'll need to sort by size to.
- -- pkgmaster still needs to download all the changed Packages.xz files though.
- if (2 <= APT.options.bandwidth.value) and (updated or APT.testing("Integrity") or (APT.options.referenceSite.value == host)) then
- local dfile, e = io.open(outFile, "r")
- if nil == dfile then W("opening " .. outFile .. " file - " .. e) else
- for l in dfile:lines() do
- table.insert(list.files, 'dists/' .. n .. '/' .. l)
- end
- end
- end
- end
- end
-
- end
- postParse(host, list)
- return list
- end
-
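- -- Pipeline stage zero - queue the Release files for each release, and on the reference host seed the NEW_Release lists.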
- local parseStart = function(host)
- local list = {inf = '', parser = parseRelease, out = 'Release', files = {}, nextf = 'Packages'}
- for i, n in pairs(releases) do
- local outFile = 'results/NEW_' .. list.out .. '_' .. n .. '.txt'
- for l, o in pairs(releaseFiles) do
- if repoExists(n .. o) then
- if APT.options.referenceSite.value == host then
- local dfile, e = io.open(outFile .. '.tmp', "a+")
- if nil == dfile then W("opening " .. outFile .. ".tmp file - " .. e) else
- dfile:write(o .. '\n')
- end
- end
- table.insert(list.files, 'dists/' .. n .. '/' .. o)
- end
- end
- end
- postParse(host, list)
- return list
- end
-
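- -- Coroutine body driving the download pipeline (Release -> Packages -> debs). For each stage it queues the files,
- -- forks curl, yields until the flock'd download finishes, scrapes speeds and timeouts out of the curl log, then
- -- calls the stage's parser to build the next stage's file list.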
- local doDownloads = function(host, path, list)
- while nil ~= list do
- if 0 ~= #(list.files) then
- for j, f in pairs(list.files) do
- downloads(host, path, list.out, nil, f)
- end
- downloads(host, path, list.out, nil, '')
- --[[ I've seen flock & curl act oddly. Perhaps flock didn't have time to start up?
- /var/www/html/apt-panopticon/apt-panopticon/results_2019-12-22-15-00
-
- Mon Dec 23 01:02:54 2019 DEBUG : forking
-
- flock -n results/curl-debs-pkgmaster.devuan.org.log curl --connect-timeout 5 --create-dirs -f -L --fail-early --max-time 300 --retry 3 -R -v -z 'results/stamp.old' --stderr results/curl-debs-pkgmaster.devuan.org.log -K results/curl-debs-pkgmaster.devuan.org.files
-
-
- Mon Dec 23 01:02:54 2019 DEBUG : 0 flock -n results/curl-debs-pkgmaster.devuan.org.log commands still running.
- Mon Dec 23 01:02:54 2019 DEBUG : *>* Resumed coroutine NO LONGER waiting on - 0 < APT.checkExes(flock -n results/curl-debs-pkgmaster.devuan.org.log
- Mon Dec 23 01:02:54 2019 DEBUG : *** Doing list.parser() for debs
- Mon Dec 23 01:02:54 2019 ERROR (http Updated pkgmaster.devuan.org): Failed to download - results/pkgmaster.devuan.org/merged/pool/DEBIAN/main/a/aptly/aptly_1.3.0+ds1-4_amd64.deb
-
- drwxr-x--- 2 www-data www-data 4096 2019-12-23 01:02:57.000000000 +1000 aptly
- -rw-r--r-- 1 www-data www-data 7129 2019-12-23 01:03:54.000000000 +1000 curl-debs-pkgmaster.devuan.org.log
-
- ]]
- os.execute('sleep 1') -- Wait for things to start up before checking for them.
- while 0 < APT.checkExes(downloadLock .. list.out .. "-" .. host .. ".log.txt") do
- D('*<* About to yield coroutine while waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log.txt')
- coroutine.yield()
- D('*>* Resumed coroutine while waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log.txt')
- end
- D('*>* Resumed coroutine NO LONGER waiting on - 0 < APT.checkExes(' .. downloadLock .. list.out .. '-' .. host .. '.log.txt')
-
- local f = 'results/curl-' .. list.out .. "-" .. host .. ".log.txt"
- -- Should not be needed, but maybe this is why sometimes I don't see the speeds, though the file is there and valid when I look later.
- while not APT.checkFile(f) do
- D('*<* About to yield coroutine while waiting on - not APT.checkFile(' .. f .. ')')
- coroutine.yield()
- D('*>* Resumed coroutine while waiting on - not APT.checkFile(' .. f .. ')')
- end
-
- --[[ TODO - should try to figure out which server the file actually got downloaded from, and attribute the speed and errors to that server.
- Which means parsing the curl logs, not just a simple match().
- Watch out for misplaced ^M, they don't all come at the end of the line.
- Also note curl-Release-mirror.devuan.de.log.txt, timeouts don't always show the "Connected to" string.
- * Immediate connect fail for 2001:4ca0:4300::1:19: Network is unreachable
- * Connected to debian.ipacct.com (2a01:9e40::251) port 80 (#1)
- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0* Connected to devuan.bio.lmu.de (141.84.43.19) port 80 (#0)
- curl: (22) The requested URL returned error: 404 Not Found
- ]]
- local min, max, spd = 999999999999, 0
- local num = '[%d%.]+[kM]?'
- if APT.checkFile(f) then
- for l in io.lines(f) do
- local speed, crrnt = l:match('^%c *'..num..' +'..num..' +%d+ +'..num..' +%d+ +%d+ +('..num..') +%d+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +('..num..')')
- if nil ~= speed then
- if 'k' == speed:sub(-1, -1) then speed = tonumber(speed:sub(1, -2)) * 1024
- elseif 'M' == speed:sub(-1, -1) then speed = tonumber(speed:sub(1, -2)) * 1024 * 1024
- end
- speed = tonumber(speed)
- if 'k' == crrnt:sub(-1, -1) then crrnt = tonumber(crrnt:sub(1, -2)) * 1024
- elseif 'M' == crrnt:sub(-1, -1) then crrnt = tonumber(crrnt:sub(1, -2)) * 1024 * 1024
- end
- crrnt = tonumber(crrnt)
- if speed < min and speed ~= 0 then min = speed end
- if speed > max then max = speed end
- if crrnt < min and crrnt ~= 0 then min = crrnt end
- if crrnt > max then max = crrnt end
- end
- if l:find('timed out') ~= nil then
- E(" TIMEOUT " .. timeouts + 1 .. ', details in <a href="curl-' .. list.out .. '-' .. host .. '.log.txt">curl-' .. list.out .. '-' .. host .. '.log.txt</a>', 'http', '', host)
- timeouts = timeouts + 1
- APT.results["timeout"] = true
- end
- end
- end
- APT.results["speed"] = {["min"] = min, ["max"] = max}
- end
-
- if (APT.options.referenceSite.value ~= host) and ('' ~= list.nextf) then
- local sem = 'results/NEW_' .. list.nextf .. '_%s.txt'
- for i, n in pairs(releases) do
- local f = sem:format(n)
- while not APT.checkFile(f) do
- D('*<* About to yield coroutine while waiting on - not APT.checkFile(' .. f .. ')')
- coroutine.yield()
- D('*>* Resumed coroutine while waiting on - not APT.checkFile(' .. f .. ')')
- end
- end
- end
- D('*** Doing list.parser() for ' .. list.out)
- list = list.parser(host)
- if APT.options.timeouts.value <= (totalTimeouts) then break end
- end
- D('*<<* About to end coroutine.')
- cor = nil
- end
-
-
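- -- Main. With arguments - test a single mirror (or one of its IPs, or a redirected file), then write its results/<host>.lua.
- -- Without arguments - rotate the results directories, fetch the mirror list, fork a tester per mirror, wait for them,
- -- merge the per-IP logs, and finally generate the reports.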
- if 0 < #arg then
- if "/" == arg[1]:sub(-1, -1) then
- W("slash at end of path! " .. arg[1])
- arg[1] = arg[1]:sub(1, -2)
- end
- if " " == arg[1]:sub(-1, -1) then
- W("space at end of path! " .. arg[1])
- arg[1] = arg[1]:sub(1, -2)
- end
- local pu = url.parse("http://" .. arg[1])
-
- if APT.redir and (nil == arg[3]) then
- arg[3] = arg[2]
- arg[2] = nil
- end
-
- if APT.testing("Integrity") or APT.testing("Updated") then
- if APT.origin and APT.options.referenceSite.value == pu.host then
- -- if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
- end
- end
-
- if not APT.logOpen(pu.host, arg[2], arg[3]) then return end
- I("Starting tests for " .. arg[1] .. " with these tests - " .. table.concat(APT.options.tests.value, ", "))
- APT.mirrors = loadfile("results/mirrors.lua")()
- APT.results = APT.padResults(APT.results)
- if APT.origin or APT.redir then APT.results["IPs"] = gatherIPs(pu.host) end
- if nil ~= arg[2] then I(" Using IP " .. arg[2]); ip = arg[2] end
- if nil ~= arg[3] then I(" Using file " .. arg[3]); end
-
- if APT.origin then
- local file = arg[3]
- if nil == file then file = '' end
- local path = pu.path
- if nil == path then path = '' end
- if APT.origin then
- local ips = APT.results["IPs"]
- if nil ~= ips then
- APT.allpairs(ips,
- function(k, v)
- if v == "A" then
- if APT.testing("IPv4") then APT.exe('./apt-panopticon.lua ' .. sendArgs .. ' -4 ' .. pu.host .. path .. ' ' .. k .. ' ' .. file):Nice():log():fork() end
- elseif v == "AAAA" then
- if APT.testing("IPv6") then APT.exe('./apt-panopticon.lua ' .. sendArgs .. ' -6 ' .. APT.IPv46 .. ' ' .. pu.host .. path .. ' ' .. k .. ' ' .. file):Nice():log():fork() end
- end
- D('logging to ' .. APT.logName(pu.host, k, file)[2])
- end
- )
- else
- E("no IPs for " .. pu.host)
- end
- end
-
- if not APT.redir then
- if (1 <= APT.options.bandwidth.value) and (APT.testing("Integrity") or APT.testing("Updated")) then
- if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
- I("Starting file downloads for " .. pu.host)
- D('*>* About to create coroutine.')
- cor = coroutine.create(doDownloads)
- local ok, message = coroutine.resume(cor, pu.host, pu.path, parseStart(pu.host))
- if not ok then cor = nil; print(message) end
- end
- end
- checkFiles(pu.host, pu.host, pu.path);
- else
- checkFiles(pu.host, pu.host, pu.path:sub(1, -1), file);
- end
- else
- checkHost(pu.host, pu.host, pu.path, arg[2], arg[3])
- end
-
- while nil ~= cor do
- os.execute('sleep 10')
- D('*>* About to resume coroutine before writing results.')
-
- local ok, message = coroutine.resume(cor)
- if not ok then cor = nil; print(message); break end
- end
-
- local f = pu.host
- if "" ~= ip then f = f .. "_" .. ip end
- -- TODO - perhaps number them if there's more than one?
- if APT.redir then f = f .. '_' .. 'R' end
- local rfile, e = io.open("results/" .. f .. ".lua", "w+")
- if nil == rfile then C("opening results file - " .. e) else
- rfile:write(APT.dumpTable(APT.results, "results") .. "\nreturn results\n")
- rfile:close()
- end
-
- if APT.origin and (not APT.redir) and (APT.options.referenceSite.value ~= pu.host) then
- os.execute('sleep 1') -- Wait for things to start up before checking for them.
- while 0 < APT.checkExes(downloadLock .. "Release-" .. pu.host .. ".log.txt") do os.execute("sleep 10") end
- while 0 < APT.checkExes(downloadLock .. "Packages-" .. pu.host .. ".log.txt") do os.execute("sleep 10") end
- while 0 < APT.checkExes(downloadLock .. "debs-" .. pu.host .. ".log.txt") do os.execute("sleep 10") end
- os.execute("sleep 5")
- if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
- end
-
- APT.logPost()
- else
- local adt = APT.exe("ls -dl results_old 2>/dev/null | cut -d '>' -f 2 | cut -d ' ' -f 2"):Do().result:sub(2, -2)
- if (nil ~= adt) and APT.checkFile(adt) then
- APT.exe('mkdir -p ' .. adt:sub(1, 18))
- :And():Nice('tar -c --xz ' .. adt .. ' -f ' .. adt:sub(1, 18) .. '/' .. adt .. '.tar.xz')
- :And('rm -fr ' .. adt):noErr():fork()
- end
- local dt = os.date('!%F-%H-%M')
- local odt = APT.exe('TZ="GMT" date -r results/stamp +%F-%H-%M 2>/dev/null'):Do().result:sub(2, -2)
- if nil ~= odt then os.execute(' rm -f results_old; ln -s results_' .. odt .. ' results_old 2>/dev/null') end
- if nil ~= dt then os.execute('mkdir -p results_' .. dt .. '; rm -f results; ln -s results_' .. dt .. ' results 2>/dev/null') end
- os.execute('if [ -f results/stamp ]; then mv results/stamp results/stamp.old; else touch results/stamp.old -t 199901010000; fi; touch results/stamp')
- if not APT.keep then
- os.execute("rm -f results/*.html 2>/dev/null")
- os.execute("rm -f results/*.txt 2>/dev/null")
- end
-
- if not APT.logOpen('apt-panopticon') then return end
- I("Starting tests " .. table.concat(APT.options.tests.value, ", "))
- os.execute("mkdir -p results")
- APT.mirrors = getMirrors()
- checkHost(APT.options.referenceSite.value)
-
- for k, m in pairs(APT.mirrors) do
- local pu = url.parse("http://" .. m.BaseURL)
- if APT.options.referenceSite.value ~= pu.host then
- checkHost(m.BaseURL)
- end
- end
-
- while not APT.checkFile('results/LOG_' .. APT.options.referenceSite.value .. '.html') do -- Wait for things to start up before checking for them.
- D('Waiting for results/LOG_' .. APT.options.referenceSite.value .. '.html');
- os.execute("sleep 5")
- -- TODO - count these, and abort if it takes too long.
- -- Try something similar for the other "Wait for things to start up before checking for them.", maybe fold it into APT.exe.
- end
- while 1 <= APT.checkExes("apt-panopticon.lua " .. sendArgs) do os.execute("sleep 5") end
-
- local APT_args = APT.args
- local APT_logFile = APT.logFile
- local debians = {}
- local srvs = io.popen('ls -1 results/*.lua')
- for l in srvs:lines() do
- local hst = l:sub(9, -5)
- if nil ~= l:find('_R%.lua') then hst = hst:sub(1, -3) end
- if (hst:find('_') == nil) and (nil == APT.mirrors[hst]) then
- local ips = loadfile(l)().IPs
- if nil ~= ips then
- debians[hst] = {Country = '', FDQN = hst, Active = 'yes', Rate = '', BaseURL = hst, Protocols = {http = true, https = true}, Bandwidth = '', IPs = ips}
- local baseFiles = {}
- local IPfiles = {}
- for i, a in pairs(ips) do
- IPfiles[i] = {}
- if type(a) == 'table' then
- for j, b in pairs(a) do
- IPfiles[i][j] = {}
- end
- end
- end
- local files = io.popen('ls -1 results/LOG_' .. hst .. '_*.html')
- for ll in files:lines() do
- local dn = false
- for i, a in pairs(ips) do
- if type(a) == 'table' then
- for j, b in pairs(a) do
- if nil ~= ll:match('(results/LOG_' .. hst .. '_' .. j .. '_.*)') then
- table.insert(IPfiles[i][j], ll)
- dn = true
- end
- end
- else
- if nil ~= ll:match('(results/LOG_' .. hst .. '_' .. i .. '_.*)') then
- table.insert(IPfiles[i], ll)
- dn = true
- end
- end
- end
- if not dn then table.insert(baseFiles, ll) end
- end
-
- local combine = function(ip, a)
- if not APT.logOpen(hst, ip) then
- print('PROBLEM OPENING LOG FILE ' .. hst .. ' ' .. ip)
- else
- APT.logFile:write('<h1>Note log lines will be out of order, this is a bunch of other log files combined.</h1>\n')
- for i, f in pairs(a) do
- f = f:sub(9, -1)
- APT.logFile:write('<hr>\n<hr>\n<h2><a href="' .. f .. '">' .. f .. '</a></h2>\n')
- for ln in io.lines('results/' .. f) do
- if ln:match('^' .. os.date('!%Y%-%m%-%d ') .. '.*$') then APT.logFile:write(ln .. '\n') end -- %F isn't good enough, coz we have to escape the '-'.
- end
- end
- end
- APT.logPost()
- APT.args = APT_args
- APT.logFile = APT_logFile
- end
- combine('', baseFiles)
- for ip, a in pairs(IPfiles) do
- if nil == a[1] then
- for i, f in pairs(a) do
- combine(i, f)
- end
- else
- combine(ip, a)
- end
- end
-
- end
- end
- end
- local file, e = io.open("results/debians.lua", "w+")
- if nil == file then C("opening debians file - " .. e) else
- file:write(APT.dumpTable(debians, "debians") .. "\nreturn debians\n")
- file:close()
- end
-
-
- for k, v in pairs(APT.mirrors) do
- local f = 'results/' .. k .. '.lua'
- if APT.checkFile(f) then
- results = loadfile(f)()
- APT.mirrors[k]['IPs'] = results.IPs
- end
- end
- local file, e = io.open("results/mirrors.lua", "w+")
- if nil == file then C("opening mirrors file - " .. e) else
- file:write(APT.dumpTable(APT.mirrors, "mirrors") .. "\nreturn mirrors\n")
- file:close()
- end
-
- -- Create the reports.
- for n, r in pairs(APT.options.reports.value) do
- if APT.checkFile("apt-panopticon-report-" .. r .. ".lua") then
- I("Creating " .. r .. " report.")
- APT.exe("./apt-panopticon-report-" .. r .. ".lua " .. sendArgs):log():Do()
- end
- end
-
- I('Total run time was ' .. (os.time() - now) .. ' seconds.')
- APT.logPost()
- end