A Lua script to check the health of Devuan Linux package mirrors. The master repository is at https://sledjhamr.org/cgit/apt-panopticon/ and the issue tracker is at https://sledjhamr.org/mantisbt/project_page.php?project_id=13
  1. #!/usr/bin/env luajit
  2. local APT = require 'apt-panopticommon'
  3. local D = APT.D
  4. local I = APT.I
  5. local W = APT.W
  6. local E = APT.E
  7. local C = APT.C
  8. local arg, sendArgs = APT.parseArgs({...})
  9. APT.html = true
  10. --[[ TODO - What to do about HTTPS://deb.devuan.org/ redirects.
  11. Some mirrors give a 404.
  12. Sledjhamr gives a 404, coz it's not listening on 443 for deb.devuan.org.
  13. Some mirrors give a 200.
  14. They shouldn't have the proper certificate, but are giving a result anyway.
  15. ]]
  16. local defaultURL = {scheme = "http"}
  17. local releases = {"jessie", "ascii", "beowulf", "ceres"}
  18. local releaseFiles =
  19. {
  20. -- Release file.
  21. "/Release", -- 3.7 MB
  22. "/Release.gpg", --
  23. -- "/InRelease", -- 3.7 MB
  24. -- "/main/binary-all/Packages.xz", -- 2.6 GB for all that changed recently.
  25. -- Contents files. -- 3.3 GB
  26. -- "/main/Contents-all.xz",
  27. -- "/main/Contents-amd64.xz",
  28. -- "/main/Contents-arm64.xz",
  29. -- "-security/main/Contents-all.xz",
  30. -- "-security/main/Contents-amd64.xz",
  31. -- "-security/main/Contents-arm64.xz",
  32. }
  33. local notExist =
  34. {
  35. "ceres-security" -- This will never exist; it's our code name for the testing suite.
  36. }
  37. local referenceDebs =
  38. {
  39. -- Debian package.
  40. "merged/pool/DEBIAN/main/d/dash/dash_0.5.8-2.4_amd64.deb",
  41. -- Debian security package. NOTE this one should always be redirected?
  42. "merged/pool/DEBIAN-SECURITY/updates/main/a/apt/apt-transport-https_1.4.9_amd64.deb",
  43. }
  44. local referenceDevs =
  45. {
  46. -- Devuan package. NOTE this one should not get redirected, but that's more a warning than an error.
  47. "merged/pool/DEVUAN/main/d/desktop-base/desktop-base_3.0_all.deb",
  48. "merged/pool/DEVUAN/main/u/util-linux/util-linux_2.32.1-0.1+devuan2.1_amd64.deb",
  49. }
  50. local curlStatus =
  51. {
  52. [1 ] = "Unsupported protocol. This build of curl has no support for this protocol.",
  53. [2 ] = "Failed to initialize.",
  54. [3 ] = "URL malformed. The syntax was not correct.",
  55. [4 ] = "A feature or option that was needed to perform the desired request was not enabled or was explicitly disabled at build-time. To make curl able to do this, you probably need another build of libcurl!",
  56. [5 ] = "Couldn't resolve proxy. The given proxy host could not be resolved.",
  57. [6 ] = "Couldn't resolve host. The given remote host was not resolved.",
  58. [7 ] = "Failed to connect to host.",
  59. [8 ] = "Weird server reply. The server sent data curl couldn't parse.",
  60. [9 ] = "FTP access denied. The server denied login or denied access to the particular resource or directory you wanted to reach. Most often you tried to change to a directory that doesn't exist on the server.",
  61. [10] = "While waiting for the server to connect back when an active FTP session is used, an error code was sent over the control connection or similar.",
  62. [11] = "FTP weird PASS reply. Curl couldn't parse the reply sent to the PASS request.",
  63. [12] = "During an active FTP session while waiting for the server to connect, the CURLOPT_ACCEPTTIMEOUT_MS (or the internal default) timeout expired.",
  64. [13] = "FTP weird PASV reply, Curl couldn't parse the reply sent to the PASV request.",
  65. [14] = "FTP weird 227 format. Curl couldn't parse the 227-line the server sent.",
  66. [15] = "FTP can't get host. Couldn't resolve the host IP we got in the 227-line.",
  67. [16] = "A problem was detected in the HTTP2 framing layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
  68. [17] = "FTP couldn't set binary. Couldn't change transfer method to binary.",
  69. [18] = "Partial file. Only a part of the file was transferred.",
  70. [19] = "FTP couldn't download/access the given file, the RETR (or similar) command failed.",
  71. [21] = "FTP quote error. A quote command returned error from the server.",
  72. [22] = "HTTP page not retrieved. The requested url was not found or returned another error with the HTTP error code being 400 or above. This return code only appears if -f, --fail is used.",
  73. [23] = "Write error. Curl couldn't write data to a local filesystem or similar.",
  74. [25] = "FTP couldn't STOR file. The server denied the STOR operation, used for FTP uploading.",
  75. [26] = "Read error. Various reading problems.",
  76. [27] = "Out of memory. A memory allocation request failed.",
  77. [28] = "Operation timeout. The specified time-out period was reached according to the conditions.",
  78. [30] = "FTP PORT failed. The PORT command failed. Not all FTP servers support the PORT command, try doing a transfer using PASV instead!",
  79. [31] = "FTP couldn't use REST. The REST command failed. This command is used for resumed FTP transfers.",
  80. [33] = "HTTP range error. The range \"command\" didn't work.",
  81. [34] = "HTTP post error. Internal post-request generation error.",
  82. [35] = "SSL connect error. The SSL handshaking failed.",
  83. [36] = "FTP bad download resume. Couldn't continue an earlier aborted download.",
  84. [37] = "FILE couldn't read file. Failed to open the file. Permissions?",
  85. [38] = "LDAP cannot bind. LDAP bind operation failed.",
  86. [39] = "LDAP search failed.",
  87. [41] = "Function not found. A required LDAP function was not found.",
  88. [42] = "Aborted by callback. An application told curl to abort the operation.",
  89. [43] = "Internal error. A function was called with a bad parameter.",
  90. [45] = "Interface error. A specified outgoing interface could not be used.",
  91. [47] = "Too many redirects. When following redirects, curl hit the maximum amount.",
  92. [48] = "Unknown option specified to libcurl. This indicates that you passed a weird option to curl that was passed on to libcurl and rejected. Read up in the manual!",
  93. [49] = "Malformed telnet option.",
  94. [51] = "The peer's SSL certificate or SSH MD5 fingerprint was not OK.",
  95. [52] = "The server didn't reply anything, which here is considered an error.",
  96. [53] = "SSL crypto engine not found.",
  97. [54] = "Cannot set SSL crypto engine as default.",
  98. [55] = "Failed sending network data.",
  99. [56] = "Failure in receiving network data.",
  100. [58] = "Problem with the local certificate.",
  101. [59] = "Couldn't use specified SSL cipher.",
  102. [60] = "Peer certificate cannot be authenticated with known CA certificates.",
  103. [61] = "Unrecognized transfer encoding.",
  104. [62] = "Invalid LDAP URL.",
  105. [63] = "Maximum file size exceeded.",
  106. [64] = "Requested FTP SSL level failed.",
  107. [65] = "Sending the data requires a rewind that failed.",
  108. [66] = "Failed to initialise SSL Engine.",
  109. [67] = "The user name, password, or similar was not accepted and curl failed to log in.",
  110. [68] = "File not found on TFTP server.",
  111. [69] = "Permission problem on TFTP server.",
  112. [70] = "Out of disk space on TFTP server.",
  113. [71] = "Illegal TFTP operation.",
  114. [72] = "Unknown TFTP transfer ID.",
  115. [73] = "File already exists (TFTP).",
  116. [74] = "No such user (TFTP).",
  117. [75] = "Character conversion failed.",
  118. [76] = "Character conversion functions required.",
  119. [77] = "Problem with reading the SSL CA cert (path? access rights?).",
  120. [78] = "The resource referenced in the URL does not exist.",
  121. [79] = "An unspecified error occurred during the SSH session.",
  122. [80] = "Failed to shut down the SSL connection.",
  123. [81] = "Socket is not ready for send/recv. Wait till it's ready and try again. This return code is only returned from curl_easy_recv and curl_easy_send.",
  124. [82] = "Could not load CRL file, missing or wrong format (added in 7.19.0).",
  125. [83] = "Issuer check failed (added in 7.19.0).",
  126. [84] = "The FTP PRET command failed",
  127. [85] = "RTSP: mismatch of CSeq numbers",
  128. [86] = "RTSP: mismatch of Session Identifiers",
  129. [87] = "unable to parse FTP file list",
  130. [88] = "FTP chunk callback reported error",
  131. [89] = "No connection available, the session will be queued",
  132. [90] = "SSL public key does not match pinned public key",
  133. [91] = "Status returned failure when asked with CURLOPT_SSL_VERIFYSTATUS.",
  134. [92] = "Stream error in the HTTP/2 framing layer.",
  135. [93] = "An API function was called from inside a callback.",
  136. [94] = "An authentication function returned an error.",
  137. [95] = "A problem was detected in the HTTP/3 layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
  138. }
  139. local socket = require 'socket'
  140. local ftp = require 'socket.ftp'
  141. local http = require 'socket.http'
  142. local url = require 'socket.url'   local ltn12 = require 'ltn12' -- ltn12 is needed for the ltn12.sink.table() used in nlst() below.
  143. local ip = ""
  144. local repoExists = function (r)
  145. r = r:match("([%a-]*)")
  146. if nil == r then return false end
  147. for k, v in pairs(notExist) do
  148. if v == r then return false end
  149. end
  150. return true
  151. end
  152. local IP = {}
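-- gatherIPs: resolve host with dig and cache every A / AAAA / CNAME / SRV answer in IP[host].
-- Illustrative example of what the gmatch below expects from a dig answer line:
--   "deb.devuan.org.   IN   CNAME   deb.roundr.devuan.org."
-- parses to k = "deb.devuan.org", t = "CNAME", v = "deb.roundr.devuan.org"; CNAME targets are
-- then resolved recursively.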
  153. gatherIPs = function (host)
  154. if nil == IP[host] then
  155. local IPs
  156. local dig = io.popen('dig +keepopen +noall +nottlid +answer ' .. host .. ' A ' .. host .. ' AAAA ' .. host .. ' CNAME ' .. host .. ' SRV | sort -r | uniq')
  157. repeat
  158. IPs = dig:read("*l")
  159. if nil ~= IPs then
  160. for k, t, v in IPs:gmatch("([%w_%-%.]*)%.%s*IN%s*(%a*)%s*(.*)") do
  161. if "." == v:sub(-1, -1) then v = v:sub(1, -2) end
  162. if nil == IP[k] then IP[k] = {} end
  163. IP[k][v] = t
  164. D(" DNS record " .. host .. " == " .. k .. " type " .. t .. " -> " .. v)
  165. if t == "CNAME" then
  166. gatherIPs(v)
  167. IP[k][v] = IP[v]
  168. elseif t == "SRV" then
  169. print("SRV record found, now what do we do?")
  170. end
  171. end
  172. end
  173. until nil == IPs
  174. end
  175. end
  176. -- Returns FTP directory listing
  177. local nlst = function (u)
  178. local t = {}
  179. local p = url.parse(u)
  180. p.command = "nlst"
  181. p.sink = ltn12.sink.table(t)
  182. local r, e = ftp.get(p)
  183. return r and table.concat(t), e
  184. end
  185. local timeouts = 0;
  186. local totalTimeouts = 0
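-- checkHEAD: HEAD-check one URL against a given host (mirror name or literal IP), following any
-- redirects manually.  r counts redirects so far, retry counts retries after curl errors or
-- timeouts, and sanity is true when this is a URLSanity re-check of a deliberately mangled URL.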
  187. checkHEAD = function (host, URL, r, retry, sanity)
  188. if nil == r then r = 0 end
  189. if nil == retry then retry = 0 end
  190. if nil == sanity then sanity = false end
  191. local check = "Checking file"
  192. local PU = url.parse(URL, defaultURL)
  193. local pu = url.parse(PU.scheme .. "://" .. host, defaultURL)
  194. if 0 < r then
  195. check = "Redirecting to"
  196. end
  197. if 0 < retry then
  198. os.execute("sleep " .. math.random(1, 3))
  199. check = "Retry " .. retry .. " " .. check
  200. end
  201. if 2 <= timeouts then
  202. E("too many timeouts! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
  203. return
  204. end
  205. if 4 <= (totalTimeouts) then
  206. E("Way too many timeouts!", PU.scheme, "", host)
  207. return
  208. end
  209. if 20 <= r then
  210. E("too many redirects! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
  211. return
  212. end
  213. if 4 <= retry then
  214. E("too many retries! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
  215. return
  216. end
  217. D(PU.scheme .. " :// " .. check .. " " .. host .. " -> " .. URL)
  218. if not APT.testing(PU.scheme, host) then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL); return end
  219. -- TODO - Perhaps we should try it anyway, and mark it as a warning if it DOES work?
  220. if "https" == PU.scheme and APT.options.roundRobin.value == host then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL .. " mirrors shouldn't have the correct cert."); return end
  221. --[[ Using curl command line -
  222. -I - HEAD
  223. --connect-to domain:port:IP:port - connect to IP, but use SNI from URL.
  224. -header "" - add extra headers.
  225. -L - DO follow redirects.
  226. --max-redirs n - set maximum redirects, default is 50, -1 = unlimited.
  227. --retry n - maximum retries, default is 0, no retries.
  228. -o file - write to file instead of stdout.
  229. --path-as-is - https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html might be useful for URLSanity.
  230. -s silent - don't output progress or error messages.
  231. --connect-timeout n - timeout in seconds.
  232. Should return with error code 28 on a timeout?
  233. -D file - write the received headers to a file. This includes the status code and string.
  234. ]]
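-- For illustration only (hypothetical mirror name and IP), the command assembled below ends up
-- looking roughly like:
--   ionice -c3 nice -n 19 curl -I --retry 0 -s --path-as-is --connect-timeout <timeout> --max-redirs 0
--     --connect-to "mirror.example.org::198.51.100.7:" -o /dev/null -D results/"HEADERS_..."
--     -H "Host: mirror.example.org" -w "#%{http_code} %{ssl_verify_result} %{url_effective}\n"
--     http://mirror.example.org/merged/... >>results/"STATUS_..."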
  235. local fname = host .. "_" .. PU.host .. "_" .. PU.path:gsub("/", "_") .. ".txt"
  236. local hdr = ""
  237. local IP = ""
  238. if pu.host ~= PU.host then
  239. if "http" == PU.scheme then
  240. hdr = '-H "Host: ' .. host .. '"'
  241. end
  242. IP = '--connect-to "' .. pu.host .. '::' .. PU.host .. ':"'
  243. end
  244. local cmd = 'ionice -c3 nice -n 19 curl -I --retry 0 -s --path-as-is --connect-timeout ' .. APT.options.timeout.value .. ' --max-redirs 0 ' ..
  245. IP .. ' ' .. '-o /dev/null -D results/"HEADERS_' .. fname .. '" ' ..
  246. hdr .. ' -w "#%{http_code} %{ssl_verify_result} %{url_effective}\\n" ' .. PU.scheme .. '://' .. host .. PU.path .. ' >>results/"STATUS_' .. fname .. '"'
  247. local status, result = APT.execute(cmd)
  248. os.execute('cat results/"HEADERS_' .. fname .. '" >>results/"STATUS_' .. fname .. '" 2>/dev/null; rm -f results/"HEADERS_' .. fname .. '" 2>/dev/null')
  249. if "0" ~= status then
  250. local msg = curlStatus[0 + status]
  251. if nil == msg then msg = "UNKNOWN CURL STATUS CODE!" end
  252. if sanity then
  253. E(" The curl command returned an error code of " .. status .. " - " .. msg, PU.scheme, "URLSanity", host)
  254. else
  255. E(" The curl command returned an error code of " .. status .. " - " .. msg, PU.scheme, "", host)
  256. end
  257. if ("28" == status) or ("7" == status) then
  258. if sanity then
  259. E(" TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1, PU.scheme, "URLSanity", host)
  260. else
  261. E(" TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1, PU.scheme, "", host)
  262. end
  263. timeouts = timeouts + 1
  264. end
  265. checkHEAD(host, URL, r, retry + 1, sanity)
  266. return
  267. end
  268. local rfile, e = io.open("results/STATUS_" .. fname, "r")
  269. local code = "000"
  270. local cstr = ""
  271. local location = nil
  272. if nil == rfile then W("opening results/STATUS_" .. fname .. " file - " .. e) else
  273. for line in rfile:lines("*l") do
  274. if "#" == line:sub(1, 1) then
  275. code = line:sub(2, 4)
  276. if ("https" == PU.scheme) and ("0" ~= line:sub(6, 6)) then E(" The certificate is invalid.", PU.scheme, "https", host) end
  277. elseif "http" == line:sub(1, 4):lower() then
  278. -- -2 coz the headers file gets a \r at the end.
  279. cstr = line:sub(14, -2)
  280. elseif "location" == line:sub(1, 8):lower() then
  281. location = line:sub(11, -2)
  282. end
  283. end
  284. end
  285. os.execute('rm -f results/"STATUS_' .. fname .. '" 2>/dev/null')
  286. if ("4" == tostring(code):sub(1, 1)) or ("5" == tostring(code):sub(1, 1)) then
  287. if sanity then
  288. E(" " .. code .. " " .. cstr .. ". " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "URLSanity", host)
  289. else
  290. E(" " .. code .. " " .. cstr .. ". " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
  291. end
  292. else
  293. I(" " .. code .. " " .. cstr .. ". " .. check .. " " .. host .. " -> " .. URL)
  294. -- timeouts = timeouts - 1 -- Backoff the timeouts count if we managed to get through.
  295. if nil ~= location then
  296. pu = url.parse(location, defaultURL)
  297. if ('http' == location:sub(1, 4)) and (pu.scheme ~= PU.scheme) then -- Sometimes a location sans scheme is returned; this is not a protocol change.
  298. if APT.testing("Protocol") then W(" protocol changed during redirect! " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location, PU.scheme, "Protocol", host) end
  299. if (pu.host == host) and pu.path == PU.path then D("Not testing protocol change " .. URL .. " -> " .. location); return end
  300. end
  301. if location == URL then
  302. E(" redirect loop! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
  303. elseif nil == pu.host then
  304. I(" relative redirect. " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location)
  305. checkHEAD(host, PU.scheme .. "://" .. PU.host .. location, r + 1, retry, sanity)
  306. elseif (PU.host == pu.host) or (host == pu.host) then
  307. checkHEAD(pu.host, location, r + 1, retry, sanity)
  308. else
  309. --[[ The hard part here is that we end up throwing ALL of the test files at the redirected location.
  310. Not good for deb.debian.org, which we should only be throwing .debs at.
  311. What we do is loop through the DNS entries, and only test the specific protocol & file being tested here.
  312. This is what I came up with for checking if we are already testing a specific URL.
  313. Still duplicates a tiny bit, but much less than the previous find based method.
  314. ]]
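-- The marker is a results/<redirect-target-with-slashes-replaced-by-underscores>.check file; if it
-- already exists, another forked checker has picked this target up and it is not tested again.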
  315. local file = pu.host .. "://" .. pu.path
  316. local f = io.popen(string.format('if [ ! -f results/%s.check ] ; then touch results/%s.check; echo -n "check"; fi', file:gsub("/", "_"), file:gsub("/", "_") )):read("*a")
  317. if (nil == f) or ("check" == f) then
  318. I(" Now checking redirected host " .. file)
  319. checkHost(pu.host, pu.host, nil, "redir", pu.path)
  320. else
  321. D(" Already checking " .. file)
  322. end
  323. end
  324. end
  325. end
  326. end
  327. local checkTimeouts = function(host, scheme, URL)
  328. if APT.testing(scheme) then
  329. totalTimeouts = totalTimeouts + timeouts; timeouts = 0
  330. checkHEAD(host, scheme .. "://" .. URL)
  331. if 4 <= (totalTimeouts) then
  332. E("Way too many timeouts!", scheme, "", host)
  333. return true
  334. end
  335. end
  336. if APT.testing("URLSanity") then
  337. URL = URL:gsub("merged/", "merged///")
  338. totalTimeouts = totalTimeouts + timeouts; timeouts = 0
  339. checkHEAD(host, scheme .. "://" .. URL, 0, 0, true)
  340. if 4 <= (totalTimeouts) then
  341. E("Way too many timeouts!", scheme, "URLSanity", host)
  342. return true
  343. end
  344. end
  345. return false
  346. end
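-- checkFiles: run the HEAD checks (plain and URLSanity) for one host/IP, either for a single named
-- file (the redirect case) or for the reference .debs plus the Release files of every release that
-- should exist.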
  347. local checkFiles = function (host, ip, path, file)
  348. timeouts = 0
  349. if nil == path then path = "" end
  350. if nil ~= file then
  351. if "redir" == ip then ip = host end
  352. I(" Checking IP for file " .. host .. " -> " .. ip .. " " .. path .. " " .. file)
  353. if checkTimeouts(host, "http", ip .. path .. "/" .. file) then return end
  354. if checkTimeouts(host, "https", ip .. path .. "/" .. file) then return end
  355. else
  356. I(" Checking IP " .. host .. " -> " .. ip .. " " .. path)
  357. for i, s in pairs(referenceDevs) do
  358. if checkTimeouts(host, "http", ip .. path .. "/" .. s) then return end
  359. if checkTimeouts(host, "https", ip .. path .. "/" .. s) then return end
  360. end
  361. for i, s in pairs(releases) do
  362. for j, k in pairs(releaseFiles) do
  363. if repoExists(s .. k) then
  364. if checkTimeouts(host, "http", ip .. path .. "/merged/dists/" .. s .. k) then return end
  365. if checkTimeouts(host, "https", ip .. path .. "/merged/dists/" .. s .. k) then return end
  366. end
  367. end
  368. end
  369. end
  370. end
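-- checkHost: for a mirror given by name, fork a copy of this script for the origin host and one per
-- A/AAAA record found for it, so every address gets tested separately; when called with a literal
-- IP (or a redirect target) it goes straight to checkFiles.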
  371. checkHost = function (orig, host, path, ip, file)
  372. if nil == host then host = orig end
  373. if nil == path then path = "" end
  374. if nil == file then file = "" end
  375. local ph = url.parse("http://" .. host)
  376. if (nil ~= ip) and ("redir" ~= ip) then
  377. local po = url.parse("http://" .. orig)
  378. if "" ~= file then
  379. D("checking redirected file " .. po.host .. " " .. file)
  380. checkFiles(po.host, ip, path, file)
  381. else
  382. checkFiles(po.host, ip, path)
  383. end
  384. else
  385. if orig == host then
  386. D("checkHost " .. orig .. " " .. file)
  387. if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file) end
  388. else D("checkHost " .. orig .. " -> " .. host) end
  389. local h = APT.mirrors[ph.host]
  390. if nil == h then return end
  391. for k, v in pairs(h.IPs) do
  392. if "table" == type(v) then
  393. for k1, v1 in pairs(v) do
  394. if v1 == "A" then
  395. if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
  396. elseif v1 == "AAAA" then
  397. if APT.testing("IPv6") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
  398. end
  399. end
  400. else
  401. if v == "A" then
  402. if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
  403. elseif v == "AAAA" then
  404. if APT.testing("IPv6") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
  405. end
  406. end
  407. end
  408. end
  409. end
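-- addDownload appends one url/output pair to the per-host curl config file (results/<host>.curl)
-- that downloads() later feeds to curl via -K.  Each entry looks like (illustrative host):
--   url "http://mirror.example.org/merged/dists/ascii/Release"
--   output "results/mirror.example.org/merged/dists/ascii/Release"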
  410. local addDownload = function(host, URL, f, r, k)
  411. local file = k:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
  412. if APT.checkFile("results/" .. host .. "/merged/dists/" .. r .. k) then
  413. -- Curl's "check timestamp and overwrite file" stuff sucks.
  414. -- -R means the destination file gets the timestamp of the remote file.
  415. -- Can only do ONE timestamp check per command.
  416. -- This doesn't work either. All downloads get all these headers. Pffft
  417. -- local status, ts = APT.execute('TZ="GMT" ls -l --time-style="+%a, %d %b %Y %T %Z" results/' .. host .. "/merged/dists/" .. r .. k .. ' | cut -d " " -f 6-11')
  418. -- f:write('header "If-Modified-Since: ' .. ts:sub(2, -2) .. '"\n')
  419. -- Curl will DELETE the existing file if the timestamp check doesn't result in a new download, unless we change directory first,
  420. -- which won't work with multiple files in multiple directories. WTF?
  421. os.execute(" mv results/" .. host .. "/merged/dists/" .. r .. k ..
  422. " results/" .. host .. "/merged/dists/" .. r .. k .. ".old")
  423. end
  424. D('Downloading http://' .. host .. URL .. '/merged/dists/' .. r .. k)
  425. f:write('url "' .. 'http://' .. host .. URL .. '/merged/dists/' .. r .. k .. '"\n')
  426. f:write('output "results/' .. host .. '/merged/dists/' .. r .. k .. '"\n')
  427. end
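-- postDownload: once the batched curl run for a host has finished, put back the .old copy if the
-- fresh download failed, unpack .gz/.xz files, verify Release.gpg against the Devuan keyring, and
-- (on the reference host) condense Packages into a sorted "Package | Version | Filename | Size |
-- SHA256" listing used for diffing against the previous run.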
  428. local postDownload = function(host, r, k)
  429. local file = k:match(".*/([%w%.%+%-_]*)$") -- Get the filename.
  430. local dir = k:sub(1, 0 - (#file + 1))
  431. os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. k .. ".old ]" ..
  432. " && [ ! -f results/" .. host .. "/merged/dists/" .. r .. k .. " ]; then cp -a" ..
  433. " results/" .. host .. "/merged/dists/" .. r .. k .. ".old" ..
  434. " results/" .. host .. "/merged/dists/" .. r .. k .. "; fi")
  435. if ".gz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 gzip -dfk results/" .. host .. "/merged/dists/" .. r .. k) end
  436. if ".xz" == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 xz -dfk results/" .. host .. "/merged/dists/" .. r .. k .. " 2>/dev/null") end
  437. if APT.testing("Integrity") then
  438. if ".gpg" == k:sub(-4, -1) then
  439. local status, out = APT.execute("gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. r .. k ..
  440. " results/" .. host .. "/merged/dists/" .. r .. k:sub(1, -5) .. " 2>/dev/null")
  441. if "0" ~= status then E("GPG check failed - " .. host .. "/merged/dists/" .. r .. k, "http", "Integrity", host) end
  442. end
  443. -- TODO - should check the PGP sig of InRelease as well.
  444. end
  445. if APT.testing("Integrity") or APT.testing("Updated") then
  446. if "Packages." == file:sub(1, 9) then
  447. -- TODO - compare the SHA256 sums in pkgmaster's Release for both the packed and unpacked versions.
  448. -- Also note that this might get only a partial download due to maxtime.
  449. if APT.options.referenceSite.value == host then
  450. local Pp, e = io.open('results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed', "w+")
  451. if nil == Pp then W('opening results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed' .. ' file - ' .. e) else
  452. local pp = {}
  453. for l in io.lines('results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages') do
  454. if "Package: " == l:sub(1, 9) then
  455. if 0 ~= #pp then
  456. for i = 1, 5 do
  457. if nil == pp[i] then print(host .. " " .. r .. " " .. dir .. " " .. i) else Pp:write(pp[i] .. " | ") end
  458. end
  459. Pp:write("\n")
  460. end
  461. pp = {}
  462. pp[1] = l:sub(10, -1)
  463. elseif "Version: " == l:sub(1, 9) then
  464. pp[2] = l:sub(10, -1)
  465. elseif "Filename: " == l:sub(1, 10) then
  466. pp[3] = l:sub(11, -1)
  467. elseif "Size: " == l:sub(1, 6) then
  468. pp[4] = l:sub(7, -1)
  469. elseif "SHA256: " == l:sub(1, 8) then
  470. pp[5] = l:sub(9, -1)
  471. end
  472. end
  473. Pp:close()
  474. os.execute('sort results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed >results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages_parsed-sorted')
  475. if APT.checkFile('Packages/' .. r .. dir .. 'Packages_parsed-sorted') then
  476. os.execute('diff -U 0 Packages/' .. r .. dir .. 'Packages_parsed-sorted ' ..
  477. 'results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted ' ..
  478. ' | grep -E "^-" | grep -Ev "^\\+\\+\\+|^---" >>results/OLD_PACKAGES_' .. r .. '.txt')
  479. os.execute('diff -U 0 Packages/' .. r .. dir .. 'Packages_parsed-sorted ' ..
  480. 'results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted ' ..
  481. ' | grep -E "^\\+" | grep -Ev "^\\+\\+\\+|^---" >>results/NEW_Packages_' .. r .. '.txt')
  482. -- Find the smallest new package for each release.
  483. os.execute('sort -b -k 9,9 -n results/NEW_Packages_' .. r .. '.txt >results/NEW_Packages_' .. r .. '.sorted.txt')
  484. os.execute('grep -s " | pool/DEBIAN/" results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1 >results/NEW_Packages_' .. r .. '.test.txt')
  485. os.execute('grep -s " | pool/DEBIAN-SECURITY/" results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_' .. r .. '.test.txt')
  486. os.execute('grep -s " | pool/DEVUAN/" results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_' .. r .. '.test.txt')
  487. else
  488. C("Can't find file Packages/" .. r .. dir .. "Packages_parsed-sorted")
  489. end
  490. os.execute('mkdir -p Packages/' .. r .. dir)
  491. os.execute('mv -f results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted Packages/' .. r .. dir .. 'Packages_parsed-sorted')
  492. end
  493. end
  494. os.execute('rm -f results/' .. host .. '/merged/dists/' .. r .. dir .. 'Packages 2>/dev/null')
  495. os.execute('rm -f results/' .. host .. '/merged/dists/' .. r .. dir .. 'Packages.* 2>/dev/null')
  496. end
  497. end
  498. end
  499. local downloadLock = "flock -n results/curl-"
  500. local download = "curl --connect-timeout " .. APT.options.timeout.value .. " --create-dirs -f -L --max-time " .. APT.options.maxtime.value .. " -z 'results/stamp.old' -v -R "
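-- downloads: append download entries for one host to results/<host>.curl; unless a non-empty list
-- was passed in (queue only), it then forks the batched curl run that fetches everything queued so
-- far, serialised per host with flock.  For illustration only (hypothetical host), the forked
-- command looks roughly like:
--   ionice -c3 nice -n 19 flock -n results/curl-META-mirror.example.org.lock
--     curl --connect-timeout <timeout> --create-dirs -f -L --max-time <maxtime> -z 'results/stamp.old'
--     -v -R --stderr results/curl-META-mirror.example.org.log -K results/mirror.example.org.curl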
  501. local downloads = function(host, URL, release, list)
  502. if nil == URL then URL = "" end
  503. local lock = "META-" .. host .. ".lock"
  504. local log = " --stderr results/curl-META-" .. host .. ".log"
  505. local cm = "ionice -c3 nice -n 19 " .. downloadLock .. lock .. " " .. download .. log .. " -K results/" .. host .. ".curl"
  506. if APT.testing("IPv4") and (not APT.testing("IPv6")) then cm = cm .. ' -4' end
  507. if (not APT.testing("IPv4")) and APT.testing("IPv6") then cm = cm .. ' -6' end
  508. f, e = io.open("results/" .. host .. ".curl", "a+")
  509. if nil == f then C("opening curl file - " .. e); return end
  510. if nil ~= list then
  511. if "" ~= list then
  512. if nil ~= release then
  513. for l in list:gmatch("\n*([^\n]+)\n*") do
  514. addDownload(host, URL, f, release, "/" .. l)
  515. end
  516. else
  517. D('Downloading http://' .. host .. URL .. '/merged/' .. list)
  518. f:write('url "' .. 'http://' .. host .. URL .. '/merged/' .. list .. '"\n')
  519. f:write('output "results/' .. host .. '/merged/' .. list .. '"\n')
  520. end
  521. f:close()
  522. return
  523. end
  524. else
  525. for i, s in pairs(releases) do
  526. for j, k in pairs(releaseFiles) do
  527. if repoExists(s .. k) then
  528. addDownload(host, URL, f, s, k)
  529. end
  530. end
  531. end
  532. end
  533. f:close()
  534. APT.fork(cm)
  535. end
  536. local getMirrors = function ()
  537. local mirrors = {}
  538. local host = ""
  539. local m = {}
  540. local active = true
  541. local URL = "http://" .. APT.options.referenceSite.value .. "/mirror_list.txt"
  542. I("getting mirrors.")
  543. local p, c, h = http.request(URL)
  544. if nil == p then E(c .. " fetching " .. URL) else
  545. for l in p:gmatch("\n*([^\n]+)\n*") do
  546. local t, d = l:match("(%a*):%s*(.*)")
  547. d = string.lower(d)
  548. if "FQDN" == t then
  549. if "" ~= host then
  550. mirrors[host] = m
  551. m = {}
  552. active = true
  553. end
  554. host = d
  555. m[t] = d
  556. gatherIPs(host)
  557. m["IPs"] = IP[host]
  558. elseif "Protocols" == t then
  559. local prot = {}
  560. for w in d:gmatch("(%w+)") do
  561. prot[w] = true;
  562. end
  563. m[t] = prot
  564. elseif "Active" == t and nil == d:sub(1, 3):find("yes", 1, true) then
  565. W("Mirror " .. host .. " is not active - " .. d, "", "", host)
  566. active = false
  567. m[t] = d
  568. -- TODO - Should do some input validation on BaseURL, and everything else.
  569. else
  570. m[t] = d
  571. end
  572. end
  573. if "" ~= host --[[and active]] then
  574. mirrors[host] = m
  575. end
  576. end
  577. if APT.testing("DNSRR") then
  578. mirrors[APT.options.roundRobin.value] = { ["Protocols"] = { ["http"] = true; ["https"] = true; }; ["FQDN"] = 'deb.devuan.org'; ["Active"] = 'yes'; ["BaseURL"] = 'deb.devuan.org'; }
  579. gatherIPs(APT.options.roundRobin.value)
  580. mirrors[APT.options.roundRobin.value].IPs = IP[APT.options.roundRobin.value]
  581. end
  582. local file, e = io.open("results/mirrors.lua", "w+")
  583. if nil == file then C("opening mirrors file - " .. e) else
  584. file:write(APT.dumpTable(mirrors, "", "mirrors") .. "\nreturn mirrors\n")
  585. file:close()
  586. end
  587. return mirrors
  588. end
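-- Invoked with arguments: this is a forked per-mirror (or per-IP) run.  arg[1] is the mirror host
-- plus optional path, arg[2] an optional literal IP to test, arg[3] an optional single file; results
-- are written to results/<host>[_<ip>].lua and a per-host LOG_*.html file.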
  589. if 0 < #arg then
  590. if "/" == arg[1]:sub(-1, -1) then
  591. W("slash at end of path! " .. arg[1])
  592. arg[1] = arg[1]:sub(1, -2)
  593. end
  594. if " " == arg[1]:sub(-1, -1) then
  595. W("space at end of path! " .. arg[1])
  596. arg[1] = arg[1]:sub(1, -2)
  597. end
  598. local pu = url.parse("http://" .. arg[1])
  599. if APT.testing("Integrity") or APT.testing("Updated") then
  600. if APT.origin and APT.options.referenceSite.value == pu.host then
  601. -- if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
  602. end
  603. end
  604. if nil ~= arg[2] then
  605. APT.logFile, e = io.open("results/LOG_" .. pu.host .. "_" .. arg[2] .. ".html", "a+")
  606. else
  607. APT.logFile, e = io.open("results/LOG_" .. pu.host .. ".html", "a+")
  608. end
  609. if nil == APT.logFile then C("opening log file - " .. e); return end
  610. APT.logPre()
  611. I("Starting tests for " .. arg[1] .. " with these tests - " .. table.concat(APT.options.tests.value, ", "))
  612. APT.mirrors = loadfile("results/mirrors.lua")()
  613. if nil ~= arg[2] then I(" Using IP " .. arg[2]); ip = arg[2] end
  614. if nil ~= arg[3] then I(" Using file " .. arg[3]); end
  615. for k, v in pairs{"ftp", "http", "https", "rsync"} do
  616. if APT.testing(v) then
  617. local tests = {errors = 0; warnings = 0}
  618. if APT.testing("Integrity") then tests.Integrity = {errors = 0; warnings = 0} end
  619. if APT.testing("Protocol") then tests.Protocol = {errors = 0; warnings = 0} end
  620. if APT.testing("Updated") then tests.Updated = {errors = 0; warnings = 0} end
  621. if APT.testing("URLSanity") then tests.URLSanity = {errors = 0; warnings = 0} end
  622. APT.results[v] = tests
  623. end
  624. end
  625. if APT.origin then
  626. if APT.testing("Integrity") or APT.testing("Updated") then
  627. if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
  628. I("Starting file downloads for " .. pu.host)
  629. downloads(pu.host, pu.path)
  630. end
  631. end
  632. checkFiles(pu.host, pu.host, pu.path);
  633. else
  634. checkHost(pu.host, pu.host, pu.path, arg[2], arg[3])
  635. end
  636. if APT.testing("Integrity") or APT.testing("Updated") then
  637. if 4 > (totalTimeouts) then
  638. if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
  639. while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
  640. os.execute( "rm -f results/" .. pu.host .. ".curl 2>/dev/null; rm -f results/curl-META-" .. pu.host .. ".lock 2>/dev/null; " ..
  641. "mv results/curl-META-" .. pu.host .. ".log results/curl-Release-" .. pu.host .. ".log")
  642. for i, n in pairs(releases) do
  643. for l, o in pairs(releaseFiles) do
  644. if repoExists(i .. o) then
  645. postDownload(pu.host, n, o)
  646. end
  647. end
  648. if APT.checkFile('results/' .. pu.host .. '/merged/dists/' .. n .. '/Release') then
  649. os.execute('sort -k 3 results/' .. pu.host .. '/merged/dists/' .. n .. '/Release >results/' .. pu.host .. '/merged/dists/' .. n .. '/Release.SORTED')
  650. if APT.checkFile('results_old/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED') then
  651. if APT.options.referenceSite.value == pu.host then
  652. os.execute('diff -U 0 results_old/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED ' ..
  653. 'results/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED ' ..
  654. '| grep -v "@@" | grep "^+" | grep "Packages.xz$" | cut -c 77- >results/NEW_Release_' .. n .. '.txt')
  655. -- TODO - Maybe check the date in Release, though since they are updated daily, is there any point? Perhaps it's for checking amprolla got run?
  656. os.execute('rm -f results/' .. pu.host .. '/merged/dists/' .. n .. '/Release 2>/dev/null')
  657. else
  658. -- TODO - compare to the pkgmaster copy.
  659. end
  660. local dfile, e = io.open('results/NEW_Release_' .. n .. '.txt', "r")
  661. if nil == dfile then W("opening results/NEW_Release_" .. n .. " file - " .. e) else
  662. local diff = dfile:read("*a")
  663. if "" ~= diff then
  664. downloads(pu.host, pu.path, n, diff)
  665. end
  666. end
  667. end
  668. end
  669. end
  670. downloads(pu.host, pu.path, "", "")
  671. while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
  672. os.execute( "rm -f results/" .. pu.host .. ".curl 2>/dev/null; rm -f results/curl-META-" .. pu.host .. ".lock 2>/dev/null; " ..
  673. "mv results/curl-META-" .. pu.host .. ".log results/curl-Packages-" .. pu.host .. ".log")
  674. for i, n in pairs(releases) do
  675. local dfile, e = io.open('results/NEW_Release_' .. n .. '.txt', "r")
  676. if nil == dfile then W("opening results/NEW_Release_" .. n .. ".txt file - " .. e) else
  677. local diff = dfile:read("*a")
  678. for l in diff:gmatch("\n*([^\n]+)\n*") do
  679. postDownload(pu.host, n, "/" .. l)
  680. end
  681. end
  682. if APT.options.referenceSite.value == pu.host then
  683. -- In case it wasn't dealt with already.
  684. os.execute('touch results/NEW_Packages_' .. n .. '.test.txt')
  685. end
  686. end
  687. for i, n in pairs(releases) do
  688. local nfile, e = io.open('results/NEW_Packages_' .. n .. '.test.txt', "r")
  689. if nil == nfile then W("opening results/NEW_Packages_" .. n .. ".test.txt file - " .. e) else
  690. for l in nfile:lines() do
  691. local p = l:match('(pool/.*%.deb)')
  692. if nil ~= p then
  693. downloads(pu.host, pu.path, nil, p)
  694. end
  695. end
  696. end
  697. end
  698. downloads(pu.host, pu.path, nil, "")
  699. while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
  700. for i, n in pairs(releases) do
  701. local nfile, e = io.open('results/NEW_Packages_' .. n .. '.test.txt', "r")
  702. if nil == nfile then W("opening results/NEW_Packages_" .. n .. ".test.txt file - " .. e) else
  703. for l in nfile:lines() do
  704. local v, p, sz, sha = l:match(' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) |')
  705. if nil ~= p then
  706. if APT.checkFile('results/' .. pu.host .. "/merged/" .. p) then
  707. local status, fsz = APT.execute('ls -l results/' .. pu.host .. "/merged/" .. p .. ' | cut -d " " -f 5-5')
  708. if APT.testing("Integrity") then
  709. if sz ~= fsz:sub(2, -2) then -- The sub bit is to slice off the EOLs at each end.
  710. E('Package size mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Integrity', pu.host)
  711. print('|' .. sz .. '~=' .. fsz:sub(2, -2) .. '|')
  712. else
  713. local status, fsha = APT.execute('sha256sum results/' .. pu.host .. "/merged/" .. p .. ' | cut -d " " -f 1')
  714. if sha ~= fsha:sub(2, -2) then E('Package SHA256 sum mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Integrity', pu.host) end
  715. -- TODO - maybe check the PGP key, though packages are mostly not signed.
  716. end
  717. end
  718. if APT.testing("Updated") then
  719. if sz ~= fsz:sub(2, -2) then
  720. E('Package size mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Updated', pu.host)
  721. end
  722. end
  723. else
  724. E('Failed to download - results/' .. pu.host .. "/merged/" .. p, 'http', 'Updated', pu.host)
  725. end
  726. end
  727. end
  728. end
  729. end
  730. end
  731. APT.results["timeout"] = false
  732. else
  733. APT.results["timeout"] = true
  734. end
  735. end
  736. if APT.origin and APT.options.referenceSite.value ~= pu.host then
  737. if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
  738. os.execute('rm -f results/STATUS_' .. pu.host .. '* 2>/dev/null')
  739. end
  740. local min, max, spd = 999999999999, 0
  741. for i, mt in pairs({'Release', 'Packages', 'META'}) do
  742. if APT.checkFile("results/curl-" .. mt .. "-" .. pu.host .. ".log") then
  743. for l in io.lines("results/curl-" .. mt .. "-" .. pu.host .. ".log") do
  744. local speed, crrnt = l:match('^%c *%d+ +%d+k? +%d+ +%d+k? +%d+ +%d+ +(%d+k?) +%d+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +(%d+k?)')
  745. if nil ~= speed then
  746. if 'k' == speed:sub(-1, -1) then speed = speed:sub(1, -2) .. '000' end
  747. if 'k' == crrnt:sub(-1, -1) then crrnt = crrnt:sub(1, -2) .. '000' end
  748. speed = tonumber(speed)
  749. crrnt = tonumber(crrnt)
  750. if speed < min and speed ~= 0 then min = speed end
  751. if speed > max then max = speed end
  752. if crrnt < min and crrnt ~= 0 then min = crrnt end
  753. if crrnt > max then max = crrnt end
  754. end
  755. end
  756. end
  757. end
  758. APT.results["speed"] = {min = min, max = max}
  759. local f = pu.host
  760. if "" ~= ip then f = f .. "_" .. ip end
  761. local rfile, e = io.open("results/" .. f .. ".lua", "w+")
  762. if nil == rfile then C("opening results file - " .. e) else
  763. rfile:write(APT.dumpTable(APT.results, "", "results") .. "\nreturn results\n")
  764. rfile:close()
  765. end
  766. APT.logPost()
  767. APT.logFile:close()
  768. else
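-- Invoked without arguments: the master run.  Archive and rotate the results_* directories, fetch
-- the mirror list from the reference site, fork a copy of this script for each mirror, wait for
-- them all to finish, then generate the reports.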
  769. local fadt = io.popen("ls -dl results_old 2>/dev/null | cut -d '>' -f 2 | cut -d ' ' -f 2")
  770. local adt = fadt:read('*l')
  771. fadt:close()
  772. if nil ~= adt then os.execute('tar -c --xz ' .. adt .. ' -f ' .. adt .. '.tar.xz') end
  773. local dt = os.date('!%Y-%m-%d-%H-%M')
  774. local fodt = io.popen('TZ="GMT" date -r results/stamp +%Y-%m-%d-%H-%M 2>/dev/null', 'r')
  775. local odt = fodt:read('*l')
  776. fodt:close()
  777. if nil ~= odt then os.execute(' rm -f results_old; ln -s results_' .. odt .. ' results_old 2>/dev/null') end
  778. if nil ~= dt then os.execute('mkdir -p results_' .. dt .. '; rm -f results; ln -s results_' .. dt .. ' results 2>/dev/null') end
  779. os.execute('if [ -f results/stamp ]; then mv results/stamp results/stamp.old; else touch results/stamp.old -t 199901010000; fi; touch results/stamp')
  780. os.execute("rm -f results/*.check 2>/dev/null")
  781. if not APT.keep then
  782. os.execute("rm -f results/*.curl 2>/dev/null")
  783. os.execute("rm -f results/*.log 2>/dev/null")
  784. os.execute("rm -f results/*.html 2>/dev/null")
  785. os.execute("rm -f results/*.txt 2>/dev/null")
  786. end
  787. APT.logFile, e = io.open("results/LOG_apt-panopticon.html", "a+")
  788. if nil == APT.logFile then C("opening log file - " .. e); return end
  789. APT.logPre()
  790. I("Starting tests " .. table.concat(APT.options.tests.value, ", "))
  791. os.execute("mkdir -p results")
  792. APT.mirrors = getMirrors()
  793. checkHost(APT.options.referenceSite.value)
  794. for i, n in pairs(releases) do
  795. while not APT.checkFile('results/NEW_Packages_' .. n .. '.test.txt') do os.execute("sleep 10") end
  796. end
  797. for k, m in pairs(APT.mirrors) do
  798. if "/" == m.BaseURL:sub(-1, -1) then
  799. W("slash at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
  800. m.BaseURL = m.BaseURL:sub(1, -2)
  801. end
  802. if " " == m.BaseURL:sub(-1, -1) then
  803. W("space at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
  804. m.BaseURL = m.BaseURL:sub(1, -2)
  805. end
  806. local pu = url.parse("http://" .. m.BaseURL)
  807. if APT.options.referenceSite.value ~= pu.host then
  808. checkHost(m.BaseURL)
  809. APT.checkExes("apt-panopticon.lua " .. sendArgs)
  810. if APT.testing("Integrity") or APT.testing("Updated") then APT.checkExes(downloadLock) end
  811. end
  812. end
  813. while 1 <= APT.checkExes("apt-panopticon.lua " .. sendArgs) do os.execute("sleep 10") end
  814. os.execute("rm -f results/*.check; rm -f results/*.lock 2>/dev/null")
  815. -- Create the reports.
  816. for n, r in pairs(APT.options.reports.value) do
  817. if APT.checkFile("apt-panopticon-report-" .. r .. ".lua") then
  818. I("Creating " .. r .. " report.")
  819. APT.execute("./apt-panopticon-report-" .. r .. ".lua")
  820. end
  821. end
  822. if nil ~= adt then os.execute('rm -fr ' .. adt .. ' 2>/dev/null') end
  823. APT.logPost()
  824. APT.logFile:close()
  825. end