forked from public/mirror-checker
updated slackware
parent 68e13c327e
commit fb7337457e
@@ -74,11 +74,11 @@ raspbian: http://archive.raspbian.org/raspbian/ snapshotindex.txt is most likely
 sagemath: same source tarballs as them (the sage-*.tar.gz files under 'Source Code')
 salt stack: checking the "Latest release" text under the 'About' header
 scientific: https://scientificlinux.org/downloads/sl-mirrors/ not checking this one since it's abandoned
-slackware: https://mirrors.slackware.com/mirrorlist/ https://mirrors.slackware.com/slackware/ checking using the last updated date here, don't know if it's entirely accurate
+slackware: https://mirrors.slackware.com/slackware/ check whether we have each release and whether the timestamp for CHECKSUMS.md5 in each release is the same; for slackware-iso, just make sure that our list of directories is the same
 tdf: https://download.documentfoundation.org/
 trisquel: https://trisquel.info/mirmon/index.html out of date website!? please recheck this!!!
 ubuntu: https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-archive
-ubuntu-ports: http://ports.ubuntu.com/ubuntu-ports/ checks the file anonster.canonical.com, which appears to be a timestamp (check it to make sure!!!)
+ubuntu-ports: http://ports.ubuntu.com/ubuntu-ports/ checking the Release files in dists
 ubuntu-ports-releases: https://cdimage.ubuntu.com/releases/ has public repo, no timestamp, no status tracker, brute-force looped it
 ubuntu-releases: https://releases.ubuntu.com/
 vlc: http://download.videolan.org/pub/videolan/
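The new ubuntu-ports approach amounts to comparing each dist's Release file byte-for-byte on both servers. A minimal sketch of that idea; the helper name, the example dist, and the mirror path are illustrative assumptions, not part of the repo:

    import requests

    def release_matches(upstream, mirror, dist):
        # a dist is taken to be in sync when its Release file is
        # byte-identical on the upstream server and on our mirror
        r1 = requests.get(upstream + "dists/" + dist + "/Release")
        r2 = requests.get(mirror + "dists/" + dist + "/Release")
        return r1.text == r2.text

    # release_matches("http://ports.ubuntu.com/ubuntu-ports/",
    #                 "http://mirror.csclub.uwaterloo.ca/ubuntu-ports/", "focal")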
@@ -266,8 +266,8 @@
     "slackware": {
         "out_of_sync_since": null,
         "out_of_sync_interval": 86400,
-        "csc": "",
-        "upstream": "https://mirrors.slackware.com/mirrorlist/",
+        "csc": "slackware/",
+        "upstream": "https://mirrors.slackware.com/slackware/",
         "file": ""
     },
     "trisquel": {
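These fields are concatenated into the two URLs the checker compares. A minimal sketch of how this entry is consumed, assuming CSC_MIRROR in shared.py holds the base URL of the CSC mirror (as the check classmethod below does):

    import json
    from shared import CSC_MIRROR  # assumed: base URL of the CSC mirror

    with open("data.json", "r", encoding="utf-8") as f:
        data = json.load(f)

    entry = data["slackware"]
    csc_url = CSC_MIRROR + entry["csc"] + entry["file"]   # <mirror base>/slackware/
    upstream_url = entry["upstream"] + entry["file"]      # https://mirrors.slackware.com/slackware/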
@@ -1,26 +1,74 @@
 """
 Contains slackware class
 """
 
+import os
+from bs4 import BeautifulSoup
+import requests
+import re
+import datefinder  # another date finding library
 from project import Project
 from shared import CSC_MIRROR
-import requests
-import datefinder  # another date finding library
-from datetime import timedelta
-from datetime import datetime
-import re
-import pandas as pd
 
 class slackware(Project):
     """slackware class"""
-    @staticmethod
-    def check(data, project, current_time):
-        page = requests.get(data[project]["upstream"]).text
-        if (page.find("mirror.csclub.uwaterloo.ca/slackware/") != -1):
-            indexOfFile = page.find("Last Updated:")
-            matches = list(datefinder.find_dates(page[indexOfFile:]))
-            date = matches[0]  # date is of type datetime.datetime
-            return(pd.to_datetime(current_time, unit='s') - date.replace(tzinfo=None) <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s'))
-        else:
-            return False
+    @staticmethod
+    def checker(directory_URL, file_name):
+        """Return the date listed next to file_name in the directory listing, or False if it is absent"""
+        page = requests.get(directory_URL).text
+        file_index = page.find(file_name)
+
+        if file_index == -1:
+            return False
+
+        # listings render the date as either 02-Jan-2021 13:05 or 2021-01-02 13:05
+        str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])
+
+        return list(datefinder.find_dates("".join(str_dates[0])))[0]
+
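checker returns either False (the file is missing from the listing) or the parsed datetime next to it, so two directories agree exactly when the values compare equal. A usage sketch; the release directory name here is an assumed example:

    # a release is in sync when CHECKSUMS.md5 carries the same
    # timestamp on the upstream server and on our mirror
    up = slackware.checker("https://mirrors.slackware.com/slackware/slackware64-15.0/", "CHECKSUMS.md5")
    us = slackware.checker("http://mirror.csclub.uwaterloo.ca/slackware/slackware64-15.0/", "CHECKSUMS.md5")
    print(up == us)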
+    @classmethod
+    def scrape(cls, site1, site2):
+        # fetch both directory listings
+        r1 = requests.get(site1)
+        r2 = requests.get(site2)
+
+        # parse the HTML
+        s1 = BeautifulSoup(r1.text, "html.parser")
+        s2 = BeautifulSoup(r2.text, "html.parser")
+
+        hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
+        hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
+
+        for href in hrefs1:  # for a href directories
+            # skip navigation links and the releases deliberately not compared:
+            # anything before slackware-8.1, slackware-current, slackware-iso
+            # (handled separately by check_iso), slackware-pre-1.0-beta, unsupported
+            if (href.endswith("/") and href != "../" and href != "/"
+                    and not href.startswith("/")
+                    and not re.match(r'slackware-([1-7]|8\.0).*', href)
+                    and href != "slackware-iso/" and href != "slackware-current/"
+                    and href != "slackware-pre-1.0-beta/" and href != "unsupported/"):
+                print(href)
+                if href not in hrefs2:
+                    return False
+                elif cls.checker(site1+href, "CHECKSUMS.md5") != cls.checker(site2+href, "CHECKSUMS.md5"):
+                    return False
+        return True
+
+    @staticmethod
+    def check_iso(site1, site2):
+        # fetch both directory listings
+        r1 = requests.get(site1)
+        r2 = requests.get(site2)
+
+        # parse the HTML
+        s1 = BeautifulSoup(r1.text, "html.parser")
+        s2 = BeautifulSoup(r2.text, "html.parser")
+
+        hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
+        hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
+
+        for href in hrefs1:  # for a href directories
+            if (href.endswith("/") and href != "../" and href != "/"
+                    and not href.startswith("/") and not href.startswith("http")):
+                print(href)
+                if href not in hrefs2:
+                    return False
+        return True
+
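Note that check_iso only compares the two directory listings, matching the note above that for slackware-iso it is enough that our list of directories is the same; it does not compare CHECKSUMS.md5 timestamps the way scrape does.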
+    @classmethod
+    def check(cls, data, project):
+        """Check if project packages are up-to-date"""
+
+        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
+        upstream_url = data[project]["upstream"] + data[project]["file"]
+
+        return cls.scrape(upstream_url, csc_url) and cls.check_iso(upstream_url+"slackware-iso/", csc_url+"slackware-iso/")
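The entry point is the check classmethod, which test.py below exercises; a minimal end-to-end usage sketch:

    import json
    from projects import slackware

    with open("data.json", "r", encoding="utf-8") as file:
        data = json.load(file)
    print(slackware.check(data, "slackware"))  # True when the mirror is in sync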

@@ -21,7 +21,7 @@ class ubuntu_ports(Project):
 
         for href in hrefs1:  # for a href directories
             if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/"):
-                print(href)
+                # print(href)
                 if href not in hrefs2:
                     return False
                 elif requests.get(site1+href+"Release").text != requests.get(site2+href+"Release").text:

test.py (4 changes)
@@ -7,7 +7,7 @@ from datetime import timedelta
 import time
 import pandas as pd
 import re  # for salt stack specifically
-from projects import ubuntu_ports
+from projects import slackware
 import json  # import json to read project info stored in json file
 
 # this function is brute force looping through the whole directory and checking dates
@@ -65,7 +65,7 @@ def get_latest_date(web_dir):
 if __name__ == "__main__":
     with open("data.json", "r", encoding="utf-8") as file:
         data = json.load(file)
-        print(ubuntu_ports.check(data, "ubuntu_ports"))
+        print(slackware.check(data, "slackware"))
 
         """# website to be scrape
         site="https://cdimage.ubuntu.com/releases/"