forked from public/mirror-checker
parent 1df671b9e0
commit fe7d22e1e5
@@ -0,0 +1,91 @@
from bs4 import BeautifulSoup
import requests
import re
import datefinder  # another date-finding library
from project import Project
from shared import CSC_MIRROR
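# Project is the base class that the per-project checkers extend, and
# CSC_MIRROR is the CSC mirror's base URL; both are defined elsewhere in
# this repository.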


class netbsd(Project):
    """netbsd class"""

    @staticmethod
    def checker(directory_URL, file_name):
        page = requests.get(directory_URL).text
        file_index = page.find(file_name)
        # print(page)

        if file_index == -1:  # the file is not listed in the directory index
            return False

        # the index page shows a timestamp next to each file name; match either
        # "DD-Mon-YYYY HH:MM" or "YYYY-Mon-DD HH:MM" after the file name
        str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\w{3}-\d{2} \d{2}:\d{2})', page[file_index:])
        if not str_dates:  # file name found but no timestamp follows it
            return False

        # findall returns (group1, group2) tuples in which one group is always
        # empty, so joining a pair recovers the matched timestamp string
        # print(directory_URL, file_name)
        # print(list(datefinder.find_dates("".join(str_dates[0])))[0])
        return list(datefinder.find_dates("".join(str_dates[0])))[0]

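    # A sketch of the directory-index line that checker parses (this listing
    # line is illustrative, not copied from a real page):
    #   <a href="CHANGES">CHANGES</a>        21-Oct-2021 14:32  1.2M
    # For that line, checker would return datetime(2021, 10, 21, 14, 32).
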
    @classmethod
    def check_version(cls, site1, site2):
        # getting the request from url
        r = requests.get(site1)
        r1 = requests.get(site2)

        page1 = r.text
        page2 = r1.text

        # converting the text
        s1 = BeautifulSoup(page1, "html.parser")
        s2 = BeautifulSoup(page2, "html.parser")

        hrefs1 = s1.find_all("a")
        hrefs2 = s2.find_all("a")
        hrefs2_set = {i.attrs['href'] for i in hrefs2}

        for i in hrefs1:  # for a href directories
            href = i.attrs['href']

            if re.match(r'NetBSD-\d.*', href):  # versioned release directory
                date1 = cls.checker(site1+href, "CHANGES")
                if not date1:  # if the version is empty, ignore it
                    continue
                if href not in hrefs2_set:  # the mirror lacks this release
                    return False
                date2 = cls.checker(site2+href, "CHANGES")
                if not date2 or date1 > date2:  # missing or older CHANGES
                    return False
            elif href.startswith("NetBSD-") and href != "NetBSD-daily/":
                date1 = cls.checker(site1+href+"src/doc/", "CHANGES")
                if not date1:
                    continue
                if href not in hrefs2_set:
                    return False
                date2 = cls.checker(site2+href+"src/doc/", "CHANGES")
                if not date2 or date1 > date2:
                    return False
        return True

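    # Examples of hrefs the two branches above would handle (the names are
    # illustrative, not read from a live index):
    #   "NetBSD-9.2/"     -> compare NetBSD-9.2/CHANGES on both sites
    #   "NetBSD-archive/" -> compare NetBSD-archive/src/doc/CHANGES instead
    # "NetBSD-daily/" is deliberately skipped, presumably because daily
    # snapshots change too often to compare meaningfully.
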
    @classmethod
    def check_iso(cls, site1, site2):
        # getting the request from url
        r = requests.get(site1)
        r1 = requests.get(site2)

        page1 = r.text
        page2 = r1.text

        # converting the text
        s1 = BeautifulSoup(page1, "html.parser")
        s2 = BeautifulSoup(page2, "html.parser")

        hrefs1 = s1.find_all("a")
        hrefs2 = s2.find_all("a")
        hrefs2_set = {i.attrs['href'] for i in hrefs2}

        for i in hrefs1:  # for a href directories
            href = i.attrs['href']

            if href not in hrefs2_set:  # the mirror lacks this directory
                return False
            # compare the timestamps of the checksum files on both sides; a
            # missing or older SHA512/MD5 on the mirror means it is stale
            for checksum in ("SHA512", "MD5"):
                date1 = cls.checker(site1+href, checksum)
                if not date1:  # upstream has no such checksum file here
                    continue
                date2 = cls.checker(site2+href, checksum)
                if not date2 or date1 > date2:
                    return False
        return True

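    # Layout assumed under <site>/iso/ (the version directory is hypothetical):
    #   iso/9.2/SHA512, iso/9.2/MD5
    # Note the comparison is timestamp-based only; the checksums themselves
    # are never verified against file contents.
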
    @classmethod
    def check(cls, data, project):
        """Check if project packages are up-to-date"""

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # print(cls.check_version(upstream_url, csc_url))
        # print(cls.check_iso(upstream_url+"iso/", csc_url+"iso/"))
        return cls.check_version(upstream_url, csc_url) and cls.check_iso(upstream_url+"iso/", csc_url+"iso/")
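

# A minimal usage sketch, assuming the config layout that check() reads above
# (the keys "csc", "upstream" and "file" are taken from the code; the values
# below are hypothetical, not copied from the real mirror-checker config):
if __name__ == "__main__":
    data = {
        "netbsd": {
            "upstream": "https://ftp.netbsd.org/pub/NetBSD/",  # hypothetical upstream URL
            "csc": "netbsd/",  # hypothetical path under CSC_MIRROR
            "file": "",        # suffix appended to both base URLs
        }
    }
    # True means every release directory and every iso/ checksum timestamp on
    # the mirror is at least as new as upstream's
    print(netbsd.check(data, "netbsd"))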