|
|
|
import requests
|
|
|
|
import re
|
|
|
|
from project import Project
|
|
|
|
from shared import CSC_MIRROR
|
|
|
|
import itertools
|
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
|
|
|
|
class racket(Project):
    """Mirror-status checker for the Racket project.

    Determines whether the CSC mirror of Racket is up to date by scraping
    the version directory listing on both the CSC mirror and the upstream
    mirror and comparing the newest version found on each.
    """

    @staticmethod
    def max_version(processed_versions):
        """Return the newest version from a list of split version strings.

        Parameters:
            processed_versions: non-empty list of versions, each given as a
                list of dot-separated components as strings,
                e.g. "8.10/" -> ["8", "10"].

        Returns:
            The element of processed_versions that compares newest.
            Components are compared numerically when they are all digits
            (so "10" beats "9"); non-numeric components fall back to
            string comparison.  A version that extends another with extra
            components (e.g. 8.7.0.1 vs 8.7) counts as newer, matching the
            original longer-on-equal-prefix rule.

        Raises:
            ValueError: if processed_versions is empty.
        """
        def component_key(part):
            # Numeric components sort by value; non-numeric ones (e.g.
            # pre-release tags) sort after numbers, by string.
            if part.isdigit():
                return (0, int(part), "")
            return (1, 0, part)

        # Python list comparison is lexicographic and treats a proper
        # prefix as smaller, which gives "longer version with an equal
        # prefix is newer" for free.
        return max(
            processed_versions,
            key=lambda version: [component_key(part) for part in version],
        )

    @staticmethod
    def scrape(versions, site):
        """Collect version-directory names linked from a directory index page.

        Fetches *site* and appends every anchor target that looks like a
        version subdirectory — ends with "/" and is not "../", "/" or
        "recent/" — to *versions*, skipping duplicates.

        Parameters:
            versions: list mutated in place with the hrefs found.
            site: URL of the directory index page to scan.
        """
        response = requests.get(site)
        soup = BeautifulSoup(response.text, "html.parser")

        for anchor in soup.find_all("a"):
            # .get() tolerates <a> tags without an href attribute,
            # which would make attrs['href'] raise KeyError.
            href = anchor.get("href", "")
            if href.endswith("/") and href not in ("../", "/", "recent/"):
                if href not in versions:
                    versions.append(href)

    @classmethod
    def get_latest_version(cls, web_dir):
        """Return the newest version listed at *web_dir* as a component list.

        Scrapes the directory index at *web_dir* for version
        subdirectories and returns the newest one, split into its
        dot-separated components (e.g. ["8", "10"]).
        """
        versions = []
        cls.scrape(versions, web_dir)

        # "8.10/" -> ["8", "10"]
        processed_versions = [
            version.replace("/", "").split(".") for version in versions
        ]

        return cls.max_version(processed_versions)

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # Up to date iff both listings advertise the same newest version.
        return cls.get_latest_version(csc_url) == cls.get_latest_version(upstream_url)
|