forked from public/mirror-checker
Compare commits
No commits in common. "master" and "ng" have entirely different histories.
|
@ -1,142 +1,22 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
TODO.md
|
||||
bin/
|
||||
.vscode/
|
||||
|
||||
# C extensions
|
||||
# From https://raw.githubusercontent.com/github/gitignore/main/Go.gitignore
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# Idea
|
||||
.idea/
|
||||
*~
|
||||
# Go workspace file
|
||||
go.work
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
.DEFAULT_GOAL := build-n-run

# None of these targets produce a file named after themselves; declare them
# phony so a stray file (e.g. ./test) cannot shadow a target.
.PHONY: run build build-n-run docker-build docker-run test

run:
	./bin/mc2

build:
	go build -o bin/mc2 -ldflags "\
	-X 'mc2/config.BuildVersion=$$(git rev-parse --abbrev-ref HEAD)' \
	-X 'mc2/config.BuildUser=$$(id -u -n)' \
	-X 'mc2/config.BuildTime=$$(date)' \
	-X 'mc2/config.BuildGOOS=$$(go env GOOS)' \
	-X 'mc2/config.BuildARCH=$$(go env GOARCH)' \
	-s -w"

build-n-run:
	$(MAKE) build
	$(MAKE) run

docker-build:
	docker build -t mc2 .

docker-run:
	docker run --rm -p 4200:4200 mc2

test:
	go test -v ./...
|
104
README.md
104
README.md
|
@ -1,94 +1,38 @@
|
|||
# Mirror Checker
|
||||
# Mirror Checker 2
|
||||
|
||||
This mirror status checker determines whether CSC mirror is up-to-date with upstream.
|
||||
To be written in the future....
|
||||
|
||||
## How To Run
|
||||
|
||||
A configuration file may be provided through standard input. Without a configuration file, execute `python main.py`. By default, all the available distributions will be checked. With a configuration file, execute `python main.py < name_of_config_file.in`, for example, `python main.py < example.in`. In this case, only the distributions listed in the configuration file will be checked.
|
||||
## How to test
|
||||
|
||||
## Dev Notes
|
||||
|
||||
How the program works: We first have a general mirror check class called project.py which checks whether the timestamp in the directory of the mirror is in-sync with the upstream. Then, for each CSC mirror, a class is built which inherits from the general project.py class but often overrides the original check function with a check function specific to the mirror. A few big themes are: some check a mirror status tracker provided by the project mirrored; some check all the Release files for each version in a distro etc. website information which all the mirror checker classes need is stored in the data.json file.
|
||||
|
||||
Future notes: Because many of the mirror checkers are built very specific to each mirror. A slight change in the way the project manages their mirror-related websites, public repos etc. can drastically influence whether the mirror checker works correctly or not. These problems are also unfortunately very hard to detect, so it's important that CSC actively maintain the mirror checker so that it works as intended in the long term.
|
||||
## How to build
|
||||
|
||||
Extra notes: A test client for individual mirror checker classes is provided as test.py. To use it, simply change all occurrences of the imported project class
|
||||
Download dependencies:
|
||||
```
|
||||
go get -u
|
||||
```
|
||||
|
||||
## Resources
|
||||
## Examples
|
||||
|
||||
- [CSC Mirror](http://mirror.csclub.uwaterloo.ca/)
|
||||
- [Debian Mirror Status Checker](https://mirror-master.debian.org/status/mirror-status.html)
|
||||
- [Debian Mirror Status Checker Code](https://salsa.debian.org/mirror-team/mirror/status)
|
||||
```
|
||||
go run . check debian ubuntu < data/example.in # from the project root directory, read the config and run all checks for the specified projects
|
||||
```
|
||||
|
||||
if we can just view their repo online, we only have to remember the link for their repo and then check the latest timestamp in their repo the same way we check ours
|
||||
## Commands
|
||||
|
||||
After building...
|
||||
|
||||
## Notes (for devs)
|
||||
|
||||
|
||||
...
|
||||
|
||||
> **From the original mirror checker**: Future notes: Because many of the mirror checkers are built very specific to each mirror. A slight change in the way the project manages their mirror-related websites, public repos etc. can drastically influence whether the mirror checker works correctly or not. These problems are also unfortunately very hard to detect, so it's important that CSC actively maintain the mirror checker so that it works as intended in the long term.
|
||||
>
|
||||
> Extra notes: A test client for individual mirror checker classes is provided as test.py. To use it, simply change all occurrences of the imported project class
|
||||
|
||||
even if the date relies on a specific file in their repo, we can still find the right link for it
|
||||
|
||||
to find repos of the mirrored projects to check, just search "projectName mirrors"
|
||||
|
||||
## Checker Information
|
||||
|
||||
- almalinux
|
||||
- alpine
|
||||
- apache
|
||||
- archlinux
|
||||
- centos
|
||||
- ceph
|
||||
- CPAN
|
||||
- CRAN: https://cran.r-project.org/mirmon_report.html has a mirror tracker
|
||||
- csclub: for now, this is the upstream itself, so it need not be checked
|
||||
- CTAN: https://www.ctan.org/mirrors/mirmon has a mirror tracker
|
||||
- Cygwin
|
||||
- damnsmalllinux: http://distro.ibiblio.org/damnsmall/ not checking this, since it's abandoned
|
||||
- debian
|
||||
- debian-backports: this is a legacy thing, no longer have to check
|
||||
- debian-cd
|
||||
- debian-multimedia
|
||||
- debian-ports
|
||||
- debian-security
|
||||
- debian-volatile: this is a legacy thing, no longer have to check
|
||||
- eclipse
|
||||
- emacsconf: for now, this is the upstream itself, so it need not be checked
|
||||
- fedora
|
||||
- freeBSD
|
||||
- gentoo-distfiles
|
||||
- gentoo-portage
|
||||
- gnome
|
||||
- GNU
|
||||
- gutenberg
|
||||
- ipfire
|
||||
- kde
|
||||
- kde-applicationdata
|
||||
- kernel
|
||||
- linuxmint: https://mirrors.edge.kernel.org/linuxmint/ candidate for brute force looping
|
||||
- linuxmint-packages: https://mirrors.edge.kernel.org/linuxmint-packages/ Checking the timestamp of either the Release file or the Packages file should suffice.
|
||||
- macPorts: only distfiles has public repo, no timestamp, too large to loop through, comparing ports.tar.gz in distfiles
|
||||
- manjaro
|
||||
- mxlinux
|
||||
- mxlinux-iso: this one seems out of sync on the official tracker for 134 days, which is weird
|
||||
- mysql: http://mirrors.sunsite.dk/mysql/
|
||||
- NetBSD: http://ftp.netbsd.org/pub/NetBSD/ checking timestamps of change files in different versions, and SHA512, MD5 files in the isos of different versions
|
||||
- nongnu: http://download.savannah.nongnu.org/releases/ https://savannah.gnu.org/maintenance/Mirmon/ http://download.savannah.gnu.org/mirmon/savannah/
|
||||
- openbsd
|
||||
- opensuse: http://download.opensuse.org/ check Update.repo files in folders inside the update folder, not checking tumbleweed-non-oss/ and tumbleweed/ temporarily
|
||||
- parabola: https://repo.parabola.nu/ https://www.parabola.nu/mirrors/status/
|
||||
- pkgsrc
|
||||
- puppylinux: https://distro.ibiblio.org/puppylinux/ check the ISO files or htm files in the folders starting with puppy
|
||||
- qtproject: https://download.qt.io/
|
||||
- racket: https://mirror.racket-lang.org/installers/ make sure that we have the latest version number under racket-installers
|
||||
- raspberry pi: https://archive.raspberrypi.org/ Checking the timestamp of either the Release file or the Packages file should suffice.
|
||||
- raspbian: http://archive.raspbian.org/ snapshotindex.txt is most likely a timestamp, though this is unconfirmed. Also, our mirror appears to be completely outdated; it is not listed on the official mirror list
|
||||
- sagemath: same source tarballs as them (the sage-*.tar.gz files under 'Source Code')
|
||||
- salt stack: checking the "Latest release" text under the 'About' header
|
||||
- scientific: https://scientificlinux.org/downloads/sl-mirrors/ not checking this one since it's abandoned
|
||||
- slackware: https://mirrors.slackware.com/slackware/ check whether we have each release and whether the timestamp for CHECKSUMS.md5 in each release is the same, for slackware-iso, just make sure that our list of directories is the same
|
||||
- tdf: https://download.documentfoundation.org/
|
||||
- trisquel: http://archive.trisquel.info/trisquel/ checking Release file for all versions in packages/dist and md5sum.txt in iso/ with two other mirrors
|
||||
- ubuntu: https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-archive
|
||||
- ubuntu-ports: http://ports.ubuntu.com/ubuntu-ports/ checking the Release files in dists
|
||||
- ubuntu-ports-releases: https://cdimage.ubuntu.com/releases/ has public repo, no timestamp, no status tracker, brute force looped it
|
||||
- ubuntu-releases: https://releases.ubuntu.com/
|
||||
- vlc: http://download.videolan.org/pub/videolan/
|
||||
- x.org: https://www.x.org/releases/ check all of the files under each directory under /x.org/individual/, and make sure that we have all of the files which the upstream has, ignoring the xcb folder
|
||||
- Xiph: https://ftp.osuosl.org/pub/xiph/releases/ loop through each directory in xiph/releases/ and trying to compare the timestamp of the checksum files
|
||||
- xubuntu-releases: https://cdimage.ubuntu.com/xubuntu/releases/ candidate for brute force looping since it has few folders
|
|
@ -0,0 +1,3 @@
|
|||
package checkers
|
||||
|
||||
// https://mirrors.almalinux.org/
|
|
@ -0,0 +1,246 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// TODO: uuid for each job
// TODO: job history

// JobDescription is a single unit of work for a worker: one checker to run,
// the group it reports into, and an optional callback invoked with the result.
type JobDescription struct {
	Group    *JobGroup
	Checker  *ProjectChecker
	Callback CheckerResultCallback
}

// JobGroup tracks a batch of related checker jobs, along with the channels
// used to deliver per-checker results and the aggregated final status.
// NOTE(review): Results and FinalStatus are pointers to channels; channels are
// already reference types, so plain chan fields would likely suffice — confirm
// before changing, since workers dereference these fields directly.
type JobGroup struct {
	Projects    []string
	Name        string
	Jobs        []*JobDescription
	Results     *(chan CheckerResult)
	FinalStatus *(chan CheckerStatus)
	Wg          *sync.WaitGroup
	WorkerID    int
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// TODO: run check without callback
|
||||
// TODO: create "custom" job groups??
|
||||
|
||||
// Given a project, all checks are initiated.
|
||||
func (p *Project) RunChecks(callback CheckerResultCallback) (*JobGroup, error) {
|
||||
checks := p.Checkers
|
||||
n := len(checks)
|
||||
|
||||
if n == 0 {
|
||||
return nil, errors.New("No checkers found for project.")
|
||||
}
|
||||
|
||||
if n != p.NumOfCheckers {
|
||||
return nil, errors.New("Number of checkers does not match project config.")
|
||||
}
|
||||
|
||||
// TODO: assert job group properties?
|
||||
|
||||
log.Debug().Msgf("Running %d checks for project %s", n, p.Name)
|
||||
|
||||
var jg JobGroup
|
||||
|
||||
statusResult := make(chan CheckerStatus)
|
||||
|
||||
jobs := make([]*JobDescription, n)
|
||||
for i, c := range p.Checkers {
|
||||
jobs[i] = &JobDescription{
|
||||
Group: &jg,
|
||||
Checker: c,
|
||||
Callback: callback,
|
||||
}
|
||||
}
|
||||
results := make(chan CheckerResult, n)
|
||||
|
||||
jg = JobGroup{
|
||||
Projects: []string{p.Name},
|
||||
Name: fmt.Sprintf("check_%s_project", p.Name),
|
||||
Jobs: jobs,
|
||||
Results: &results,
|
||||
FinalStatus: &statusResult,
|
||||
Wg: &sync.WaitGroup{},
|
||||
}
|
||||
|
||||
for i, jd := range jobs {
|
||||
log.Info().Str("project", p.Name).
|
||||
Str("check_name", jd.Checker.Name).
|
||||
Msgf("Running check %d", i)
|
||||
jd.QueueJob(callback)
|
||||
}
|
||||
|
||||
go func() {
|
||||
// Wait for all checks to complete
|
||||
log.Debug().
|
||||
Str("project", p.Name).
|
||||
Str("job_group", jg.Name).
|
||||
Msgf("Waiting for %d checks to complete.", n)
|
||||
for i := 0; i < n; i++ {
|
||||
res := <-results
|
||||
log.Debug().Str("project", p.Name).
|
||||
Str("job_group", jg.Name).
|
||||
Str("worker_id", strconv.Itoa(jg.WorkerID)).
|
||||
Str("checker_name", res.CheckerName).
|
||||
Msg("Received checker result.")
|
||||
if res.Status == CHECKER_ERROR {
|
||||
// TODO: log checker UUID
|
||||
log.Error().Err(res.Error).Msg("Checker returned error.")
|
||||
statusResult <- CHECKER_ERROR
|
||||
// TODO: stop exisiting jobs
|
||||
return
|
||||
}
|
||||
if res.Status == CHECKER_FAIL {
|
||||
// TODO: log checker UUID
|
||||
log.Debug().Msg("Checker failed.")
|
||||
statusResult <- CHECKER_FAIL
|
||||
return
|
||||
}
|
||||
}
|
||||
close(results)
|
||||
statusResult <- CHECKER_SUCCESS
|
||||
}()
|
||||
|
||||
return &jg, nil
|
||||
}
|
||||
|
||||
func (jd *JobDescription) addLogProps(evt *zerolog.Event) *zerolog.Event {
|
||||
return evt.
|
||||
Str("project", jd.Group.Projects[0]).
|
||||
Str("job_group", jd.Group.Name).
|
||||
Str("worker_id", strconv.Itoa(jd.Group.WorkerID)).
|
||||
Str("checker_name", jd.Checker.Name)
|
||||
}
|
||||
|
||||
// LogInfo returns an info-level log event pre-populated with the job's
// identification fields.
func (jd *JobDescription) LogInfo() *zerolog.Event {
	return jd.addLogProps(log.Info())
}

// LogError returns an error-level log event pre-populated with the job's
// identification fields.
func (jd *JobDescription) LogError() *zerolog.Event {
	return jd.addLogProps(log.Error())
}
|
||||
|
||||
func ReadEnabledFromStdin() error {
|
||||
scanner := bufio.NewScanner(os.Stdin)
|
||||
scanner.Split(bufio.ScanWords)
|
||||
|
||||
for scanner.Scan() {
|
||||
token := scanner.Text()
|
||||
_, err := LoadProject(token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return scanner.Err()
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
const WORKER_COUNT = 5
|
||||
|
||||
func StartWorkers() {
|
||||
log.Debug().Msgf("Starting %d workers.", WORKER_COUNT)
|
||||
for i := 0; i < WORKER_COUNT; i++ {
|
||||
go worker(i)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: available worker?
// var workerAvailable sync.WaitGroup

// jobs is the shared queue feeding all workers; its buffer matches the worker
// count so a few jobs can be queued without blocking the producer.
var jobs = make(chan JobDescription, WORKER_COUNT)
|
||||
func (job JobDescription) QueueJob(callback CheckerResultCallback) {
|
||||
if job.Group.Wg != nil {
|
||||
job.Group.Wg.Add(1)
|
||||
}
|
||||
// TODO: workerAvailable.Wait() // wait for a worker to be available
|
||||
|
||||
jobs <- job
|
||||
log.Debug().Str("checker_name", job.Checker.Name).Msg("Queued checker.")
|
||||
}
|
||||
|
||||
// StopWorkers closes the shared job queue, letting each worker's range loop
// terminate once the remaining queued jobs drain. Queueing after this panics.
func StopWorkers() {
	log.Debug().Msg("Stopping workers.")
	close(jobs)
	// TODO: stop in process jobs???
}
|
||||
|
||||
// TODO: id to uuid?
|
||||
func worker(id int) {
|
||||
for j := range jobs {
|
||||
var res CheckerResult = DefaultCheckerResult
|
||||
res.CheckerName = j.Checker.Name
|
||||
res.ProjectName = j.Group.Name
|
||||
res.Time = time.Now()
|
||||
|
||||
log.Debug().Str("project", j.Checker.Name).
|
||||
Str("worker_id", strconv.FormatInt(int64(id), 10)).
|
||||
Msgf("Running check.")
|
||||
success, err := j.Checker.CheckProject()
|
||||
res.EndTime = time.Now()
|
||||
|
||||
if err != nil {
|
||||
res.Status = CHECKER_ERROR
|
||||
res.Error = err
|
||||
} else if success {
|
||||
res.Status = CHECKER_SUCCESS
|
||||
} else {
|
||||
res.Status = CHECKER_FAIL
|
||||
}
|
||||
|
||||
*j.Group.Results <- res
|
||||
if j.Group.Wg != nil {
|
||||
j.Group.Wg.Done() // called before callback to prevent unnecessary waiting
|
||||
}
|
||||
if j.Callback != nil {
|
||||
j.Callback(res)
|
||||
} else {
|
||||
log.Debug().Str("project", j.Checker.Name).
|
||||
Str("worker_id", strconv.FormatInt(int64(id), 10)).
|
||||
Msgf("No callback registered.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Looks up a project by name, and returns only if the project is enabled.
|
||||
func GetProject(name string) (*Project, error) {
|
||||
res, exists := EnabledProjects[name]
|
||||
if !exists {
|
||||
return res, errors.New("requested project not found")
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// Loads a project by name, and returns if the project is found.
|
||||
func LoadProject(name string) (*Project, error) {
|
||||
res, exists := SupportedProjects[name]
|
||||
if !exists {
|
||||
return res, errors.New("requested project not found")
|
||||
}
|
||||
if res.Properties.CSC == "unknown" {
|
||||
log.Debug().Str("csc", res.Properties.CSC).
|
||||
Msgf("Requested project %s has default properties.", name)
|
||||
return res, errors.New("requested project has invalid properties (check the project config)")
|
||||
}
|
||||
|
||||
log.Debug().Msgf("Loading Project %s", name)
|
||||
EnabledProjects[name] = res
|
||||
return res, nil
|
||||
}
|
|
@ -0,0 +1,49 @@
|
|||
package checkers
|
||||
|
||||
import (
	"encoding/json"
	"os"
	"sort"

	"git.csclub.uwaterloo.ca/public/mirror-checker/config"
	"github.com/rs/zerolog/log"
)
|
||||
|
||||
type MirrorData map[string]ProjectProperties
|
||||
|
||||
func LoadFromFile(path string) error {
|
||||
raw_data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var data MirrorData
|
||||
err = json.Unmarshal(raw_data, &data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: load in alphabetical order
|
||||
// Access the parsed data
|
||||
for proj, prop := range data {
|
||||
log.Info().Str("project", proj).Msg("Enabled Project.")
|
||||
|
||||
sp, exists := SupportedProjects[config.NormalizeName(proj)]
|
||||
if !exists {
|
||||
log.Warn().Str("project", proj).Msg("Project not supported.")
|
||||
continue
|
||||
}
|
||||
|
||||
sp.Properties = prop
|
||||
|
||||
log.Debug().
|
||||
Str("distribution", proj).
|
||||
// Time("out_of_sync_since", prop.OOSSince).
|
||||
Int64("out_of_sync_interval", prop.OOSInterval).
|
||||
Str("csc", prop.CSC).
|
||||
Str("upstream", prop.Upstream).
|
||||
Str("file", prop.File).
|
||||
Msg("Loaded project config.")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var DebianProject Project = Project{
|
||||
Name: "debian",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("debian", true, func(*Project) (bool, error) {
|
||||
// config sanity check
|
||||
data := EnabledProjects["debian"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.Upstream, data.File)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Debian", "config sanity check")
|
||||
}
|
||||
|
||||
// Modelled after: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/debian.py
|
||||
// NOTE: cloned in debiancd, debianmultimedia, debianports, debiansecurity
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
// make HTTP GET request to csc_url
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Debian", "getting CSC file")
|
||||
}
|
||||
|
||||
// make HTTP GET request to upstream_url
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Debian", "getting upstream file")
|
||||
}
|
||||
|
||||
// parse bodies as string
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
// same date stamp
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// check if delta is within threshold
|
||||
// equiv to: "%a %b %d %H:%M:%S UTC %Y"
|
||||
date_format := "Mon Jan 2 15:04:05 MST 2006"
|
||||
|
||||
CSC_date, err := time.Parse(date_format, strings.Split(CSC, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Debian", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(date_format, strings.Split(upstream, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Debian", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var DebianCDProject Project = Project{
|
||||
Name: "debiancd",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("debiancd", true, func(*Project) (bool, error) {
|
||||
// based on debian.go checker
|
||||
// config sanity check
|
||||
data := EnabledProjects["debiancd"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.Upstream, data.File)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianCD", "config sanity check")
|
||||
}
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianCD", "getting CSC file")
|
||||
}
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianCD", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
date_format := "Mon 2 Jan 15:04:05 MST 2006"
|
||||
|
||||
CSC_date, err := time.Parse(date_format, strings.Split(CSC, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianCD", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(date_format, strings.Split(upstream, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianCD", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var DebianMultimediaProject Project = Project{
|
||||
Name: "debianmultimedia",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("debianmultimedia", true, func(*Project) (bool, error) {
|
||||
// based on debian.go checker
|
||||
// config sanity check
|
||||
data := EnabledProjects["debianmultimedia"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.Upstream, data.File)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianMultimedia", "config sanity check")
|
||||
}
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianMultimedia", "getting CSC file")
|
||||
}
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianMultimedia", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
date_format := "Mon Jan 2 15:04:05 MST 2006"
|
||||
|
||||
CSC_date, err := time.Parse(date_format, strings.Split(CSC, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianMultimedia", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(date_format, strings.Split(upstream, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianMultimedia", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var DebianPortsProject Project = Project{
|
||||
Name: "debianports",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("debianports", true, func(*Project) (bool, error) {
|
||||
// based on debian.go checker
|
||||
// config sanity check
|
||||
data := EnabledProjects["debianports"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.Upstream, data.File)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianPorts", "config sanity check")
|
||||
}
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianPorts", "getting CSC file")
|
||||
}
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianPorts", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
date_format := "Mon Jan 2 15:04:05 MST 2006"
|
||||
|
||||
CSC_date, err := time.Parse(date_format, strings.Split(CSC, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianPorts", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(date_format, strings.Split(upstream, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianPorts", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var DebianSecurityProject Project = Project{
|
||||
Name: "debiansecurity",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("debiansecurity", true, func(*Project) (bool, error) {
|
||||
// based on debian.go checker
|
||||
// config sanity check
|
||||
data := EnabledProjects["debiansecurity"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.Upstream, data.File)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianSecurity", "config sanity check")
|
||||
}
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianSecurity", "getting CSC file")
|
||||
}
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianSecurity", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
date_format := "Mon Jan 2 15:04:05 MST 2006"
|
||||
|
||||
CSC_date, err := time.Parse(date_format, strings.Split(CSC, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianSecurity", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(date_format, strings.Split(upstream, "\n")[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "DebianSecurity", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,199 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// DefaultProjectProperties is the fallback ProjectProperties assigned to a
// project before its JSON config is loaded. String fields use the "unknown"
// sentinel and OOSInterval defaults to one hour (3600 seconds).
// NOTE(review): OOSSince is captured once at package init (time.Now()) and
// shared by every project that keeps the default — confirm that is intended.
var DefaultProjectProperties = ProjectProperties{
	OOSSince:    time.Now(),
	OOSInterval: 3600,
	CSC:         "unknown",
	Upstream:    "unknown",
	File:        "unknown",
}
|
||||
|
||||
// DefaultCheckerResult is the template result for a checker run that has
// not finished yet: placeholder names and CHECKER_PROGRESS status.
var DefaultCheckerResult CheckerResult = CheckerResult{
	ProjectName: "unknown",
	CheckerName: "unknown",
	Status:      CHECKER_PROGRESS,
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// SupportedProjects maps a normalized project name to its Project
// definition. Only uncommented entries have been ported from the original
// Python mirror-checker; the commented-out entries document mirrors that
// still need a Go implementation.
var SupportedProjects map[string]*Project = map[string]*Project{
	// "almalinux":&AlmaLinuxProject,
	// "alpine":&AlpineProject,
	// "apache":&ApacheProject,
	// "arch":&ArchProject,
	// "artix":&ArtixProject,
	// "centos":&CentosProject,
	// "ceph":&CephProject,
	// "cpan":&CpanProject,
	// "cran":&CranProject,
	// "ctan":&CtanProject,
	// "cygwin":&CygwinProject,
	"debian":           &DebianProject,
	"debiancd":         &DebianCDProject,
	"debianmultimedia": &DebianMultimediaProject,
	"debianports":      &DebianPortsProject,
	"debiansecurity":   &DebianSecurityProject,
	// "eclipse":&EclipseProject,
	"fedora": &FedoraProject,
	// "freebsd":&FreebsdProject,
	"gentoodistfiles": &GentooDistProject,
	"gentooportage":   &GentooPortageProject,
	// "gnome":&GnomeProject,
	// "gnu":&GnuProject,
	// "gutenberg":&GutenbergProject,
	// "ipfire":&IpfireProject,
	// "kde":&KdeProject,
	// "kdeapplicationdata":&KdeApplicationDataProject,
	// "kernel":&KernelProject,
	// "linuxmint":&LinuxMintProject,
	// "linuxmint_packages":&LinuxMintPackagesProject,
	// "macports":&MacPortsProject,
	"manjaro": &ManjaroProject,
	// "mxlinux":&MxLinuxProject,
	// "mxlinux_iso":&MxLinuxIsoProject,
	// "mysql":&MysqlProject,
	// "netbsd":&NetBsdProject,
	// "nongnu":&NongnuProject,
	// "openbsd":&OpenbsdProject,
	// "opensuse":&OpensuseProject,
	// "parabola":&ParabolaProject,
	// "pkgsrc":&PkgsrcProject,
	// "puppy_linux":&PuppyLinuxProject,
	// "qtproject":&QtProject,
	// "racket":&RacketProject,
	// "raspberrypi":&RaspberrypiProject,
	// "raspbian":&RaspbianProject,
	// "sage":&SageProject,
	// "saltstack":&SaltStackProject,
	// "slackware":&SlackwareProject,
	// "tdf":&TdfProject,
	// "trisquel":&TrisquelProject,
	"ubuntu": &UbuntuProject,
	// "ubuntu_ports":&UbuntuPortsProject,
	// "ubuntu_ports_releases":&UbuntuPortsReleasesProject,
	// "ubuntu_releases":&UbuntuReleasesProject,
	// "vlc":&VlcProject,
	// "x_org":&XorgProject,
	// "xiph":&XiphProject,
	// "xubuntu_releases":&XubuntuReleasesProject,
}
|
||||
|
||||
// LoadDefaultProjects registers every default-enabled project by calling
// LoadProject for it. Commented-out calls mirror the not-yet-ported
// entries in SupportedProjects.
// NOTE(review): "gentoodistfiles", "gentooportage" and "manjaro" are
// uncommented in SupportedProjects but commented out here, so they are
// supported yet never loaded by default — confirm this is intentional.
func LoadDefaultProjects() {
	// Load every project that has an implementation.

	log.Info().Msg("Loading Default Projects.")

	// LoadProject("almalinux")
	// LoadProject("alpine")
	// LoadProject("apache")
	// LoadProject("arch")
	// LoadProject("artix")
	// LoadProject("centos")
	// LoadProject("ceph")
	// LoadProject("cpan")
	// LoadProject("cran")
	// LoadProject("ctan")
	// LoadProject("cygwin")
	LoadProject("debian")
	LoadProject("debiancd")
	LoadProject("debianmultimedia")
	LoadProject("debianports")
	LoadProject("debiansecurity")
	// LoadProject("eclipse")
	LoadProject("fedora")
	// LoadProject("freebsd")
	// LoadProject("gentoodistfiles")
	// LoadProject("gentooportage")
	// LoadProject("gnome")
	// LoadProject("gnu")
	// LoadProject("gutenberg")
	// LoadProject("ipfire")
	// LoadProject("kde")
	// LoadProject("kdeapplicationdata")
	// LoadProject("kernel")
	// LoadProject("linuxmint")
	// LoadProject("linuxmint_packages")
	// LoadProject("macports")
	// LoadProject("manjaro")
	// LoadProject("mxlinux")
	// LoadProject("mxlinux_iso")
	// LoadProject("mysql")
	// LoadProject("netbsd")
	// LoadProject("nongnu")
	// LoadProject("openbsd")
	// LoadProject("opensuse")
	// LoadProject("parabola")
	// LoadProject("pkgsrc")
	// LoadProject("puppy_linux")
	// LoadProject("qtproject")
	// LoadProject("racket")
	// LoadProject("raspberrypi")
	// LoadProject("raspbian")
	// LoadProject("sage")
	// LoadProject("saltstack")
	// LoadProject("slackware")
	// LoadProject("tdf")
	// LoadProject("trisquel")
	LoadProject("ubuntu")
	// LoadProject("ubuntu_ports")
	// LoadProject("ubuntu_ports_releases")
	// LoadProject("ubuntu_releases")
	// LoadProject("vlc")
	// LoadProject("x_org")
	// LoadProject("xiph")
	// LoadProject("xubuntu_releases")

}
|
||||
|
||||
// AssertStrings verifies that every argument is a non-empty string.
// It returns nil when all strings pass, or an error naming the 1-based
// position of the first empty string.
func AssertStrings(s ...string) error {
	for idx := range s {
		if len(s[idx]) != 0 {
			continue
		}
		return fmt.Errorf("failed assert: string %d of %d is empty", idx+1, len(s))
	}
	return nil
}
|
||||
|
||||
// TODO: add checker logger
|
||||
func GetDefaultChecker(name string, def bool, f func(*Project) (bool, error)) *ProjectChecker {
|
||||
checkerName := name
|
||||
if def {
|
||||
checkerName += "_default"
|
||||
}
|
||||
|
||||
// configure checker function
|
||||
var checkerFunc func() (bool, error)
|
||||
if f != nil {
|
||||
// has custom checker function
|
||||
checkerFunc = func() (bool, error) {
|
||||
proj, err := GetProject(name)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return f(proj)
|
||||
}
|
||||
} else {
|
||||
// no custom checker function
|
||||
checkerFunc = func() (bool, error) {
|
||||
return false, fmt.Errorf("project '%s' not supported", name)
|
||||
}
|
||||
}
|
||||
|
||||
checker := ProjectChecker{
|
||||
Name: checkerName,
|
||||
CheckProject: checkerFunc,
|
||||
Default: def,
|
||||
}
|
||||
|
||||
return &checker
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var FedoraProject Project = Project{
|
||||
Name: "fedora",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("fedora", true, func(*Project) (bool, error) {
|
||||
// config sanity check
|
||||
data := EnabledProjects["fedora"].Properties
|
||||
err := AssertStrings(config.MirrorBaseURL, data.CSC, data.File, data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Fedora", "config sanity check")
|
||||
}
|
||||
|
||||
// SOURCE: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/fedora.py
|
||||
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Fedora", "getting CSC file")
|
||||
}
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Fedora", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// # Date example: Fedora-Rawhide-20220725.n.1
|
||||
data_format := "20060102"
|
||||
|
||||
CSC_date, err := time.Parse(data_format, CSC[15:23])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Fedora", "parsing CSC date")
|
||||
}
|
||||
CSC_utc_time := CSC_date.Unix()
|
||||
|
||||
upstream_date, err := time.Parse(data_format, upstream[15:23])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Fedora", "parsing upstream date")
|
||||
}
|
||||
upstream_utc_time := upstream_date.Unix()
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
)
|
||||
|
||||
var GentooDistProject Project = Project{
|
||||
Name: "gentoodistfiles",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("gentoodistfiles", true, func(*Project) (bool, error) {
|
||||
// config sanity check
|
||||
data := EnabledProjects["gentoodistfiles"].Properties
|
||||
|
||||
// SOURCE: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/gentoodistfiles.py
|
||||
|
||||
// csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
// upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
csc_url := config.MirrorBaseURL + data.CSC + data.File
|
||||
upstream_url := data.Upstream + data.File
|
||||
|
||||
// req = requests.get(csc_url)
|
||||
// req.raise_for_status()
|
||||
// CSC = req.text
|
||||
csc_body, err := httpGET(csc_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooDistFiles", "getting CSC file")
|
||||
}
|
||||
|
||||
upstream_body, err := httpGET(upstream_url)
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooDistFiles", "getting upstream file")
|
||||
}
|
||||
|
||||
CSC := string(csc_body)
|
||||
upstream := string(upstream_body)
|
||||
|
||||
if CSC == upstream {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// parse time as int
|
||||
CSC_utc_time, err := strconv.ParseInt(strings.TrimSpace(CSC[0:11]), 10, 64)
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooDistFiles", "parsing CSC date")
|
||||
}
|
||||
upstream_utc_time, err := strconv.ParseInt(strings.TrimSpace(upstream[0:11]), 10, 64)
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooDistFiles", "parsing upstream date")
|
||||
}
|
||||
|
||||
delta := (upstream_utc_time - CSC_utc_time)
|
||||
|
||||
return (delta < data.OOSInterval && delta > -data.OOSInterval), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var GentooPortageProject Project = Project{
|
||||
Name: "gentooportage",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("gentooportage", true, func(*Project) (bool, error) {
|
||||
// config sanity check
|
||||
data := EnabledProjects["gentooportage"].Properties
|
||||
// TODO: assert
|
||||
err := AssertStrings()
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooPortage", "config sanity check")
|
||||
}
|
||||
|
||||
// SOURCE: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/gentooportage.py
|
||||
|
||||
upstream_body, err := httpGET(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooPortage", "getting upstream file")
|
||||
}
|
||||
page := string(upstream_body)
|
||||
|
||||
indexOfFile := strings.Index(page, "rsync4.ca.gentoo.org")
|
||||
if indexOfFile == -1 {
|
||||
return false, GetError(nil, "GentooPortage", "no index of file")
|
||||
}
|
||||
|
||||
re := regexp.MustCompile(`(\d+ minutes?)|(\d+ hours?)|(\d+(\.)?\d+ days?)`)
|
||||
m := re.FindStringSubmatch(page[indexOfFile:])
|
||||
if len(m) == 0 || len(m[0]) == 0 {
|
||||
return false, GetError(nil, "GentooPortage", "no matches for regex in file")
|
||||
}
|
||||
|
||||
// fmt.Println(m[0])
|
||||
// eg. duration: "20 minutes"
|
||||
duration, err := getTimeDelta(m[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "GentooPortage", "parsing duration")
|
||||
}
|
||||
|
||||
// Return whether the duration is less than or equal to the out_of_sync_interval
|
||||
return (duration <= time.Duration(data.OOSInterval)*time.Second), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,49 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var LinuxMintProject Project = Project{
|
||||
Name: "linuxmint",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("linuxmint", true, func(*Project) (bool, error) {
|
||||
data := EnabledProjects["linuxmint"].Properties
|
||||
err := AssertStrings(data.Upstream)
|
||||
|
||||
if err != nil {
|
||||
return false, GetError(err, "Linux Mint", "config sanity check")
|
||||
}
|
||||
upstream_body, err := httpGET(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Linux Mint", "getting upstream file")
|
||||
}
|
||||
page := string(upstream_body)
|
||||
indexOfFile := strings.Index(page, "mirror.csclub.uwaterloo.ca/linuxmint")
|
||||
if indexOfFile == -1 {
|
||||
return false, GetError(nil, "Linux Mint", "no index of file")
|
||||
}
|
||||
re := regexp.MustCompile(`(?P<hours>\d+):(?P<minutes>\d+)`)
|
||||
m := re.FindStringSubmatch(page[indexOfFile:])
|
||||
if len(m) == 0 || len(m[0]) == 0 {
|
||||
return false, GetError(nil, "Linux Mint", "no matches for regex in file")
|
||||
}
|
||||
split := strings.Split(m[0], ":")
|
||||
hours, err := strconv.Atoi(split[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Linux Mint", "parsing hours")
|
||||
}
|
||||
minutes, err := strconv.Atoi(split[1])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Linux Mint", "parsing minutes")
|
||||
}
|
||||
duration := time.Duration(hours)*time.Hour + time.Duration(minutes)*time.Minute
|
||||
return (duration <= time.Duration(data.OOSInterval)*time.Second), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var ManjaroProject Project = Project{
|
||||
Name: "manjaro",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("manjaro", true, func(*Project) (bool, error) {
|
||||
// config sanity check
|
||||
data := EnabledProjects["manjaro"].Properties
|
||||
// TODO: assert
|
||||
err := AssertStrings()
|
||||
if err != nil {
|
||||
return false, GetError(err, "Manjaro", "config sanity check")
|
||||
}
|
||||
|
||||
// SOURCE: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/manjaro.py
|
||||
|
||||
upstream_body, err := httpGET(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Manjaro", "getting upstream file")
|
||||
}
|
||||
page := string(upstream_body)
|
||||
|
||||
indexOfFile := strings.Index(page, "mirror.csclub.uwaterloo.ca/manjaro")
|
||||
if indexOfFile == -1 {
|
||||
return false, GetError(nil, "Manjaro", "no index of file")
|
||||
}
|
||||
|
||||
re := regexp.MustCompile(`(?P<hours>\d+):(?P<minutes>\d+)`)
|
||||
m := re.FindStringSubmatch(page[indexOfFile:])
|
||||
if len(m) == 0 || len(m[0]) == 0 {
|
||||
return false, GetError(nil, "Manjaro", "no matches for regex in file")
|
||||
}
|
||||
|
||||
// fmt.Println(m[0])
|
||||
// eg. 02:33
|
||||
split := strings.Split(m[0], ":")
|
||||
hours, err := strconv.Atoi(split[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Manjaro", "parsing hours")
|
||||
}
|
||||
minutes, err := strconv.Atoi(split[1])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Manjaro", "parsing minutes")
|
||||
}
|
||||
|
||||
duration := time.Duration(hours)*time.Hour + time.Duration(minutes)*time.Minute
|
||||
|
||||
return (duration <= time.Duration(data.OOSInterval)*time.Second), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
package checkers
|
||||
|
||||
import "time"
|
||||
|
||||
// "Projects" which have valid configs (populated by LoadProject).
var EnabledProjects = make(map[string]*Project)

// TODO: refactor all errors into variables

////////////////////////////////////////////////////////////////////////////////
// Project, Project Properties, Project Checker

// Project ties a mirror project's name and configured properties to the
// set of checkers that decide whether our mirror of it is in sync.
type Project struct {
	Name          string
	Properties    ProjectProperties
	NumOfCheckers int
	Checkers      []*ProjectChecker
}

// ProjectProperties mirrors one project entry of the JSON config file.
type ProjectProperties struct {
	OOSSince    time.Time //`json:"out_of_sync_since"`
	OOSInterval int64     `json:"out_of_sync_interval"` // in seconds
	CSC         string    `json:"csc"`                  // path of the project on the CSC mirror
	Mirrors     []string  `json:"mirrors"`              // alternate upstream mirrors, if any
	Upstream    string    `json:"upstream"`             // upstream base URL
	File        string    `json:"file"`                 // file fetched from both sides for comparison
}

// ProjectChecker is a single named check for one project.
type ProjectChecker struct {
	Name         string `json:"name"`
	CheckProject CPFunc
	Default      bool `json:"default"`
	// TODO: other severity levels?
}

// CPFunc runs one check and reports (inSync, error).
type CPFunc func() (bool, error)

// //////////////////////////////////////////////////////////////////////////////
// Checker Status and Checker Result

// CheckerStatus is the lifecycle state of one checker run.
type CheckerStatus string

var CHECKER_SUCCESS CheckerStatus = "success"
var CHECKER_PROGRESS CheckerStatus = "progress"
var CHECKER_ERROR CheckerStatus = "error"
var CHECKER_FAIL CheckerStatus = "fail"

// CheckerResult records the outcome of a single checker execution.
type CheckerResult struct {
	ProjectName string        `json:"project_name"`
	CheckerName string        `json:"checker_name"`
	Time        time.Time     `json:"start_time"`
	EndTime     time.Time     `json:"end_time"`
	Status      CheckerStatus `json:"status"`
	Error       error         `json:"error"`
}

// CheckerResultCallback receives a finished CheckerResult.
type CheckerResultCallback func(CheckerResult)
|
|
@ -0,0 +1,49 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var RaspberryPiProject Project = Project{
|
||||
Name: "raspberrypi",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("raspberrypi", true, func(*Project) (bool, error) {
|
||||
data := EnabledProjects["raspberrypi"].Properties
|
||||
err := AssertStrings(data.Upstream)
|
||||
|
||||
if err != nil {
|
||||
return false, GetError(err, "Raspberry Pi", "config sanity check")
|
||||
}
|
||||
upstream_body, err := httpGET(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Raspberry Pi", "getting upstream file")
|
||||
}
|
||||
page := string(upstream_body)
|
||||
indexOfFile := strings.Index(page, "archive.raspberrypi.org")
|
||||
if indexOfFile == -1 {
|
||||
return false, GetError(nil, "Raspberry Pi", "no index of file")
|
||||
}
|
||||
re := regexp.MustCompile(`(?P<hours>\d+):(?P<minutes>\d+)`)
|
||||
m := re.FindStringSubmatch(page[indexOfFile:])
|
||||
if len(m) == 0 || len(m[0]) == 0 {
|
||||
return false, GetError(nil, "Raspberry Pi", "no matches for regex in file")
|
||||
}
|
||||
split := strings.Split(m[0], ":")
|
||||
hours, err := strconv.Atoi(split[0])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Raspberry Pi", "parsing hours")
|
||||
}
|
||||
minutes, err := strconv.Atoi(split[1])
|
||||
if err != nil {
|
||||
return false, GetError(err, "Raspberry Pi", "parsing minutes")
|
||||
}
|
||||
duration := time.Duration(hours)*time.Hour + time.Duration(minutes)*time.Minute
|
||||
return (duration <= time.Duration(data.OOSInterval)*time.Second), nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var UbuntuProject Project = Project{
|
||||
Name: "ubuntu",
|
||||
Properties: DefaultProjectProperties,
|
||||
NumOfCheckers: 1,
|
||||
Checkers: []*ProjectChecker{
|
||||
GetDefaultChecker("ubuntu", true, func(*Project) (bool, error) {
|
||||
data := EnabledProjects["ubuntu"].Properties
|
||||
err := AssertStrings(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Ubuntu", "config sanity check")
|
||||
}
|
||||
|
||||
// SOURCE: https://git.csclub.uwaterloo.ca/public/mirror-checker/src/branch/master/projects/ubuntu.py
|
||||
res, err := httpGET(data.Upstream)
|
||||
if err != nil {
|
||||
return false, GetError(err, "Ubuntu", "getting upstream data")
|
||||
}
|
||||
page := string(res)
|
||||
|
||||
// count occurences of "Up to date"
|
||||
count := strings.Count(page, "Up to date")
|
||||
|
||||
NUM_UBUNTU_RELEASES := 24 // TODO: should be updated automatically (from another source)
|
||||
THRESHOLD := 5 // it would be pretty bad if we don't update this checker for 5 (major) ubuntu releases
|
||||
|
||||
log.Debug().Str("project", "Ubuntu").Int("count", count).Msg("counted occurences of 'Up to date'")
|
||||
|
||||
return count >= NUM_UBUNTU_RELEASES && count < NUM_UBUNTU_RELEASES+THRESHOLD, nil
|
||||
}),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
package checkers
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// duration parser
|
||||
func getTimeDelta(input string) (time.Duration, error) {
|
||||
input = strings.TrimSpace(input)
|
||||
parts := strings.Split(input, " ")
|
||||
|
||||
number, err := strconv.Atoi(parts[0])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
switch parts[1] {
|
||||
case "minute", "minutes":
|
||||
return time.Duration(number) * time.Minute, nil
|
||||
case "hour", "hours":
|
||||
return time.Duration(number) * time.Hour, nil
|
||||
case "day", "days":
|
||||
return time.Duration(number) * 24 * time.Hour, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("unknown time unit")
|
||||
}
|
||||
}
|
||||
|
||||
// http helpers
|
||||
func httpGET(url string) ([]byte, error) {
|
||||
res, err := http.Get(url)
|
||||
|
||||
startTime := time.Now()
|
||||
|
||||
if err != nil {
|
||||
log.Error().Msgf("Error making GET request: %s", err.Error())
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
log.Error().Msgf("Error reading response body: %s", err.Error())
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Debug().
|
||||
Dur("time", time.Since(startTime)).
|
||||
Str("status", res.Status).
|
||||
Msgf("GET %s", url)
|
||||
|
||||
return body, nil
|
||||
}
|
||||
|
||||
// error parser
|
||||
func GetError(err error, project string, msg string) error {
|
||||
if err == nil {
|
||||
return errors.New("<---")
|
||||
}
|
||||
|
||||
// throw given error while denoating project name and description
|
||||
errMsg := fmt.Sprintf("%s: '%s' received error '%s'", project, msg, err.Error())
|
||||
|
||||
return errors.New(errMsg)
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// Build metadata. The Build* placeholders are presumably overridden at
// link time via -ldflags "-X ..." — TODO confirm against the build
// scripts. GOOS/GOARCH always reflect the running binary.
var (
	BuildVersion = "development"
	BuildTime    = "unknown"
	BuildUser    = "unknown"
	BuildGOOS    = "unknown"
	BuildARCH    = "unknown"
	GOOS         = runtime.GOOS
	GOARCH       = runtime.GOARCH
)

// PrintVersion writes one line of version/build information for the CLI's
// version output, taking the version itself from the cli app.
func PrintVersion(cCtx *cli.Context) {
	fmt.Printf("version=%s buildTime=%s buildUser=%s buildGOOS=%s buildARCH=%s\n",
		cCtx.App.Version, BuildTime, BuildUser, BuildGOOS, BuildARCH)
}
|
|
@ -0,0 +1,9 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// MirrorBaseURL is the base URL of the CSC mirror; checkers prefix it onto
// each project's configured "csc" path.
var MirrorBaseURL = "http://mirror.csclub.uwaterloo.ca/" // TODO: must start with https://

// loggingTimeFormat is the timestamp layout applied to zerolog output.
var loggingTimeFormat = time.RFC3339
|
|
@ -0,0 +1,24 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/rs/zerolog/pkgerrors"
|
||||
)
|
||||
|
||||
func SetupLogger(debug bool) {
|
||||
zerolog.SetGlobalLevel(zerolog.InfoLevel)
|
||||
if debug {
|
||||
zerolog.SetGlobalLevel(zerolog.DebugLevel)
|
||||
}
|
||||
zerolog.TimeFieldFormat = loggingTimeFormat
|
||||
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
|
||||
|
||||
consoleWriter := zerolog.ConsoleWriter{Out: os.Stdout}
|
||||
multi := zerolog.MultiLevelWriter(consoleWriter) // TODO: add extra logging outputs
|
||||
log.Logger = zerolog.New(multi).With().Timestamp().Logger()
|
||||
}
|
||||
|
||||
// TODO: middleware
|
|
@ -0,0 +1,28 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func GetAbsPath(path string) string {
|
||||
absPath, err := filepath.Abs(path)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to get absolute path.")
|
||||
return path
|
||||
}
|
||||
return absPath
|
||||
}
|
||||
|
||||
// lowerAlphaRegex matches every run of characters outside a-z.
var lowerAlphaRegex = regexp.MustCompile("[^a-z]+")

// NormalizeName trims surrounding spaces, lowercases the name, and strips
// every character outside a-z, producing a canonical project key.
func NormalizeName(name string) string {
	return lowerAlphaRegex.ReplaceAllString(
		strings.ToLower(strings.Trim(name, " ")),
		"",
	)
}
|
424
data.json
424
data.json
|
@ -1,424 +0,0 @@
|
|||
{
|
||||
"AlmaLinux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://repo.almalinux.org/",
|
||||
"file": "almalinux/TIME"
|
||||
},
|
||||
"Alpine": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://uk.alpinelinux.org/",
|
||||
"file": "alpine/last-updated"
|
||||
},
|
||||
"Apache": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "apache/",
|
||||
"upstream": "https://downloads.apache.org/",
|
||||
"file": "zzz/time.txt"
|
||||
},
|
||||
"Arch": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "archlinux/",
|
||||
"upstream": "http://arch.mirror.constant.com/",
|
||||
"file": "lastupdate"
|
||||
},
|
||||
"Artix": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "artixlinux/",
|
||||
"upstream": "https://mirror1.artixlinux.org/repos/"
|
||||
},
|
||||
"CentOS": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://mirrors.edge.kernel.org/",
|
||||
"file": "centos/TIME"
|
||||
},
|
||||
"Ceph": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ceph/",
|
||||
"upstream": "https://download.ceph.com/",
|
||||
"file": "timestamp"
|
||||
},
|
||||
"CPAN": {
|
||||
"out_of_sync_interval": 172800
|
||||
},
|
||||
"cran": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://cran.r-project.org/mirmon_report.html",
|
||||
"file": ""
|
||||
},
|
||||
"ctan": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://www.ctan.org/mirrors/mirmon",
|
||||
"file": ""
|
||||
},
|
||||
"Cygwin": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "cygwin/",
|
||||
"upstream": "https://cygwin.com/pub/cygwin/",
|
||||
"file": "x86/sha512.sum"
|
||||
},
|
||||
"Debian": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://ftp-master.debian.org/",
|
||||
"file": "debian/project/trace/master"
|
||||
},
|
||||
"DebianCD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://debian.mirror.estruxture.net/",
|
||||
"file": "debian-cd/project/trace/cdimage.debian.org"
|
||||
},
|
||||
"DebianMultimedia": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "debian-multimedia/",
|
||||
"upstream": "http://debian-mirrors.sdinet.de/deb-multimedia/",
|
||||
"file": "project/trace/deb-multimedia.org"
|
||||
},
|
||||
"DebianPorts": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://deb.debian.org/",
|
||||
"file": "debian-ports/project/trace/porta.debian.org",
|
||||
"exclude": true
|
||||
},
|
||||
"DebianSecurity": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://deb.debian.org/",
|
||||
"file": "debian-security/project/trace/master"
|
||||
},
|
||||
"Eclipse": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "eclipse/",
|
||||
"upstream": "http://download.eclipse.org/",
|
||||
"file": "TIME"
|
||||
},
|
||||
"Fedora": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "fedora/",
|
||||
"upstream": "http://fedora.mirror.iweb.com/",
|
||||
"file": "linux/development/rawhide/COMPOSE_ID"
|
||||
},
|
||||
"FreeBSD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://ftp4.freebsd.org/pub/",
|
||||
"file": "FreeBSD/TIMESTAMP"
|
||||
},
|
||||
"GentooDistfiles": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "gentoo-distfiles/",
|
||||
"upstream": "http://gentoo.mirrors.tera-byte.com/",
|
||||
"file": "distfiles/timestamp.dev-local"
|
||||
},
|
||||
"GentooPortage": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "rsync://rsync4.ca.gentoo.org/",
|
||||
"upstream": "https://mirrorstats.gentoo.org/rsync/",
|
||||
"upstream1": "rsync://rsync1.de.gentoo.org/",
|
||||
"upstream2": "rsync://rsync8.de.gentoo.org/",
|
||||
"file": "gentoo-portage/Manifest"
|
||||
},
|
||||
"GNOME": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "gnome/",
|
||||
"upstream1": "https://download.gnome.org/",
|
||||
"upstream2": "https://mirrors.dotsrc.org/gnome/",
|
||||
"upstream3": "https://muug.ca/mirror/gnome/",
|
||||
"file1": "core/",
|
||||
"file2": "cache.json"
|
||||
},
|
||||
"GNU": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://mirrors.kernel.org/",
|
||||
"file": "gnu/mirror-updated-timestamp.txt"
|
||||
},
|
||||
"Gutenberg": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "gutenberg/",
|
||||
"upstream": "https://gutenberg.pglaf.org/",
|
||||
"file": "gutenberg.dcs"
|
||||
},
|
||||
"IPFire": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800
|
||||
},
|
||||
"KDE": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kde/",
|
||||
"upstream": "https://kde.c3sl.ufpr.br/",
|
||||
"file": "ls-lR"
|
||||
},
|
||||
"KDEApplicationData": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kde-applicationdata/",
|
||||
"upstream": "https://cdn.files.kde.org/",
|
||||
"file": "last-updated"
|
||||
},
|
||||
"Kernel": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kernel.org/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/pub/",
|
||||
"file": "linux/kernel/next/sha256sums.asc"
|
||||
},
|
||||
"linuxmint": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "linuxmint/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/linuxmint/",
|
||||
"file": ""
|
||||
},
|
||||
"linuxmint_packages": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "linuxmint-packages/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/linuxmint-packages/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"macports": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "MacPorts/mpdistfiles/",
|
||||
"upstream": "https://distfiles.macports.org/",
|
||||
"file": "ports.tar.gz"
|
||||
},
|
||||
"manjaro": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://repo.manjaro.org/",
|
||||
"file": ""
|
||||
},
|
||||
"mxlinux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://rsync-mxlinux.org/mirmon/packages.html",
|
||||
"file": ""
|
||||
},
|
||||
"mxlinux_iso": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "mxlinux-iso/",
|
||||
"upstream": "http://rsync-mxlinux.org/mirmon/index.html",
|
||||
"mirrors": [
|
||||
"http://mirror.its.dal.ca/mxlinux-cd/",
|
||||
"http://mirror.umd.edu/mxlinux-iso/"
|
||||
],
|
||||
"file": ""
|
||||
},
|
||||
"mySQL": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "mysql/",
|
||||
"upstream": "http://mirrors.sunsite.dk/mysql/",
|
||||
"file": "last-updated.txt"
|
||||
},
|
||||
"netbsd": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "NetBSD/",
|
||||
"upstream": "http://ftp.netbsd.org/pub/NetBSD/",
|
||||
"file": ""
|
||||
},
|
||||
"nongnu": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "nongnu/",
|
||||
"upstream": "http://download-mirror.savannah.gnu.org/releases/",
|
||||
"file": "00_TIME.txt"
|
||||
},
|
||||
"OpenBSD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://ftp.openbsd.org/pub/",
|
||||
"file": "OpenBSD/timestamp",
|
||||
"exclude": true
|
||||
},
|
||||
"opensuse": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "opensuse/update/",
|
||||
"upstream": "http://opensuse-mirror-gce-us.opensu.se/update/",
|
||||
"file": ""
|
||||
},
|
||||
"parabola": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "parabola/",
|
||||
"upstream": "https://repo.parabola.nu/",
|
||||
"file": "lastsync"
|
||||
},
|
||||
"pkgsrc": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "pkgsrc/",
|
||||
"upstream": "http://ftp.netbsd.org/pub/pkgsrc/",
|
||||
"file": "MIRROR-TIMESTAMP",
|
||||
"exclude": true
|
||||
},
|
||||
"puppy_linux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "puppylinux/",
|
||||
"upstream": "https://distro.ibiblio.org/puppylinux/",
|
||||
"file": ""
|
||||
},
|
||||
"qtproject": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "qtproject/",
|
||||
"upstream": "https://download.qt.io/",
|
||||
"file": "timestamp.txt",
|
||||
"exclude": true
|
||||
},
|
||||
"racket": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "racket/racket-installers/",
|
||||
"upstream": "https://mirror.racket-lang.org/installers/",
|
||||
"file": ""
|
||||
},
|
||||
"raspberrypi": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "raspberrypi/debian/",
|
||||
"upstream": "https://archive.raspberrypi.org/debian/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"raspbian": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "raspbian/",
|
||||
"upstream": "http://archive.raspbian.org/",
|
||||
"file": "snapshotindex.txt"
|
||||
},
|
||||
"sage": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "",
|
||||
"file": "sage/src/index.html"
|
||||
},
|
||||
"saltstack": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "saltstack/",
|
||||
"upstream": "https://repo.saltproject.io/",
|
||||
"file": ""
|
||||
},
|
||||
"slackware": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "slackware/",
|
||||
"upstream": "https://mirrors.slackware.com/slackware/",
|
||||
"file": ""
|
||||
},
|
||||
"tdf": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "tdf/",
|
||||
"upstream": "https://download.documentfoundation.org/",
|
||||
"file": "TIMESTAMP"
|
||||
},
|
||||
"trisquel": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "trisquel/",
|
||||
"upstream": "http://rsync.trisquel.info/trisquel/dists/",
|
||||
"mirrors": [
|
||||
"https://mirror.fsf.org/trisquel-images/",
|
||||
"http://mirrors.ocf.berkeley.edu/trisquel-images/"
|
||||
],
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-archive",
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu_ports": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ubuntu-ports/",
|
||||
"upstream": "http://ports.ubuntu.com/ubuntu-ports/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"ubuntu_ports_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ubuntu-ports-releases/",
|
||||
"upstream": "https://cdimage.ubuntu.com/releases/",
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "",
|
||||
"upstream": "https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-release",
|
||||
"file": ""
|
||||
},
|
||||
"vlc": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "vlc/",
|
||||
"upstream": "http://download.videolan.org/pub/videolan/",
|
||||
"file": "trace"
|
||||
},
|
||||
"x_org": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "x.org/individual/",
|
||||
"upstream": "https://www.x.org/releases/individual/",
|
||||
"file": ""
|
||||
},
|
||||
"xiph": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "xiph/releases/",
|
||||
"upstream": "https://ftp.osuosl.org/pub/xiph/releases/",
|
||||
"file": ""
|
||||
},
|
||||
"xubuntu_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "xubuntu-releases/",
|
||||
"upstream": "https://cdimage.ubuntu.com/xubuntu/releases/",
|
||||
"file": ""
|
||||
}
|
||||
}
|
|
@ -26,4 +26,4 @@ IPFire
|
|||
KDE
|
||||
KDEApplicationData
|
||||
Kernel
|
||||
OpenBSD
|
||||
OpenBSD
|
|
@ -0,0 +1,425 @@
|
|||
{
|
||||
"AlmaLinux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://repo.almalinux.org/",
|
||||
"file": "almalinux/TIME"
|
||||
},
|
||||
"Alpine": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://uk.alpinelinux.org/",
|
||||
"file": "alpine/last-updated"
|
||||
},
|
||||
"Apache": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "apache/",
|
||||
"upstream": "https://downloads.apache.org/",
|
||||
"file": "zzz/time.txt"
|
||||
},
|
||||
"Arch": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "archlinux/",
|
||||
"upstream": "http://arch.mirror.constant.com/",
|
||||
"file": "lastupdate"
|
||||
},
|
||||
"Artix": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "artixlinux/",
|
||||
"upstream": "https://mirror1.artixlinux.org/repos/"
|
||||
},
|
||||
"CentOS": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://mirrors.edge.kernel.org/",
|
||||
"file": "centos/TIME"
|
||||
},
|
||||
"Ceph": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ceph/",
|
||||
"upstream": "https://download.ceph.com/",
|
||||
"file": "timestamp"
|
||||
},
|
||||
"CPAN": {
|
||||
"out_of_sync_interval": 172800,
|
||||
"out_of_sync_since": null
|
||||
},
|
||||
"cran": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://cran.r-project.org/mirmon_report.html",
|
||||
"file": ""
|
||||
},
|
||||
"ctan": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://www.ctan.org/mirrors/mirmon",
|
||||
"file": ""
|
||||
},
|
||||
"Cygwin": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "cygwin/",
|
||||
"upstream": "https://cygwin.com/pub/cygwin/",
|
||||
"file": "x86/sha512.sum"
|
||||
},
|
||||
"Debian": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://ftp-master.debian.org/",
|
||||
"file": "debian/project/trace/master"
|
||||
},
|
||||
"DebianCD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://debian.mirror.estruxture.net/",
|
||||
"file": "debian-cd/project/trace/cdimage.debian.org"
|
||||
},
|
||||
"DebianMultimedia": {
|
||||
"out_of_sync_since": 1659116719,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "debian-multimedia/",
|
||||
"upstream": "http://debian-mirrors.sdinet.de/deb-multimedia/",
|
||||
"file": "project/trace/deb-multimedia.org"
|
||||
},
|
||||
"DebianPorts": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://deb.debian.org/",
|
||||
"file": "debian-ports/project/trace/porta.debian.org",
|
||||
"exclude": true
|
||||
},
|
||||
"DebianSecurity": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://deb.debian.org/",
|
||||
"file": "debian-security/project/trace/master"
|
||||
},
|
||||
"Eclipse": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "eclipse/",
|
||||
"upstream": "http://download.eclipse.org/",
|
||||
"file": "TIME"
|
||||
},
|
||||
"Fedora": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 259200,
|
||||
"csc": "fedora/",
|
||||
"upstream": "http://fedora.mirror.iweb.com/",
|
||||
"file": "linux/development/rawhide/COMPOSE_ID"
|
||||
},
|
||||
"FreeBSD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://ftp4.freebsd.org/pub/",
|
||||
"file": "FreeBSD/TIMESTAMP"
|
||||
},
|
||||
"GentooDistfiles": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "gentoo-distfiles/",
|
||||
"upstream": "http://gentoo.mirrors.tera-byte.com/",
|
||||
"file": "distfiles/timestamp.dev-local"
|
||||
},
|
||||
"GentooPortage": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "rsync://rsync4.ca.gentoo.org/",
|
||||
"upstream": "https://mirrorstats.gentoo.org/rsync/",
|
||||
"upstream1": "rsync://rsync1.de.gentoo.org/",
|
||||
"upstream2": "rsync://rsync8.de.gentoo.org/",
|
||||
"file": "gentoo-portage/Manifest"
|
||||
},
|
||||
"GNOME": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "gnome/",
|
||||
"upstream1": "https://download.gnome.org/",
|
||||
"upstream2": "https://mirrors.dotsrc.org/gnome/",
|
||||
"upstream3": "https://muug.ca/mirror/gnome/",
|
||||
"file1": "core/",
|
||||
"file2": "cache.json"
|
||||
},
|
||||
"GNU": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://mirrors.kernel.org/",
|
||||
"file": "gnu/mirror-updated-timestamp.txt"
|
||||
},
|
||||
"Gutenberg": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "gutenberg/",
|
||||
"upstream": "https://gutenberg.pglaf.org/",
|
||||
"file": "gutenberg.dcs"
|
||||
},
|
||||
"IPFire": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800
|
||||
},
|
||||
"KDE": {
|
||||
"out_of_sync_since": 1659116720,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kde/",
|
||||
"upstream": "https://kde.c3sl.ufpr.br/",
|
||||
"file": "ls-lR"
|
||||
},
|
||||
"KDEApplicationData": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kde-applicationdata/",
|
||||
"upstream": "https://cdn.files.kde.org/",
|
||||
"file": "last-updated"
|
||||
},
|
||||
"Kernel": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "kernel.org/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/pub/",
|
||||
"file": "linux/kernel/next/sha256sums.asc"
|
||||
},
|
||||
"linuxmint": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "linuxmint/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/linuxmint/",
|
||||
"file": ""
|
||||
},
|
||||
"linuxmint_packages": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "linuxmint-packages/",
|
||||
"upstream": "https://mirrors.edge.kernel.org/linuxmint-packages/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"macports": {
|
||||
"out_of_sync_since": 1642827723,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "MacPorts/mpdistfiles/",
|
||||
"upstream": "https://distfiles.macports.org/",
|
||||
"file": "ports.tar.gz"
|
||||
},
|
||||
"manjaro": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://repo.manjaro.org/",
|
||||
"file": ""
|
||||
},
|
||||
"mxlinux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "http://rsync-mxlinux.org/mirmon/packages.html",
|
||||
"file": ""
|
||||
},
|
||||
"mxlinux_iso": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "mxlinux-iso/",
|
||||
"upstream": "http://rsync-mxlinux.org/mirmon/index.html",
|
||||
"mirrors": [
|
||||
"http://mirror.its.dal.ca/mxlinux-cd/",
|
||||
"http://mirror.umd.edu/mxlinux-iso/"
|
||||
],
|
||||
"file": ""
|
||||
},
|
||||
"mySQL": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "mysql/",
|
||||
"upstream": "http://mirrors.sunsite.dk/mysql/",
|
||||
"file": "last-updated.txt"
|
||||
},
|
||||
"netbsd": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "NetBSD/",
|
||||
"upstream": "http://ftp.netbsd.org/pub/NetBSD/",
|
||||
"file": ""
|
||||
},
|
||||
"nongnu": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "nongnu/",
|
||||
"upstream": "http://download-mirror.savannah.gnu.org/releases/",
|
||||
"file": "00_TIME.txt"
|
||||
},
|
||||
"OpenBSD": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://ftp.openbsd.org/pub/",
|
||||
"file": "OpenBSD/timestamp",
|
||||
"exclude": true
|
||||
},
|
||||
"opensuse": {
|
||||
"out_of_sync_since": 1648699331,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "opensuse/update/",
|
||||
"upstream": "http://opensuse-mirror-gce-us.opensu.se/update/",
|
||||
"file": ""
|
||||
},
|
||||
"parabola": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "parabola/",
|
||||
"upstream": "https://repo.parabola.nu/",
|
||||
"file": "lastsync"
|
||||
},
|
||||
"pkgsrc": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "pkgsrc/",
|
||||
"upstream": "http://ftp.netbsd.org/pub/pkgsrc/",
|
||||
"file": "MIRROR-TIMESTAMP",
|
||||
"exclude": true
|
||||
},
|
||||
"puppy_linux": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "puppylinux/",
|
||||
"upstream": "https://distro.ibiblio.org/puppylinux/",
|
||||
"file": ""
|
||||
},
|
||||
"qtproject": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "qtproject/",
|
||||
"upstream": "https://download.qt.io/",
|
||||
"file": "timestamp.txt",
|
||||
"exclude": true
|
||||
},
|
||||
"racket": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "racket/racket-installers/",
|
||||
"upstream": "https://mirror.racket-lang.org/installers/",
|
||||
"file": ""
|
||||
},
|
||||
"raspberrypi": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "raspberrypi/debian/",
|
||||
"upstream": "https://archive.raspberrypi.org/debian/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"raspbian": {
|
||||
"out_of_sync_since": 1659116721,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "raspbian/",
|
||||
"upstream": "http://archive.raspbian.org/",
|
||||
"file": "snapshotindex.txt"
|
||||
},
|
||||
"sage": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "",
|
||||
"file": "sage/src/index.html"
|
||||
},
|
||||
"saltstack": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "saltstack/",
|
||||
"upstream": "https://repo.saltproject.io/",
|
||||
"file": ""
|
||||
},
|
||||
"slackware": {
|
||||
"out_of_sync_since": 1642827723,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "slackware/",
|
||||
"upstream": "https://mirrors.slackware.com/slackware/",
|
||||
"file": ""
|
||||
},
|
||||
"tdf": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "tdf/",
|
||||
"upstream": "https://download.documentfoundation.org/",
|
||||
"file": "TIMESTAMP"
|
||||
},
|
||||
"trisquel": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "trisquel/",
|
||||
"upstream": "http://rsync.trisquel.info/trisquel/dists/",
|
||||
"mirrors": [
|
||||
"https://mirror.fsf.org/trisquel-images/",
|
||||
"http://mirrors.ocf.berkeley.edu/trisquel-images/"
|
||||
],
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "",
|
||||
"upstream": "https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-archive",
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu_ports": {
|
||||
"out_of_sync_since": 1651550528,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ubuntu-ports/",
|
||||
"upstream": "http://ports.ubuntu.com/ubuntu-ports/",
|
||||
"file": "dists/"
|
||||
},
|
||||
"ubuntu_ports_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "ubuntu-ports-releases/",
|
||||
"upstream": "https://cdimage.ubuntu.com/releases/",
|
||||
"file": ""
|
||||
},
|
||||
"ubuntu_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 172800,
|
||||
"csc": "",
|
||||
            "upstream": "https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-release",
|
||||
"file": ""
|
||||
},
|
||||
"vlc": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "vlc/",
|
||||
"upstream": "http://download.videolan.org/pub/videolan/",
|
||||
"file": "trace"
|
||||
},
|
||||
"x_org": {
|
||||
"out_of_sync_since": 1657512131,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "x.org/individual/",
|
||||
"upstream": "https://www.x.org/releases/individual/",
|
||||
"file": ""
|
||||
},
|
||||
"xiph": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "xiph/releases/",
|
||||
"upstream": "https://ftp.osuosl.org/pub/xiph/releases/",
|
||||
"file": ""
|
||||
},
|
||||
"xubuntu_releases": {
|
||||
"out_of_sync_since": null,
|
||||
"out_of_sync_interval": 86400,
|
||||
"csc": "xubuntu-releases/",
|
||||
"upstream": "https://cdimage.ubuntu.com/xubuntu/releases/",
|
||||
"file": ""
|
||||
}
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
module git.csclub.uwaterloo.ca/public/mirror-checker
|
||||
|
||||
go 1.20
|
||||
|
||||
require (
|
||||
github.com/gofiber/fiber/v2 v2.47.0
|
||||
github.com/rs/zerolog v1.29.1
|
||||
github.com/urfave/cli/v2 v2.25.7
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/andybalholm/brotli v1.0.5 // indirect
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/klauspost/compress v1.16.6 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.14 // indirect
|
||||
github.com/philhofer/fwd v1.1.2 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/rivo/uniseg v0.4.4 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/savsgio/dictpool v0.0.0-20221023140959-7bf2e61cea94 // indirect
|
||||
github.com/savsgio/gotils v0.0.0-20230208104028-c358bd845dee // indirect
|
||||
github.com/tinylib/msgp v1.1.8 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasthttp v1.48.0 // indirect
|
||||
github.com/valyala/tcplisten v1.0.0 // indirect
|
||||
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
|
||||
golang.org/x/sys v0.9.0 // indirect
|
||||
)
|
|
@ -0,0 +1,110 @@
|
|||
github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs=
|
||||
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gofiber/fiber/v2 v2.46.0 h1:wkkWotblsGVlLjXj2dpgKQAYHtXumsK/HyFugQM68Ns=
|
||||
github.com/gofiber/fiber/v2 v2.46.0/go.mod h1:DNl0/c37WLe0g92U6lx1VMQuxGUQY5V7EIaVoEsUffc=
|
||||
github.com/gofiber/fiber/v2 v2.47.0 h1:EN5lHVCc+Pyqh5OEsk8fzRiifgwpbrP0rulQ4iNf3fs=
|
||||
github.com/gofiber/fiber/v2 v2.47.0/go.mod h1:mbFMVN1lQuzziTkkakgtKKdjfsXSw9BKR5lmcNksUoU=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
|
||||
github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk=
|
||||
github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
|
||||
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||
github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw=
|
||||
github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2tUTP0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
|
||||
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||
github.com/rs/zerolog v1.29.1 h1:cO+d60CHkknCbvzEWxP0S9K6KqyTjrCNUy1LdQLCGPc=
|
||||
github.com/rs/zerolog v1.29.1/go.mod h1:Le6ESbR7hc+DP6Lt1THiV8CQSdkkNrd3R0XbEgp3ZBU=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/savsgio/dictpool v0.0.0-20221023140959-7bf2e61cea94 h1:rmMl4fXJhKMNWl+K+r/fq4FbbKI+Ia2m9hYBLm2h4G4=
|
||||
github.com/savsgio/dictpool v0.0.0-20221023140959-7bf2e61cea94/go.mod h1:90zrgN3D/WJsDd1iXHT96alCoN2KJo6/4x1DZC3wZs8=
|
||||
github.com/savsgio/gotils v0.0.0-20220530130905-52f3993e8d6d/go.mod h1:Gy+0tqhJvgGlqnTF8CVGP0AaGRjwBtXs/a5PA0Y3+A4=
|
||||
github.com/savsgio/gotils v0.0.0-20230208104028-c358bd845dee h1:8Iv5m6xEo1NR1AvpV+7XmhI4r39LGNzwUL4YpMuL5vk=
|
||||
github.com/savsgio/gotils v0.0.0-20230208104028-c358bd845dee/go.mod h1:qwtSXrKuJh/zsFQ12yEE89xfCrGKK63Rr7ctU/uCo4g=
|
||||
github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw=
|
||||
github.com/tinylib/msgp v1.1.8 h1:FCXC1xanKO4I8plpHGH2P7koL/RzZs12l/+r7vakfm0=
|
||||
github.com/tinylib/msgp v1.1.8/go.mod h1:qkpG+2ldGg4xRFmx+jfTvZPxfGFhi64BcnL9vkCm/Tw=
|
||||
github.com/urfave/cli/v2 v2.25.5 h1:d0NIAyhh5shGscroL7ek/Ya9QYQE0KNabJgiUinIQkc=
|
||||
github.com/urfave/cli/v2 v2.25.5/go.mod h1:GHupkWPMM0M/sj1a2b4wUrWBPzazNrIjouW6fmdJLxc=
|
||||
github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs=
|
||||
github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasthttp v1.47.0 h1:y7moDoxYzMooFpT5aHgNgVOQDrS3qlkfiP9mDtGGK9c=
|
||||
github.com/valyala/fasthttp v1.47.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
|
||||
github.com/valyala/fasthttp v1.48.0 h1:oJWvHb9BIZToTQS3MuQ2R3bJZiNSa2KiNdeI8A+79Tc=
|
||||
github.com/valyala/fasthttp v1.48.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
|
||||
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
||||
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
|
||||
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
|
||||
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
|
@ -0,0 +1,214 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/checkers"
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/config"
|
||||
"git.csclub.uwaterloo.ca/public/mirror-checker/web"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// configPath is the path to the mirrors config file; populated by the
// --config flag (or the MC_CONFIG_FILE environment variable) before any
// subcommand action runs.
var configPath string
|
||||
|
||||
func loadConfig() {
|
||||
path := config.GetAbsPath(configPath)
|
||||
// TODO: check if file exists
|
||||
|
||||
log.Info().Str("path", path).Msg("Loading config file.")
|
||||
err := checkers.LoadFromFile(path)
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("Failed to load config file.")
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
config.SetupLogger(true) // TODO: flag for debug mode
|
||||
|
||||
// TODO: give option to give checker a particular name (or autogen one)
|
||||
|
||||
// Start CLI
|
||||
// TODO: documentation - https://cli.urfave.org/v2/examples/full-api-example/
|
||||
app := &cli.App{
|
||||
Name: "CSC Mirror Checker 2",
|
||||
Usage: "sees if the mirror is up!",
|
||||
Version: config.BuildVersion,
|
||||
EnableBashCompletion: true,
|
||||
// https://cli.urfave.org/v2/examples/combining-short-options/
|
||||
// TODO: flags for config file (mirrors.json), defaults to mirrors.json
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "config",
|
||||
Aliases: []string{"c"},
|
||||
Usage: "path to config file",
|
||||
Destination: &configPath,
|
||||
Value: "data/mirrors.json",
|
||||
EnvVars: []string{"MC_CONFIG_FILE"},
|
||||
},
|
||||
},
|
||||
Commands: []*cli.Command{
|
||||
{
|
||||
Name: "daemon",
|
||||
Aliases: []string{"s", "d", "serve", "web", "server"},
|
||||
Usage: "starts web API",
|
||||
Action: func(cCtx *cli.Context) error {
|
||||
// TODO: flags for port, listen address
|
||||
// TODO: flag --config or --stdin-config
|
||||
|
||||
// TODO: enable all projects by default
|
||||
// checkers.LoadDefaultProjects()
|
||||
|
||||
loadConfig()
|
||||
|
||||
return web.StartServer()
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "list",
|
||||
Usage: "lists all available projects",
|
||||
Action: func(cCtx *cli.Context) error {
|
||||
loadConfig()
|
||||
|
||||
log.Info().Msg("Listing supported projects")
|
||||
|
||||
projects := checkers.SupportedProjects
|
||||
for _, proj := range projects {
|
||||
status := "supported"
|
||||
// check if project is in enabled list
|
||||
// TODO: ...
|
||||
|
||||
// supported checkers
|
||||
var checkers []string
|
||||
for _, c := range proj.Checkers {
|
||||
checkers = append(checkers, c.Name)
|
||||
}
|
||||
|
||||
log.Info().Str("project", proj.Name).
|
||||
Str("status", status).
|
||||
Strs("checkers", checkers).
|
||||
Msg("Supported project.")
|
||||
|
||||
// list configs
|
||||
log.Info().
|
||||
Str("project", proj.Name).
|
||||
Int("num_checkers", proj.NumOfCheckers).
|
||||
Time("out_of_sync_since", proj.Properties.OOSSince).
|
||||
Int64("out_of_sync_interval", proj.Properties.OOSInterval).
|
||||
Str("csc", proj.Properties.CSC).
|
||||
Strs("mirrors", proj.Properties.Mirrors).
|
||||
Str("upstream", proj.Properties.Upstream).
|
||||
Str("file", proj.Properties.File).
|
||||
Msg("Found project config.")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "check",
|
||||
Aliases: []string{"c"},
|
||||
Usage: "checks particular mirror (once)",
|
||||
Flags: []cli.Flag{
|
||||
&cli.BoolFlag{
|
||||
Name: "all",
|
||||
Aliases: []string{"a"},
|
||||
Usage: "check all (supported) mirrors",
|
||||
Value: false,
|
||||
},
|
||||
},
|
||||
Action: func(cCtx *cli.Context) error {
|
||||
checkers.LoadDefaultProjects()
|
||||
loadConfig()
|
||||
|
||||
// attempt to look up and check all projects
|
||||
var projects []string
|
||||
|
||||
if cCtx.Bool("all") {
|
||||
// iterate through all supported projects and add names
|
||||
for _, proj := range checkers.SupportedProjects {
|
||||
projects = append(projects, proj.Name)
|
||||
}
|
||||
} else {
|
||||
projects = cCtx.Args().Slice()
|
||||
if len(projects) == 0 {
|
||||
log.Fatal().Msg("No projects specified.")
|
||||
}
|
||||
}
|
||||
|
||||
checkers.StartWorkers()
|
||||
|
||||
log.Debug().Msgf("Checking all specified projects.")
|
||||
|
||||
for _, arg := range projects {
|
||||
log.Info().Msgf("Pulling project information for '%s'.", arg)
|
||||
proj, err := checkers.LoadProject(arg)
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).
|
||||
Str("project", arg).
|
||||
Msg("Failed to load project.")
|
||||
return err
|
||||
}
|
||||
|
||||
res, err := proj.RunChecks(nil)
|
||||
// res, err := proj.RunChecks(func(res checkers.CheckerResult) {
|
||||
// // if res.Error != nil {
|
||||
// // log.Error().Err(res.Error).
|
||||
// // Time("time", res.Time).
|
||||
// // Msgf("Failed check %s for project %s", res.CheckerName, res.ProjectName)
|
||||
// // } else {
|
||||
// // log.Info().
|
||||
// // Time("time", res.Time).
|
||||
// // Str("status", string(res.Status)).
|
||||
// // Msgf("Completed check %s for project %s", res.CheckerName, res.ProjectName)
|
||||
// // }
|
||||
// })
|
||||
|
||||
if err != nil {
|
||||
// code is shit whoopsies
|
||||
log.Fatal().Err(err).Msg("Failed to check project.")
|
||||
}
|
||||
|
||||
status := <-*res.FinalStatus
|
||||
if status == checkers.CHECKER_ERROR {
|
||||
log.Error().Err(err).
|
||||
Str("project", arg).
|
||||
Msg("Failed to check project.")
|
||||
continue
|
||||
}
|
||||
|
||||
if status == checkers.CHECKER_FAIL {
|
||||
log.Error().Str("final_status", string(status)).
|
||||
Str("project", arg).
|
||||
Msg("Error found when checking project.")
|
||||
continue
|
||||
}
|
||||
|
||||
log.Info().Str("final_status", string(status)).
|
||||
Msgf("Completed all checks for project `%s`", arg)
|
||||
}
|
||||
|
||||
checkers.StopWorkers()
|
||||
|
||||
return nil
|
||||
},
|
||||
// TODO: auto complete available mirrors, https://cli.urfave.org/v2/examples/combining-short-options/
|
||||
// BashComplete: func(cCtx *cli.Context) {
|
||||
// // This will complete if no args are passed
|
||||
// if cCtx.NArg() > 0 {
|
||||
// return
|
||||
// }
|
||||
// for _, t := range config.Mirrors {
|
||||
// log.Println(t)
|
||||
// }
|
||||
// },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cli.VersionPrinter = config.PrintVersion
|
||||
|
||||
if err := app.Run(os.Args); err != nil {
|
||||
log.Fatal().Err(err).Msg("An error occurred.")
|
||||
}
|
||||
}
|
72
main.py
72
main.py
|
@ -1,72 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
This mirror status checker determines whether CSC mirror is up-to-date with upstream
|
||||
"""
|
||||
|
||||
import time
|
||||
import sys
|
||||
import requests
|
||||
from multiprocessing import Pool, Manager
|
||||
|
||||
from projects import *
|
||||
import json
|
||||
|
||||
NUM_THREAD = 16
|
||||
|
||||
current_time = int(time.time())
|
||||
|
||||
def safe_print(*args, **kwargs):
    """Print with an explicit flush on every call.

    When run under 'chronic' and 'timeout', stdout is block-buffered and
    buffered output can be suppressed; forcing a flush avoids losing messages.
    """
    print(*args, flush=True, **kwargs)
||||
|
||||
def check_project(args):
    """Run the checker for one project and update its sync bookkeeping.

    args is a (project_name, shared_data) tuple (unpacked manually because
    this is called through multiprocessing Pool.imap, which passes a single
    argument). Returns True when the mirror is considered OK (up-to-date,
    excluded, or out of sync for less than its grace interval) and False
    when it has been out of sync too long or a network error occurred.
    """
    project, data = args
    try:
        # Resolve the checker class by name from this module's namespace
        # (classes are re-exported by the projects package import).
        project_class = getattr(sys.modules[__name__], project)

        # Skip projects we no longer mirror
        if data[project].get('exclude', False):
            return True

        # current_time is a module-level epoch timestamp captured at startup.
        checker_result = project_class.check(data, project, current_time)

        if checker_result:
            # In sync: clear the out-of-sync marker.
            data[project]["out_of_sync_since"] = None
            safe_print(f"Success: {project} up-to-date")
            return True

        elif (data[project]["out_of_sync_since"] is not None
                and current_time - data[project]["out_of_sync_since"] > data[project]["out_of_sync_interval"]):
            # Out of sync for longer than the allowed grace interval.
            safe_print(f"Failure: {project} out-of-sync")
            return False

        else:
            # First observation of the project being out of sync: start the
            # clock but still report success (grace period not exceeded).
            data[project]["out_of_sync_since"] = current_time
            return True

    except requests.exceptions.RequestException as err:
        # Network failure while checking counts as a failure for this run.
        safe_print(f"Error: {project}\n{err}")

        return False
||||
|
||||
def main():
    """Check every project listed in the data file in parallel.

    Reads the JSON state file (path from argv[1], default 'data.json'),
    runs check_project over all projects with a worker pool, writes the
    updated state back, and exits 0 if every project passed, else 1.
    """
    data_file = 'data.json'
    if len(sys.argv) > 1:
        data_file = sys.argv[1]

    manager = Manager()
    # FIX: the file handle was previously leaked via json.load(open(...));
    # use a context manager so it is closed deterministically.
    with open(data_file, encoding="utf-8") as file:
        data = json.load(file)
    # Manager dicts let worker processes mutate per-project state
    # (out_of_sync_since) and have the changes visible here.
    sync_data = manager.dict({k: manager.dict(v) for k, v in data.items()})

    with Pool(NUM_THREAD) as pool:
        all_pass = all(pool.imap(check_project, ((k, sync_data) for k in data.keys())))

    # Persist the (possibly updated) sync state for the next run.
    with open(data_file, "w", encoding="utf-8") as file:
        json.dump({k: dict(v) for k, v in sync_data.items()}, file, indent=' ')

    sys.exit(0 if all_pass else 1)


if __name__ == "__main__":
    main()
|
36
project.py
36
project.py
|
@ -1,36 +0,0 @@
|
|||
"""
|
||||
Contains abstract class for a mirrored project
|
||||
"""
|
||||
|
||||
from abc import ABC
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
|
||||
class Project(ABC):
    """Abstract base class for a mirrored project.

    The default check compares the contents of one tracked file on the CSC
    mirror against the same file upstream.
    """

    @staticmethod
    def check(data, project, current_time):
        """Check if project packages are up-to-date.

        Returns True when the tracked files match exactly, or when both
        parse as integers (timestamps) whose difference is within the
        project's out_of_sync_interval; False otherwise.
        """
        info = data[project]
        csc_url = CSC_MIRROR + info["csc"] + info["file"]
        upstream_url = info["upstream"] + info["file"]

        csc_resp = requests.get(csc_url)
        csc_resp.raise_for_status()
        csc_text = csc_resp.text

        upstream_resp = requests.get(upstream_url)
        upstream_resp.raise_for_status()
        upstream_text = upstream_resp.text

        if upstream_text == csc_text:
            return True

        # Some projects publish an integer timestamp in the tracked file;
        # tolerate a bounded lag between upstream and the CSC copy.
        try:
            return int(upstream_text) - int(csc_text) < info["out_of_sync_interval"]
        except ValueError:
            return False
|
|
@ -1,21 +0,0 @@
|
|||
"""
|
||||
This file automatically imports all Classes in this directory
|
||||
"""
|
||||
|
||||
from importlib import import_module
from inspect import isclass
from pathlib import Path
from pkgutil import iter_modules

# Walk every module in this package and re-export each class it defines at
# package level, so `from projects import *` exposes all checker classes.
package_dir = Path(__file__).resolve().parent
for _, module_name, _ in iter_modules([str(package_dir)]):

    # import the module and iterate through its attributes
    module = import_module(f"{__name__}.{module_name}")
    for attribute_name in dir(module):
        attribute = getattr(module, attribute_name)

        if isclass(attribute):
            # Add the class to this package's variables
            globals()[attribute_name] = attribute
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains AlmaLinux class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class AlmaLinux(Project):
    """AlmaLinux mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Alpine class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Alpine(Project):
    """Alpine mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Apache class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Apache(Project):
    """Apache mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Arch class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Arch(Project):
    """Arch Linux mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,44 +0,0 @@
|
|||
"""
|
||||
Contains Artix class
|
||||
"""
|
||||
|
||||
import re
|
||||
from datetime import datetime
|
||||
import requests
|
||||
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
def _get_date(url):
    """Return the Last-Modified header of `url` as a naive datetime.

    Issues a HEAD request; raises requests.HTTPError on a bad status and
    KeyError if the server sends no Last-Modified header.
    """
    response = requests.head(url)
    response.raise_for_status()
    last_modified = response.headers['Last-Modified']
    return datetime.strptime(last_modified, '%a, %d %b %Y %H:%M:%S %Z')
|
||||
|
||||
class Artix(Project):
    """Artix class.

    Compares per-repo package database timestamps between upstream and the
    CSC mirror rather than a single tracked file.
    """

    @classmethod
    def check(cls, data, project, current_time):
        """Return True when every repo db on the CSC mirror is at least as
        new as upstream, or the oldest lag is within the grace interval."""
        upstream_base = data[project]['upstream']
        csc_base = CSC_MIRROR + data[project]['csc']

        index_resp = requests.get(upstream_base)
        index_resp.raise_for_status()

        # Repo names are the top-level directories of the upstream index.
        repos = re.findall(r'href="(\w+)/"', index_resp.text)

        outdated_since = None
        for repo in repos:
            # Good enough for now, we can change it if Artix gets more arch in the future
            db_path = repo + '/os/x86_64/' + repo + '.db'
            upstream_date = _get_date(upstream_base + db_path)
            csc_date = _get_date(csc_base + db_path)
            # Track the earliest upstream timestamp among stale repos.
            if csc_date < upstream_date and (outdated_since is None or upstream_date < outdated_since):
                outdated_since = upstream_date

        if outdated_since is None:
            return True

        data[project]['out_of_sync_since'] = int(outdated_since.timestamp())
        return current_time - data[project]['out_of_sync_since'] < data[project]['out_of_sync_interval']
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains CentOS class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class CentOS(Project):
    """CentOS mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Ceph class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Ceph(Project):
    """Ceph mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,21 +0,0 @@
|
|||
"""
|
||||
Contains CPAN class
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
|
||||
class CPAN(Project):
    """CPAN class.

    Uses the CPAN mirror tracker's JSON feed, which reports each mirror's
    age directly.
    """

    @staticmethod
    def check(data, project, current_time):
        """Return True when the tracker's reported age for the CSC mirror is
        within the grace interval; False if the mirror is not listed."""
        mirror_list = requests.get("http://mirrors.cpan.org/cpan-json.txt").json()
        csc_entry = f"{CSC_MIRROR}CPAN/"
        for mirror in mirror_list:
            if mirror["url"] != csc_entry:
                continue
            data[project]["out_of_sync_since"] = int(mirror["age"])
            lag = current_time - data[project]["out_of_sync_since"]
            return lag <= data[project]["out_of_sync_interval"]
        return False
|
|
@ -1,26 +0,0 @@
|
|||
"""
|
||||
Contains cran class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class cran(Project):
    """cran class.

    Scrapes the CRAN mirror status page for the CSC mirror's reported lag.
    """
    @staticmethod
    def check(data, project, current_time):
        """Return True when the reported lag is within the grace interval.

        NOTE(review): if the lag phrase is absent, m is None and m.group(0)
        raises AttributeError — treated upstream as a checker error.
        """
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("mirror.csclub.uwaterloo.ca")

        # Lag is rendered as e.g. "5 hours" or "1.5 days" after the mirror name.
        m = re.search(r'(\d+ hour)|(\d+ hours)|(\d+(\.)?\d+ days)', page[indexOfFile:]) # solution from: https://stackoverflow.com/questions/21074100/how-to-convert-standard-timedelta-string-to-timedelta-object/21074460

        duration = pd.to_timedelta(m.group(0))
        data[project]["out_of_sync_since"] = current_time - duration.total_seconds()

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')
|
|
@ -1,26 +0,0 @@
|
|||
"""
|
||||
Contains ctan class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class ctan(Project):
    """ctan class.

    Scrapes the CTAN mirror status page for the CSC mirror's reported lag.
    """

    @staticmethod
    def check(data, project, current_time):
        """Return True when the reported lag is within the grace interval."""
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("mirror.csclub.uwaterloo.ca")

        # Lag is rendered as e.g. "5 hours" or "1.5 days" after the mirror name.
        m = re.search(r'(\d+ hour)|(\d+ hours)|(\d+(\.)?\d+ days)', page[indexOfFile:]) # solution from: https://stackoverflow.com/questions/21074100/how-to-convert-standard-timedelta-string-to-timedelta-object/21074460

        duration = pd.to_timedelta(m.group(0))
        # FIX: previously `datetime.now() - duration.total_seconds()`, which
        # raised TypeError (datetime minus float) and referenced an
        # unimported name; use epoch-second arithmetic like the sibling
        # checkers (cran, mxlinux).
        data[project]["out_of_sync_since"] = current_time - duration.total_seconds()

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Cygwin class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Cygwin(Project):
    """Cygwin mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Debian class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Debian(Project):
    """Debian mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains DebianCD class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class DebianCD(Project):
    """Debian CD images mirror; uses the inherited Project.check comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains DebianMultimedia class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class DebianMultimedia(Project):
    """Debian Multimedia mirror; uses the inherited Project.check comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains DebianPorts class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class DebianPorts(Project):
    """Debian Ports mirror; uses the inherited Project.check comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains DebianSecurity class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class DebianSecurity(Project):
    """Debian Security mirror; uses the inherited Project.check comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Eclipse class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Eclipse(Project):
    """Eclipse mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Fedora class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Fedora(Project):
    """Fedora mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains FreeBSD class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class FreeBSD(Project):
    """FreeBSD mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains GentooDistfiles class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class GentooDistfiles(Project):
    """Gentoo distfiles mirror; uses the inherited Project.check comparison."""
|
|
@ -1,49 +0,0 @@
|
|||
"""
|
||||
Contains GentooPortage class
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from project import Project
|
||||
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
|
||||
class GentooPortage(Project):
    """GentooPortage class.

    Scrapes Gentoo's mirror status page for the lag reported against the
    rsync4.ca.gentoo.org entry.
    """

    @staticmethod
    def check(data, project, current_time):
        """Return True when the reported lag is within the grace interval.

        An earlier rsync/diff-based implementation (Linux-only) is preserved
        below for reference:

        # rsync_command = "rsync -q {}{} {}"
        # os.system(rsync_command.format(data[project]["csc"],
        #                                data[project]["file"],
        #                                "csc_manifest"))
        # os.system(rsync_command.format(data[project]["upstream1"],
        #                                data[project]["file"],
        #                                "upstream_manifest1"))
        # os.system(rsync_command.format(data[project]["upstream2"],
        #                                data[project]["file"],
        #                                "upstream_manifest2"))
        # stream1 = os.popen("diff csc_manifest upstream_manifest1")
        # output1 = stream1.read()
        # stream2 = os.popen("diff csc_manifest upstream_manifest2")
        # output2 = stream2.read()
        # os.system("rm csc_manifest")
        # os.system("rm upstream_manifest1")
        # os.system("rm upstream_manifest2")
        # return 0 in [len(output1), len(output2)]
        """

        # i'm changing the above code to the bottom one, since the above one only works in linux
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("rsync4.ca.gentoo.org")

        # Lag is rendered as "N minutes"/"N hours"/"N.N days" after the entry.
        m = re.search(r'(\d+ minutes?)|(\d+ hours?)|(\d+(\.)?\d+ days?)', page[indexOfFile:])

        duration = pd.to_timedelta(m.group(0))
        data[project]["out_of_sync_since"] = current_time - duration.total_seconds()

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')
|
|
@ -1,44 +0,0 @@
|
|||
"""
|
||||
Contains GNOME class
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
|
||||
class GNOME(Project):
    """GNOME class.

    Walks two levels of version directories (latest major, then latest
    point release) and compares a checksum file against up to two upstream
    mirrors.
    """

    @staticmethod
    def check(data, project, current_time):
        """Return True when the CSC copy of the latest release's tracked
        file matches either upstream2 or upstream3."""
        file = data[project]["file1"]
        # First level: latest major version listed in the index pages.
        csc_versions = requests.get(CSC_MIRROR + data[project]["csc"] + file).text
        upstream_versions = requests.get(data[project]["upstream1"] + file).text
        # Version strings appear quoted in the HTML index; take the last.
        csc_latest = re.findall(r"\"\d+\.?\d*", csc_versions)[-1].lstrip('"')
        upstream_latest = re.findall(r"\"\d+\.?\d*", upstream_versions)[-1].lstrip('"')
        if csc_latest != upstream_latest:
            return False
        file += csc_latest + "/"
        # Second level: latest point release within the major version.
        csc_versions = requests.get(CSC_MIRROR + data[project]["csc"] + file).text
        upstream_versions = requests.get(data[project]["upstream1"] + file).text
        csc_latest = re.findall(r"\"\d+\.?\w*\.?\w*", csc_versions)[-1].lstrip('"')
        upstream_latest = re.findall(r"\"\d+\.?\w*\.?\w*", upstream_versions)[-1].lstrip('"')
        if csc_latest != upstream_latest:
            return False
        file += csc_latest + "/"
        # Finally compare the tracked file itself; either upstream mirror
        # matching counts as in-sync, and an unreachable upstream is not
        # held against the CSC mirror unless both fail.
        csc_text = requests.get(CSC_MIRROR + data[project]["csc"] + file
                                + data[project]["file2"]).text
        try:
            ret = csc_text == requests.get(data[project]["upstream2"] + file
                                           + data[project]["file2"]).text
        except requests.exceptions.RequestException:
            ret = False
        try:
            return ret or csc_text == requests.get(data[project]["upstream3"] + file
                                                   + data[project]["file2"]).text
        except requests.exceptions.RequestException:
            return False
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains GNU class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class GNU(Project):
    """GNU mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Gutenberg class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Gutenberg(Project):
    """Project Gutenberg mirror; uses the inherited Project.check comparison."""
|
|
@ -1,17 +0,0 @@
|
|||
"""
|
||||
Contains IPFire class
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class IPFire(Project):
    """IPFire class.

    Delegates entirely to IPFire's own mirror tracker, which reports the
    CSC mirror's health on a per-mirror status page.
    """

    @staticmethod
    def check(data, project, current_time):
        """Return True when the tracker page says the CSC mirror is up."""
        status_url = "https://mirrors.ipfire.org/mirrors/mirror.csclub.uwaterloo.ca"
        status_page = requests.get(status_url).text
        # The tracker renders this phrase only for a healthy mirror.
        return "The mirror is up" in status_page
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains KDE class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class KDE(Project):
    """KDE mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains KDEApplicationData class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class KDEApplicationData(Project):
    """KDE application data mirror; uses the inherited Project.check comparison."""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains Kernel class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class Kernel(Project):
    """kernel.org mirror; uses the inherited Project.check file comparison."""
|
|
@ -1,66 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class linuxmint(Project):
    """linuxmint class.

    Recursively crawls the upstream directory tree and, wherever a
    sha256sum.txt is found, compares its index timestamp against the CSC
    mirror's copy.
    """
    @staticmethod
    def checker(directory_URL, file_name):
        """Return the directory-index timestamp next to file_name, or False
        if the file is not listed.

        NOTE(review): assumes the listing shows dates as DD-Mon-YYYY HH:MM
        or YYYY-MM-DD HH:MM; raises IndexError if neither appears.
        """
        page = requests.get(directory_URL).text
        file_index = page.find(file_name)
        # print(page)

        if file_index == -1:
            return False

        str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])

        # findall returns tuples (one slot per alternative); join picks the
        # non-empty one, and datefinder parses it into a datetime.
        return list(datefinder.find_dates("".join(str_dates[0])))[0]

    @classmethod
    def scrape(cls, compare, folders, site1, site2, directory):
        """Depth-first crawl from site1/directory, appending per-directory
        comparison results (site1 timestamp <= site2 timestamp) to compare.

        site1 is upstream and site2 the CSC mirror in the only caller.
        """
        if cls.checker(site1+directory, "sha256sum.txt") != False:
            # print (site1+directory)
            # print (cls.checker(site1+directory, "sha256sum.txt"))
            if cls.checker(site2+directory, "sha256sum.txt") != False:
                # print (site2+directory)
                # print (cls.checker(site2+directory, "sha256sum.txt"))
                # Upstream must not be newer than the mirror's copy.
                compare.append(cls.checker(site1+directory, "sha256sum.txt") <= cls.checker(site2+directory, "sha256sum.txt"))
                return
            # Present upstream but missing on the mirror: out of sync.
            compare.append(False)
            return

        # getting the request from url
        r = requests.get(site1 + directory)

        # converting the text
        s = BeautifulSoup(r.text,"html.parser")

        for i in s.find_all("a"): # for a href directories
            href = i.attrs['href']

            # Only descend into relative subdirectories.
            if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/"):
                dir_next = directory+href
                # print(dir_next)
                # calling it self
                if dir_next not in folders:
                    folders.append(dir_next)
                    cls.scrape(compare, folders, site1, site2, dir_next)

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        # Accumulators shared across the recursive crawl.
        compare=[]
        folders=[]

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # calling function
        cls.scrape(compare, folders, upstream_url, csc_url, "")

        # In sync only if every discovered sha256sum.txt compared clean.
        return all(compare)
|
|
@ -1,55 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
# this function is brute force looping through the whole directory and checking dates
|
||||
# it may sound horrible, but for certain distros, i believe it's indeed the best solution
|
||||
|
||||
# this function is brute force looping through the whole directory and checking dates
# it may sound horrible, but for certain distros, i believe it's indeed the best solution

class linuxmint_packages(Project):
    """linuxmint_packages class.

    Collects the Release file URL of every first-level subdirectory on both
    mirrors and compares the file contents pairwise.
    """
    @staticmethod
    def scrape(urls, site):
        """Append site/<subdir>/Release for each subdirectory linked from
        site's index page to urls (skipping duplicates)."""
        # getting the request from url
        r = requests.get(site)

        # converting the text
        s = BeautifulSoup(r.text,"html.parser")

        # salt stack specific code
        # s = s.find("div", {"id": "listing"})
        # print(s)

        for i in s.find_all("a"): # for a href directories
            href = i.attrs['href']

            if href.endswith("/") and href != "../" and href != "/":
                site_next = site+href+"Release"

                if site_next not in urls:
                    urls.append(site_next)
                    # print(site_next)

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        # Release-file URL lists for the CSC mirror and upstream.
        urls1=[]
        urls2=[]

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # calling function
        cls.scrape(urls1, csc_url)
        cls.scrape(urls2, upstream_url)

        # Differing directory sets means the mirror is missing (or has
        # extra) suites entirely.
        if (len(urls1) != len(urls2)):
            return False
        urls1.sort()
        urls2.sort()
        for index, f in enumerate(urls1):
            if requests.get(f).text != requests.get(urls2[index]).text:
                # comparing the file content bc that's how the base class does it, but we can speed it up by just comparing the dates
                return False
        return True
|
|
@ -1,37 +0,0 @@
|
|||
import requests
|
||||
import re # import regular expressions to remove stray numbers in string that might interfere with date finding
|
||||
import json # import json to read project info stored in json file
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
import datefinder # another date finding library
|
||||
|
||||
class macports(Project):
    """macports class.

    Compares the directory-index timestamp of one tracked file between the
    CSC mirror and upstream.
    """

    # checker: gets the timestamp of the file inside the directory at the specified URL and returns it as a string
    @staticmethod
    def checker(directory_URL, file_name):
        """Return the first date found in the index listing after file_name.

        NOTE(review): raises IndexError when no date is found, and scans
        from the start of the page if file_name is absent (find returns -1).
        """
        page = requests.get(directory_URL).text
        file_index = page.find(file_name)
        # print(page)

        # remove stray numbers (file size numbers in particular) that might interfere with date finding
        segment_clean = re.sub(r'\s\d+\s', ' ', page[file_index:])  # removes numbers for size
        # FIX: this second pass previously re-ran on the raw page slice,
        # silently discarding the first substitution; chain it instead.
        segment_clean = re.sub(r'\s\d+\w*\s', ' ', segment_clean)  # removes numbers + size unit. e.x. 50kb
        # print(segment_clean)

        # finds the dates in the segment after the file name
        # notes: a generator will be returned by the datefinder module. I'm typecasting it to a list. Please read the note of caution provided at the bottom.
        matches = list(datefinder.find_dates(segment_clean))

        # print(matches[0])
        return matches[0]

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        csc_url = CSC_MIRROR + data[project]["csc"]
        upstream_url = data[project]["upstream"]
        file_name = data[project]["file"]

        return cls.checker(csc_url, file_name) == cls.checker(upstream_url, file_name)
|
|
@ -1,27 +0,0 @@
|
|||
"""
|
||||
Contains manjaro class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class manjaro(Project):
    """manjaro class.

    Scrapes Manjaro's mirror status page for the CSC mirror's reported lag,
    which is rendered as HH:MM.
    """
    @staticmethod
    def check(data, project, current_time):
        """Return True when the reported lag is within the grace interval.

        NOTE(review): if the HH:MM pattern is absent, m is None and
        m.groupdict() raises AttributeError.
        """
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("mirror.csclub.uwaterloo.ca/manjaro")

        m = re.search(r'(?P<hours>\d+):(?P<minutes>\d+)', page[indexOfFile:]) # solution from: https://stackoverflow.com/questions/21074100/how-to-convert-standard-timedelta-string-to-timedelta-object/21074460
        # Named groups map directly onto timedelta keyword arguments.
        duration = timedelta(**{key: float(val) for key, val in m.groupdict().items()})
        data[project]["out_of_sync_since"] = current_time - duration.total_seconds()

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')

# https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-archive
|
|
@ -1,26 +0,0 @@
|
|||
"""
|
||||
Contains mxlinux class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class mxlinux(Project):
    """mxlinux class.

    Scrapes the MX Linux mirror status page for the CSC mirror's lag.
    """
    @staticmethod
    def check(data, project, current_time):
        """Return True when the reported lag is within the grace interval.

        NOTE(review): if the lag phrase is absent, m is None and m.group(0)
        raises AttributeError.
        """
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("mirror.csclub.uwaterloo.ca")

        # Lag is rendered as e.g. "5 hours" or "1.5 days" after the mirror name.
        m = re.search(r'(\d+ hour)|(\d+ hours)|(\d+(\.)?\d+ days)', page[indexOfFile:]) # solution from: https://stackoverflow.com/questions/21074100/how-to-convert-standard-timedelta-string-to-timedelta-object/21074460

        duration = pd.to_timedelta(m.group(0))
        data[project]["out_of_sync_since"] = current_time - duration.total_seconds()

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')
|
|
@ -1,61 +0,0 @@
|
|||
"""
|
||||
Contains mxlinux_iso class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class mxlinux_iso(Project):
    """mxlinux_iso class.

    Compares the TIME.txt timestamp on the CSC mirror against a list of
    peer mirrors from the project config.
    """
    # this method is to check the mirror tracker, but unfortunately, the mirror tracker is behaving a bit strange so we check with other mirrors
    """@staticmethod
    def check(data, project, current_time):
        page = requests.get(data[project]["upstream"]).text
        indexOfFile = page.find("mirror.csclub.uwaterloo.ca")

        m = re.search(r'(\d+ hour)|(\d+ hours)|(\d+(\.)?\d+ days)', page[indexOfFile:]) # solution from: https://stackoverflow.com/questions/21074100/how-to-convert-standard-timedelta-string-to-timedelta-object/21074460

        duration = pd.to_timedelta(m.group(0))

        return duration <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s')"""

    @staticmethod
    def checker(directory_URL, file_name):
        """Return the directory-index timestamp next to file_name, or False
        if the file is not listed.

        NOTE(review): raises IndexError when no recognizable date follows
        the file name in the listing.
        """
        page = requests.get(directory_URL).text
        file_index = page.find(file_name)
        # print(page)

        if file_index == -1:
            return False

        str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\w{3}-\d{2} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])

        # findall returns tuples (one slot per alternative); join picks the
        # non-empty one, and datefinder parses it into a datetime.
        return list(datefinder.find_dates("".join(str_dates[0])))[0]

    @classmethod
    def check_iso(cls, site, mirrors):
        """Return False if any peer mirror has a newer TIME.txt than site."""
        for mirror in mirrors:
            # print(cls.checker(site, "md5sum.txt"))
            # print(cls.checker(mirror, "md5sum.txt"))
            if cls.checker(site, "TIME.txt") < cls.checker(mirror, "TIME.txt"):
                return False
        return True

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        # NOTE(review): upstream_url is computed but unused; the comparison
        # is against the configured peer mirrors below.
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # print(cls.check_iso(upstream_url+"slackware-iso/", csc_url+"slackware-iso/"))
        mirrors = data[project]["mirrors"]

        return cls.check_iso(csc_url, mirrors)
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains mySQL class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class mySQL(Project):
|
||||
"""mySQL class"""
|
|
@ -1,91 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class netbsd(Project):
|
||||
"""netbsd class"""
|
||||
@staticmethod
|
||||
def checker(directory_URL, file_name):
|
||||
page = requests.get(directory_URL).text
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\w{3}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
# print(directory_URL, file_name)
|
||||
# print(list(datefinder.find_dates("".join(str_dates[0])))[0])
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]
|
||||
|
||||
@classmethod
|
||||
def check_version(cls, site1, site2):
|
||||
# getting the request from url
|
||||
r = requests.get(site1)
|
||||
r1 = requests.get(site2)
|
||||
|
||||
page1 = r.text
|
||||
page2 = r1.text
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(page1,"html.parser")
|
||||
s2 = BeautifulSoup(page2,"html.parser")
|
||||
|
||||
hrefs1 = s1.find_all("a")
|
||||
hrefs2 = s2.find_all("a")
|
||||
|
||||
for i in hrefs1: # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if re.match(r'NetBSD-\d.*', href):
|
||||
date1 = cls.checker(site1+href, "CHANGES")
|
||||
if not date1: # if the version is empty, ignore it
|
||||
continue
|
||||
if (href not in [i.attrs['href'] for i in hrefs2]) or (date1 > cls.checker(site2+href, "CHANGES")):
|
||||
return False
|
||||
elif href.startswith("NetBSD-") and href != "NetBSD-daily/":
|
||||
date1 = cls.checker(site1+href+"src/doc/", "CHANGES")
|
||||
if not date1:
|
||||
continue
|
||||
if (href not in [i.attrs['href'] for i in hrefs2]) or (date1 > cls.checker(site2+href+"src/doc/", "CHANGES")):
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def check_iso(cls, site1, site2):
|
||||
# getting the request from url
|
||||
r = requests.get(site1)
|
||||
r1 = requests.get(site2)
|
||||
|
||||
page1 = r.text
|
||||
page2 = r1.text
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(page1,"html.parser")
|
||||
s2 = BeautifulSoup(page2,"html.parser")
|
||||
|
||||
hrefs1 = s1.find_all("a")
|
||||
hrefs2 = s2.find_all("a")
|
||||
|
||||
for i in hrefs1: # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if (href not in [i.attrs['href'] for i in hrefs2]) or (cls.checker(site1+href, "SHA512") > cls.checker(site2+href, "SHA512")) or (cls.checker(site1+href, "MD5") > cls.checker(site2+href, "MD5")):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# print(cls.check_version(upstream_url, csc_url))
|
||||
# print(cls.check_iso(upstream_url+"iso/", csc_url+"iso/"))
|
||||
return cls.check_version(upstream_url, csc_url) and cls.check_iso(upstream_url+"iso/", csc_url+"iso/")
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains nongnu class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class nongnu(Project):
|
||||
"""nongnu class"""
|
|
@ -1,9 +0,0 @@
|
|||
"""
|
||||
Contains OpenBSD class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
|
||||
class OpenBSD(Project):
|
||||
"""OpenBSD class"""
|
|
@ -1,66 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class opensuse(Project):
|
||||
"""opensuse class"""
|
||||
@staticmethod
|
||||
def checker(directory_URL, file_name):
|
||||
page = requests.get(directory_URL).text
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]
|
||||
|
||||
@classmethod
|
||||
def scrape(cls, compare, folders, site1, site2, directory):
|
||||
if cls.checker(site1+directory, ".repo") != False:
|
||||
# print (site1+directory)
|
||||
# print (cls.checker(site1+directory, ".repo"))
|
||||
if cls.checker(site2+directory, ".repo") != False:
|
||||
# print (site2+directory)
|
||||
# print (cls.checker(site2+directory, ".repo"))
|
||||
compare.append(cls.checker(site1+directory, ".repo") <= cls.checker(site2+directory, ".repo"))
|
||||
return
|
||||
compare.append(False)
|
||||
return
|
||||
|
||||
# getting the request from url
|
||||
r = requests.get(site1 + directory)
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(r.text,"html.parser")
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/") and href != "tumbleweed-non-oss/" and href != "tumbleweed/":
|
||||
dir_next = directory+href
|
||||
# print(dir_next)
|
||||
# calling it self
|
||||
if dir_next not in folders:
|
||||
folders.append(dir_next)
|
||||
cls.scrape(compare, folders, site1, site2, dir_next)
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
# lists
|
||||
compare=[]
|
||||
folders=[]
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
cls.scrape(compare, folders, upstream_url, csc_url, "")
|
||||
|
||||
return all(compare)
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains parabola class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class parabola(Project):
|
||||
"""parabola class"""
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains pkgsrc class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class pkgsrc(Project):
|
||||
"""pkgsrc class"""
|
|
@ -1,79 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class puppy_linux(Project):
|
||||
"""puppy_linux class"""
|
||||
@staticmethod
|
||||
def checker(page, file_name):
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\w{3}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]
|
||||
|
||||
@classmethod
|
||||
def scrape(cls, compare, folders, site1, site2, directory):
|
||||
# getting the request from url
|
||||
r = requests.get(site1 + directory)
|
||||
r1 = requests.get(site2 + directory)
|
||||
|
||||
page1 = r.text
|
||||
page2 = r1.text
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(page1,"html.parser")
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith(".iso"):
|
||||
date1 = cls.checker(page1, href)
|
||||
if date1 != False:
|
||||
# print (site1+directory)
|
||||
# print (date1)
|
||||
date2 = cls.checker(page2, href)
|
||||
if date2 != False:
|
||||
# print (site2+directory)
|
||||
# print (date2)
|
||||
compare.append(date1 <= date2)
|
||||
return
|
||||
compare.append(False)
|
||||
return
|
||||
elif href.endswith("/") and (href.startswith("puppy-") or directory != "") and href != "../" and href != "/" and not href.startswith("/"):
|
||||
dir_next = directory+href
|
||||
# print(dir_next)
|
||||
# calling it self
|
||||
if dir_next not in folders:
|
||||
folders.append(dir_next)
|
||||
cls.scrape(compare, folders, site1, site2, dir_next)
|
||||
elif href.endswith(".htm") or href == "Packages.gz":
|
||||
# print(href)
|
||||
date2 = cls.checker(page2, href)
|
||||
if date2 != False:
|
||||
compare.append(cls.checker(page1, href) <= date2)
|
||||
continue
|
||||
compare.append(False)
|
||||
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
# lists
|
||||
compare=[]
|
||||
folders=[]
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
cls.scrape(compare, folders, upstream_url, csc_url, "")
|
||||
|
||||
return all(compare)
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains qtproject class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class qtproject(Project):
|
||||
"""qtproject class"""
|
|
@ -1,74 +0,0 @@
|
|||
import requests
|
||||
import re
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import itertools
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
class racket(Project):
|
||||
"""racket class"""
|
||||
@staticmethod
|
||||
def max_version(processed_versions):
|
||||
latest_version = processed_versions[0]
|
||||
for version in processed_versions:
|
||||
if len(latest_version) > len(version):
|
||||
latest_version_portion = itertools.islice(latest_version, len(version))
|
||||
comparisons = list(map(lambda p, q: p == q, latest_version_portion, version))
|
||||
if False in comparisons:
|
||||
index = comparisons.index(False)
|
||||
if latest_version[index] < version[index]:
|
||||
latest_version = version
|
||||
if len(latest_version) < len(version):
|
||||
version_portion = itertools.islice(version, len(latest_version))
|
||||
comparisons = list(map(lambda p, q: p == q, version_portion, latest_version))
|
||||
if False in comparisons:
|
||||
index = comparisons.index(False)
|
||||
if latest_version[index] < version[index]:
|
||||
latest_version = version
|
||||
latest_version = version
|
||||
if len(latest_version) == len(version):
|
||||
comparisons = list(map(lambda p, q: p == q, version, latest_version))
|
||||
if False in comparisons:
|
||||
index = comparisons.index(False)
|
||||
if latest_version[index] < version[index]:
|
||||
latest_version = version
|
||||
return latest_version
|
||||
|
||||
@staticmethod
|
||||
def scrape(versions, site):
|
||||
# getting the request from url
|
||||
r = requests.get(site)
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(r.text,"html.parser")
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith("/") and href != "../" and href != "/" and href != "recent/":
|
||||
if href not in versions:
|
||||
versions.append(href)
|
||||
|
||||
@classmethod
|
||||
def get_latest_version(cls, web_dir):
|
||||
page = requests.get(web_dir).text
|
||||
|
||||
versions = []
|
||||
cls.scrape(versions, web_dir)
|
||||
|
||||
processed_versions = []
|
||||
for version in versions:
|
||||
# print(version)
|
||||
processed_versions.append(version.replace("/", "").split("."))
|
||||
# print(cls.max_version(processed_versions))
|
||||
|
||||
return(cls.max_version(processed_versions))
|
||||
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
return cls.get_latest_version(csc_url) == cls.get_latest_version(upstream_url)
|
|
@ -1,55 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
# this function is brute force looping through the whole directory and checking dates
|
||||
# it may sound horrible, but for certain distros, i believe it's indeed the best solution
|
||||
|
||||
class raspberrypi(Project):
|
||||
"""raspberrypi class"""
|
||||
@staticmethod
|
||||
def scrape(urls, site):
|
||||
# getting the request from url
|
||||
r = requests.get(site)
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(r.text,"html.parser")
|
||||
|
||||
# salt stack specific code
|
||||
# s = s.find("div", {"id": "listing"})
|
||||
# print(s)
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith("/") and href != "../" and href != "/":
|
||||
site_next = site+href+"Release"
|
||||
|
||||
if site_next not in urls:
|
||||
urls.append(site_next)
|
||||
# print(site_next)
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
# lists
|
||||
urls1=[]
|
||||
urls2=[]
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
cls.scrape(urls1, csc_url)
|
||||
cls.scrape(urls2, upstream_url)
|
||||
|
||||
if (len(urls1) != len(urls2)):
|
||||
return False
|
||||
urls1.sort()
|
||||
urls2.sort()
|
||||
for index, f in enumerate(urls1):
|
||||
if requests.get(f).text != requests.get(urls2[index]).text:
|
||||
# comparing the file content bc that's how the base class does it, but we can speed it up by just comparing the dates
|
||||
return False
|
||||
return True
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains raspbian class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class raspbian(Project):
|
||||
"""raspbian class"""
|
|
@ -1,42 +0,0 @@
|
|||
import requests
|
||||
import datefinder # another date finding library
|
||||
import re
|
||||
from datetime import datetime
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
|
||||
class sage(Project):
|
||||
"""sagemath class"""
|
||||
|
||||
@staticmethod
|
||||
def get_latest_date(dates):
|
||||
dates = [list(datefinder.find_dates(date))[0] for date in dates]
|
||||
return max(dates)
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
page1 = requests.get(CSC_MIRROR + data[project]["csc"] + data[project]["file"]).text
|
||||
page2 = requests.get("http://mirrors.mit.edu/sage/src/index.html").text
|
||||
page3 = requests.get("https://mirror.rcg.sfu.ca/mirror/sage/src/index.html").text
|
||||
|
||||
CSC_dates = re.findall(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page1)
|
||||
MIT_dates = re.findall(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page2)
|
||||
SFU_dates = re.findall(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page3)
|
||||
|
||||
# print(len(CSC_dates))
|
||||
# print(len(MIT_dates))
|
||||
# print(len(SFU_dates))
|
||||
# print(cls.get_latest_date(CSC_dates))
|
||||
# print(cls.get_latest_date(MIT_dates))
|
||||
# print(cls.get_latest_date(SFU_dates))
|
||||
|
||||
if len(CSC_dates) < max([len(MIT_dates), len(SFU_dates)]):
|
||||
return False
|
||||
elif len(CSC_dates) > max([len(MIT_dates), len(SFU_dates)]):
|
||||
# if we have more entries than their mirror, ours must be the new one
|
||||
# since distros only add new versions, and don't delete old versions
|
||||
return True
|
||||
if cls.get_latest_date(CSC_dates) < max([cls.get_latest_date(MIT_dates), cls.get_latest_date(SFU_dates)]):
|
||||
return False
|
||||
return True
|
|
@ -1,26 +0,0 @@
|
|||
import requests
|
||||
import datefinder # another date finding library
|
||||
import re
|
||||
from datetime import datetime
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
|
||||
class saltstack(Project):
|
||||
"""saltstack class"""
|
||||
|
||||
@staticmethod
|
||||
def check(data, project, current_time):
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
page1 = requests.get(csc_url).text
|
||||
page2 = requests.get(upstream_url).text
|
||||
|
||||
CSC_release = re.search(r'Latest release: (\d)+.(\d)+ \((.+)\)', page1)
|
||||
upstream_release = re.search(r'Latest release: (\d)+.(\d)+ \((.+)\)', page2)
|
||||
|
||||
# print(CSC_release.group(0))
|
||||
# print(upstream_release.group(0))
|
||||
|
||||
return CSC_release.group(0) == upstream_release.group(0)
|
|
@ -1,74 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class slackware(Project):
|
||||
"""slackware class"""
|
||||
@staticmethod
|
||||
def checker(directory_URL, file_name):
|
||||
page = requests.get(directory_URL).text
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]
|
||||
|
||||
@classmethod
|
||||
def scrape(cls, site1, site2):
|
||||
# getting the request from url
|
||||
r1 = requests.get(site1)
|
||||
r2 = requests.get(site2)
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(r1.text,"html.parser")
|
||||
s2 = BeautifulSoup(r2.text,"html.parser")
|
||||
|
||||
hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
|
||||
hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
|
||||
|
||||
for href in hrefs1: # for a href directories
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/") and not re.match(r'slackware-([1-7]|8\.0).*', href) and href != "slackware-iso/" and href != "slackware-current/" and href != "slackware-pre-1.0-beta/" and href != "unsupported/":
|
||||
# print(href)
|
||||
if href not in hrefs2:
|
||||
return False
|
||||
elif cls.checker(site1+href, "CHECKSUMS.md5") != cls.checker(site2+href, "CHECKSUMS.md5"):
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def check_iso(site1, site2):
|
||||
# getting the request from url
|
||||
r1 = requests.get(site1)
|
||||
r2 = requests.get(site2)
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(r1.text,"html.parser")
|
||||
s2 = BeautifulSoup(r2.text,"html.parser")
|
||||
|
||||
hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
|
||||
hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
|
||||
|
||||
for href in hrefs1: # for a href directories
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/") and not href.startswith("http"):
|
||||
# print(href)
|
||||
if href not in hrefs2:
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# print(cls.check_iso(upstream_url+"slackware-iso/", csc_url+"slackware-iso/"))
|
||||
|
||||
return cls.scrape(upstream_url, csc_url) and cls.check_iso(upstream_url+"slackware-iso/", csc_url+"slackware-iso/")
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains tdf class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class tdf(Project):
|
||||
"""tdf class"""
|
|
@ -1,67 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class trisquel(Project):
|
||||
"""trisquel class"""
|
||||
@staticmethod
|
||||
def checker(directory_URL, file_name):
|
||||
page = requests.get(directory_URL).text
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2,4}-\w{3}-\d{2,4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]# if len(str_dates) > 0 else None
|
||||
|
||||
@classmethod
|
||||
def scrape(cls, site1, site2):
|
||||
# getting the request from url
|
||||
r1 = requests.get(site1)
|
||||
r2 = requests.get(site2)
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(r1.text,"html.parser")
|
||||
s2 = BeautifulSoup(r2.text,"html.parser")
|
||||
|
||||
hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
|
||||
hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
|
||||
|
||||
for href in hrefs1: # for a href directories
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/"):
|
||||
# print(href)
|
||||
if href not in hrefs2:
|
||||
return False
|
||||
elif cls.checker(site1+href, "Release") > cls.checker(site2+href, "Release"):
|
||||
# print(cls.checker(site1+href, "Release"))
|
||||
# print(cls.checker(site2+href, "Release"))
|
||||
# print(cls.checker(site1+href, "Release") > cls.checker(site2+href, "Release"))
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def check_iso(cls, site, mirrors):
|
||||
for mirror in mirrors:
|
||||
# print(cls.checker(site, "md5sum.txt"))
|
||||
# print(cls.checker(mirror, "md5sum.txt"))
|
||||
if cls.checker(site, "md5sum.txt") < cls.checker(mirror, "md5sum.txt"):
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# print(cls.check_iso(upstream_url+"slackware-iso/", csc_url+"slackware-iso/"))
|
||||
mirrors = data[project]["mirrors"]
|
||||
|
||||
return cls.scrape(upstream_url, csc_url+"packages/dists/") and cls.check_iso(csc_url+"iso/", mirrors)
|
|
@ -1,20 +0,0 @@
|
|||
"""
|
||||
Contains ubuntu class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
from datetime import datetime
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class ubuntu(Project):
|
||||
"""ubuntu class"""
|
||||
@staticmethod
|
||||
def check(data, project, current_time):
|
||||
page = requests.get(data[project]["upstream"]).text
|
||||
return page.count("Up to date") == 21
|
|
@ -1,39 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class ubuntu_ports(Project):
|
||||
"""ubuntu_ports class"""
|
||||
|
||||
@staticmethod
|
||||
def scrape(site1, site2):
|
||||
# getting the request from url
|
||||
r1 = requests.get(site1)
|
||||
r2 = requests.get(site2)
|
||||
|
||||
# converting the text
|
||||
s1 = BeautifulSoup(r1.text,"html.parser")
|
||||
s2 = BeautifulSoup(r2.text,"html.parser")
|
||||
|
||||
hrefs1 = [i.attrs['href'] for i in s1.find_all("a")]
|
||||
hrefs2 = [i.attrs['href'] for i in s2.find_all("a")]
|
||||
|
||||
for href in hrefs1: # for a href directories
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/"):
|
||||
# print(href)
|
||||
if href not in hrefs2:
|
||||
return False
|
||||
elif requests.get(site1+href+"Release").text != requests.get(site2+href+"Release").text:
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
return cls.scrape(upstream_url, csc_url)
|
|
@ -1,76 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class ubuntu_ports_releases(Project):
|
||||
"""ubuntu_ports_releases class"""
|
||||
@staticmethod
|
||||
def checker(directory_URL, file_name):
|
||||
page = requests.get(directory_URL).text
|
||||
file_index = page.find(file_name)
|
||||
# print(page)
|
||||
|
||||
if file_index == -1:
|
||||
return False
|
||||
|
||||
str_dates = re.findall(r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})', page[file_index:])
|
||||
|
||||
return list(datefinder.find_dates("".join(str_dates[0])))[0]
|
||||
|
||||
@classmethod
|
||||
def scrape(cls, compare, folders, site1, site2, directory):
|
||||
if cls.checker(site1+directory, "MD5SUMS") != False:
|
||||
# print (site1+directory)
|
||||
# print (cls.checker(site1+directory, "MD5SUMS"))
|
||||
if cls.checker(site2+directory, "MD5SUMS") != False:
|
||||
# print (site2+directory)
|
||||
# print (cls.checker(site2+directory, "MD5SUMS"))
|
||||
compare.append(cls.checker(site1+directory, "MD5SUMS") <= cls.checker(site2+directory, "MD5SUMS"))
|
||||
return
|
||||
compare.append(False)
|
||||
return
|
||||
elif cls.checker(site1+directory, "SHA256SUMS") != False:
|
||||
# print (site1+directory)
|
||||
# print (cls.checker(site1+directory, "SHA256SUMS"))
|
||||
if cls.checker(site2+directory, "SHA256SUMS") != False:
|
||||
# print (site2+directory)
|
||||
# print (cls.checker(site2+directory, "SHA256SUMS"))
|
||||
compare.append(cls.checker(site1+directory, "SHA256SUMS") <= cls.checker(site2+directory, "SHA256SUMS"))
|
||||
return
|
||||
compare.append(False)
|
||||
return
|
||||
|
||||
# getting the request from url
|
||||
r = requests.get(site1 + directory)
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(r.text,"html.parser")
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/") and not href.startswith("http://"):
|
||||
dir_next = directory+href
|
||||
# print(dir_next)
|
||||
# calling it self
|
||||
if dir_next not in folders:
|
||||
folders.append(dir_next)
|
||||
cls.scrape(compare, folders, site1, site2, dir_next)
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
# lists
|
||||
compare=[]
|
||||
folders=[]
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
cls.scrape(compare, folders, upstream_url, csc_url, "")
|
||||
|
||||
return all(compare)
|
|
@ -1,26 +0,0 @@
|
|||
"""
|
||||
Contains ubuntu_releases class
|
||||
"""
|
||||
|
||||
import os
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
from datetime import timedelta
|
||||
from datetime import datetime
|
||||
import re
|
||||
import pandas as pd
|
||||
|
||||
class ubuntu_releases(Project):
|
||||
"""ubuntu_releases class"""
|
||||
@staticmethod
|
||||
def check(data, project, current_time):
|
||||
page = requests.get(data[project]["upstream"]).text
|
||||
indexOfFile = page.find("last verified")
|
||||
matches = list(datefinder.find_dates(page[indexOfFile:]))
|
||||
date = matches[0].replace(tzinfo=None) # date is of type datetime.datetime
|
||||
data[project]["out_of_sync_since"] = date.timestamp()
|
||||
return(pd.to_datetime(current_time, unit='s') - date <= pd.to_timedelta(data[project]["out_of_sync_interval"], unit='s'))
|
||||
|
||||
# https://launchpad.net/ubuntu/+mirror/mirror.csclub.uwaterloo.ca-release
|
|
@ -1,8 +0,0 @@
|
|||
"""
|
||||
Contains vlc class
|
||||
"""
|
||||
|
||||
from project import Project
|
||||
|
||||
class vlc(Project):
|
||||
"""vlc class"""
|
|
@ -1,48 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class x_org(Project):
|
||||
"""x.org class"""
|
||||
@classmethod
|
||||
def scrape(cls, files, site):
|
||||
# getting the request from url
|
||||
r = requests.get(site)
|
||||
|
||||
# converting the text
|
||||
s = BeautifulSoup(r.text,"html.parser")
|
||||
|
||||
for i in s.find_all("a"): # for a href directories
|
||||
href = i.attrs['href']
|
||||
|
||||
if href.endswith("/") and href != "../" and href != "/" and not href.startswith("/") and not href.startswith("xcb"):
|
||||
site_next = site+href
|
||||
|
||||
if href not in files:
|
||||
files.append(href)
|
||||
# print(href)
|
||||
# calling it self
|
||||
cls.scrape(files, site_next)
|
||||
elif href != "../" and href != "/" and not href.startswith("/") and not href.startswith("xcb") and href != "?C=N;O=D" and href != "?C=M;O=A" and href != "?C=S;O=A" and href != "?C=D;O=A":
|
||||
# print(href)
|
||||
files.append(href)
|
||||
|
||||
|
||||
@classmethod
|
||||
def check(cls, data, project, current_time):
|
||||
"""Check if project packages are up-to-date"""
|
||||
# lists
|
||||
files1=[]
|
||||
files2=[]
|
||||
|
||||
csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
|
||||
upstream_url = data[project]["upstream"] + data[project]["file"]
|
||||
|
||||
# calling function
|
||||
cls.scrape(files1, csc_url)
|
||||
cls.scrape(files2, upstream_url)
|
||||
|
||||
# print(set(files1) - set(files2))
|
||||
|
||||
return set(files1) == set(files2)
|
|
@ -1,99 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import datefinder # another date finding library
|
||||
import re
|
||||
from datetime import datetime
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
# this function is brute force looping through the whole directory and checking dates
|
||||
# it may sound horrible, but for certain distros, i believe it's indeed the best solution
|
||||
|
||||
class xiph(Project):
    """Mirror checker for xiph: compares release directories between the CSC
    mirror and upstream, preferring the SUMS.txt date when one exists."""

    @staticmethod
    def scrape(releases, site):
        """Collect release subdirectory names of ``site`` into ``releases``
        (mutated in place)."""
        # Timeout guards against a stalled server (original had none).
        r = requests.get(site, timeout=30)
        s = BeautifulSoup(r.text, "html.parser")

        for anchor in s.find_all("a"):
            # .get avoids a KeyError for anchors without an href.
            href = anchor.get("href")
            if href is None:
                continue
            if (href.endswith("/") and href not in ("../", "/", "/pub/xiph/")
                    and not href.startswith("http://")):
                if href not in releases:
                    releases.append(href)

    @staticmethod
    def get_latest_date(web_dir):
        """Return the newest modification date found on the index page.

        Returns datetime(1000, 1, 1) when the page contains no dates, so a
        date-less entry loses any comparison and is effectively discarded.
        """
        page = requests.get(web_dir, timeout=30).text

        # NOTE: with two alternated groups, findall returns (g1, g2) tuples
        # where one group is always empty — hence the "".join below.
        str_dates = re.findall(
            r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})',
            page)
        if len(str_dates) == 0:
            return datetime(1000, 1, 1)

        dates = [list(datefinder.find_dates("".join(date)))[0] for date in str_dates]
        return max(dates)

    @staticmethod
    def get_checksum_date(directory_URL):
        """Return the modification date listed next to the SUMS.txt entry."""
        page = requests.get(directory_URL, timeout=30).text
        file_index = page.find("SUMS.txt")

        # Remove stray numbers (file sizes, optionally with a unit, e.g.
        # "50kb") that might interfere with date finding.  BUG FIX: the
        # original discarded the result of the first substitution by running
        # the second one on page[file_index:] again; chain them instead.
        segment_clean = re.sub(r'\s\d+\s', ' ', page[file_index:])
        segment_clean = re.sub(r'\s\d+\w*\s', ' ', segment_clean)

        # datefinder returns a generator; take the first date after SUMS.txt.
        matches = list(datefinder.find_dates(segment_clean))
        return matches[0]

    @classmethod
    def compare_release(cls, csc_dir, upstream_dir):
        """Compare one release directory between mirror and upstream.

        Uses the SUMS.txt date when upstream provides that file, otherwise
        falls back to the newest date on each index page.
        """
        page = requests.get(upstream_dir, timeout=30).text
        if page.find("SUMS.txt") == -1:
            return cls.get_latest_date(csc_dir) == cls.get_latest_date(upstream_dir)
        return cls.get_checksum_date(csc_dir) == cls.get_checksum_date(upstream_dir)

    @classmethod
    def check_mirror(cls, csc_url, upstream_url, releases):
        """True iff every release directory matches upstream."""
        return all(
            cls.compare_release(csc_url + release, upstream_url + release)
            for release in releases
        )

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        releases1 = []  # releases on the CSC mirror
        releases2 = []  # releases upstream

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        cls.scrape(releases1, csc_url)
        cls.scrape(releases2, upstream_url)

        # The sets of release directories must agree before dates are compared.
        if set(releases1) != set(releases2):
            return False

        return cls.check_mirror(csc_url, upstream_url, releases2)
|
|
@ -1,76 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
import re
|
||||
import datefinder # another date finding library
|
||||
from project import Project
|
||||
from shared import CSC_MIRROR
|
||||
|
||||
class xubuntu_releases(Project):
    """Mirror checker for xubuntu releases: recursively compares the dates of
    MD5SUMS/SHA256SUMS files between upstream and the CSC mirror."""

    @staticmethod
    def checker(directory_URL, file_name):
        """Return the date listed next to ``file_name`` on the index page at
        ``directory_URL``, or False when the file is not listed."""
        # Timeout guards against a stalled server (original had none).
        page = requests.get(directory_URL, timeout=30).text
        file_index = page.find(file_name)
        if file_index == -1:
            return False

        # findall with two alternated groups returns (g1, g2) tuples where
        # one group is always empty — hence the "".join below.
        str_dates = re.findall(
            r'(\d{2}-\w{3}-\d{4} \d{2}:\d{2})|(\d{4}-\d{2}-\d{2} \d{2}:\d{2})',
            page[file_index:])
        return list(datefinder.find_dates("".join(str_dates[0])))[0]

    @classmethod
    def scrape(cls, compare, folders, site1, site2, directory):
        """Walk ``directory`` under ``site1`` (upstream), appending to
        ``compare`` whether ``site2`` (the mirror) is at least as new.

        ``compare`` and ``folders`` are mutated in place; ``folders`` acts as
        a visited set to avoid rescanning directories.
        """
        # Try each checksum flavor in turn.  Each checker() result is cached
        # in a local: the original re-fetched the same URL up to three times
        # per directory.
        for sums_name in ("MD5SUMS", "SHA256SUMS"):
            upstream_date = cls.checker(site1 + directory, sums_name)
            if upstream_date != False:
                mirror_date = cls.checker(site2 + directory, sums_name)
                if mirror_date != False:
                    # Mirror is current iff its checksum file is not older.
                    compare.append(upstream_date <= mirror_date)
                    return
                # Checksum file exists upstream but is missing on the mirror.
                compare.append(False)
                return

        # No checksum file here: recurse into subdirectories of site1.
        r = requests.get(site1 + directory, timeout=30)
        s = BeautifulSoup(r.text, "html.parser")

        for anchor in s.find_all("a"):
            # .get avoids a KeyError for anchors without an href.
            href = anchor.get("href")
            if href is None:
                continue
            if (href.endswith("/") and href not in ("../", "/")
                    and not href.startswith("/")
                    and not href.startswith("http://")):
                dir_next = directory + href
                if dir_next not in folders:
                    folders.append(dir_next)
                    cls.scrape(compare, folders, site1, site2, dir_next)

    @classmethod
    def check(cls, data, project, current_time):
        """Check if project packages are up-to-date"""
        compare = []  # one boolean per directory that has a checksum file
        folders = []  # visited directories (duplicate guard)

        csc_url = CSC_MIRROR + data[project]["csc"] + data[project]["file"]
        upstream_url = data[project]["upstream"] + data[project]["file"]

        # NOTE: upstream is passed as site1 and the mirror as site2.
        cls.scrape(compare, folders, upstream_url, csc_url, "")

        return all(compare)
|
|
@ -1,14 +0,0 @@
|
|||
beautifulsoup4==4.10.0
|
||||
certifi==2021.10.8
|
||||
charset-normalizer==2.0.7
|
||||
datefinder==0.7.1
|
||||
idna==3.3
|
||||
numpy==1.21.2
|
||||
pandas==1.3.4
|
||||
python-dateutil==2.8.2
|
||||
pytz==2021.3
|
||||
regex==2021.10.8
|
||||
requests==2.26.0
|
||||
six==1.16.0
|
||||
soupsieve==2.2.1
|
||||
urllib3==1.26.7
|
|
@ -1,3 +0,0 @@
|
|||
"""Contains shared constants"""
|
||||
|
||||
CSC_MIRROR = "http://mirror.csclub.uwaterloo.ca/"
|
13
test.py
13
test.py
|
@ -1,13 +0,0 @@
|
|||
"""
|
||||
Test Client for individual classes in projects
|
||||
"""
|
||||
|
||||
from projects import xubuntu_releases
|
||||
import json # import json to read project info stored in json file
|
||||
|
||||
# main function
|
||||
if __name__ =="__main__":
|
||||
with open("data.json", "r", encoding="utf-8") as file:
|
||||
data = json.load(file)
|
||||
print(xubuntu_releases.check(data, "xubuntu_releases"))
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
package web
|
||||
|
||||
import "github.com/gofiber/fiber/v2"
|
||||
|
||||
func StartServer() error {
|
||||
app := fiber.New()
|
||||
|
||||
// TODO: authentication middleware? is it needed?
|
||||
|
||||
app.Get("/", func(c *fiber.Ctx) error {
|
||||
return c.SendString("Hi! Why are you here? :)")
|
||||
})
|
||||
app.Get("/health", getHealth)
|
||||
app.Get("/healthz", getHealth)
|
||||
app.Get("/alive", getHealth)
|
||||
|
||||
app.Get("/status", getStatus)
|
||||
|
||||
app.Get("/api/info", getInfoGlobal)
|
||||
app.Get("/api/status", getStatusGlobal)
|
||||
app.Get("/api/project/:proj/info", getProjectInfo)
|
||||
app.Get("/api/project/:proj/status", getProjectStatus)
|
||||
// TODO: initiate re-checks?
|
||||
|
||||
app.Listen(":4200") // TODO: custom port and address
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// "/api/info"
|
||||
// similar to "/api/project/:proj/info" but only all projects
|
||||
func getInfoGlobal(c *fiber.Ctx) error {
|
||||
// TODO: implement
|
||||
// TODO: what is this for??
|
||||
return c.SendStatus(200)
|
||||
}
|
||||
|
||||
// "/api/status"
|
||||
// similar to "/api/project/:proj/status" but only all projects
|
||||
func getStatusGlobal(c *fiber.Ctx) error {
|
||||
// check all known projects for errors
|
||||
|
||||
return c.SendStatus(200)
|
||||
}
|
||||
|
||||
// "/api/project/:proj/info"
|
||||
// returns: detailed information about a project's status and configuration
|
||||
func getProjectInfo(c *fiber.Ctx) error {
|
||||
// TODO: implement
|
||||
return c.SendStatus(200)
|
||||
}
|
||||
|
||||
// "/api/project/:proj/status"
|
||||
// returns: 200 code if no errors are found
|
||||
// returns: 400 code if errors are found in any checkers
|
||||
// returns: 500 code if the server experienced an error
|
||||
func getProjectStatus(c *fiber.Ctx) error {
|
||||
// TODO: implement
|
||||
return c.SendStatus(200)
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// API metrics
|
||||
|
||||
func getHealth(c *fiber.Ctx) error {
|
||||
return c.SendStatus(200)
|
||||
}
|
||||
|
||||
func getStatus(c *fiber.Ctx) error {
|
||||
// TODO: implement
|
||||
return c.SendStatus(200)
|
||||
}
|
Loading…
Reference in New Issue