Files
awesome-docker/tests/test_all.mjs
Julien Bisconti 5b46451014 Modernization (thanks to AI) (#1187)
* - Removed 3 broken links (labex.io and hashnode.com entries)
- Fixed a rust-lang.org redirect issue
- Added problematic domains to the exclusion list (YouTube playlists, aquasec, cloudsmith)
- Updated all npm dependencies to their latest versions

- **health_check.mjs** - Comprehensive repository health checker
  - Detects archived repositories
  - Identifies stale projects (2+ years inactive)
  - Flags inactive projects (1-2 years)
  - Generates detailed health reports (the staleness bucketing is sketched after this list)
  - Run with: `npm run health-check`
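A minimal sketch of the staleness buckets described above (the function name `classify_repo` and the exact thresholds are illustrative; the real logic lives in `tests/health_check.mjs`):

```js
// Hypothetical sketch: bucket a repository by its last push date.
// `pushedAt` is the ISO timestamp GitHub's GraphQL API returns.
const YEAR_MS = 365 * 24 * 60 * 60 * 1000;

function classify_repo({ isArchived, pushedAt }, now = Date.now()) {
    if (isArchived) return 'archived';
    const idle = now - new Date(pushedAt).getTime();
    if (idle >= 2 * YEAR_MS) return 'stale'; // 2+ years inactive
    if (idle >= YEAR_MS) return 'inactive'; // 1-2 years
    return 'active';
}
```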

- **test_all.mjs** - Now detects archived repositories
  - Added an `isArchived` field to the GraphQL query (shape sketched below)
  - Warns about archived repos that should be marked `💀`
  - Non-blocking warnings (doesn't fail builds)
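The full script appears further down this page; the relevant change is the `isArchived` field in each aliased repository selection, along these lines:

```js
// Shape of the per-repo selection built by generate_GQL_query
// ("moby/moby" is just an example entry from the list):
const query = `query {
    repo_moby_moby: repository(owner: "moby", name: "moby") {
        nameWithOwner
        isArchived
    }
}`;
```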

**Health report workflow** (`.github/workflows/health_report.yml`):

- Runs every Monday at 9 AM UTC
- Checks all 731+ GitHub repositories for health
- Auto-creates or updates a GitHub issue with the findings (sketched below)
- Labels: `health-report`, `maintenance`
- Manual trigger available
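The workflow file itself is not reproduced here, but the create-or-update step amounts to an upsert against the GitHub issues API. A minimal sketch (the endpoint paths are real GitHub REST routes; the repo slug and the function name are assumptions):

```js
import fetch from 'node-fetch';

// Assumed repo slug; adjust to the actual repository.
const API = 'https://api.github.com/repos/veggiemonk/awesome-docker';
const headers = {
    Authorization: `token ${process.env.GITHUB_TOKEN}`,
    Accept: 'application/vnd.github+json',
};

// Update the open health-report issue if one exists, otherwise create it.
async function upsert_health_issue(body) {
    const open = await fetch(
        `${API}/issues?labels=health-report&state=open`,
        { headers },
    ).then((r) => r.json());
    if (open.length > 0) {
        return fetch(`${API}/issues/${open[0].number}`, {
            method: 'PATCH',
            headers,
            body: JSON.stringify({ body }),
        });
    }
    return fetch(`${API}/issues`, {
        method: 'POST',
        headers,
        body: JSON.stringify({
            title: 'Repository health report',
            body,
            labels: ['health-report', 'maintenance'],
        }),
    });
}
```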

**Broken links workflow** (`.github/workflows/broken_links.yml`):

- Runs every Saturday at 2 AM UTC
- Tests all external links
- Auto-creates an issue when links break
- Auto-closes the issue when all links are fixed (see the sketch below)
- Labels: `broken-links`, `bug`
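Closing is the same API, one call (reusing `API` and `headers` from the sketch above; `num` is the number of the open broken-links issue):

```js
// Sketch: auto-close the broken-links issue after a run with zero failures.
const close_issue = (num) =>
    fetch(`${API}/issues/${num}`, {
        method: 'PATCH',
        headers,
        body: JSON.stringify({ state: 'closed' }),
    });
```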

**PR validation:**

- Already checks for duplicates (one possible implementation is sketched below)
- Now also checks for archived repos
- Validates link format and availability
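The duplicate check lives in `tests/common.mjs` as `helper.find_duplicates`; one straightforward way to implement it (an illustrative sketch, not the actual helper):

```js
// Collect every link that appears more than once.
const find_duplicates = (links) => {
    const seen = new Set();
    const dupes = new Set();
    for (const link of links) {
        if (seen.has(link)) dupes.add(link);
        seen.add(link);
    }
    return [...dupes];
};
```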

**Documentation:**

- **MAINTENANCE.md** - Complete guide for maintainers
  - Monthly, quarterly, and annual tasks
  - Emergency procedures
  - Quality standards
  - Metrics to track

- **AGENTS.md** - Updated with new commands
  - Added health-check command
  - Noted GITHUB_TOKEN requirements
  - Added alphabetical sorting guideline

- **Total Links**: 883 (731 GitHub repos + 152 external)
- **Working Links**: >99% (after fixes)
- **Abandoned Projects**: 15 marked with `💀`
- **Automated Checks**: 3 workflows running

- **Automatic detection** of abandoned/archived projects
- **Weekly monitoring** ensures issues are caught early
- **Proactive alerts** via GitHub issues

- No more manual link checking (automated weekly)
- Archived repos detected automatically
- Contributors get instant PR feedback

- Health metrics tracked over time
- Clear standards documented
- Easy onboarding for new maintainers

**Automated:**

- Monday: Health report generated and posted
- Saturday: Link validation runs

**Weekly:**

- Review the health report issue
- Mark any newly archived projects with `💀`

**Monthly:**

- Run a full health check: `npm run health-check`
- Review inactive projects (1-2 years)
- Consider removing very old abandoned projects

**Quarterly:**

- Deep cleanup of `💀` projects
- Update documentation
- Review categories and organization

**Future enhancement ideas:**

1. **Auto-PR for Archived Repos**: Bot could auto-create PRs to mark archived repos
2. **Contribution Stats**: Track and display top contributors
3. **Category Health**: Per-category health metrics
4. **Dependency Updates**: Dependabot for npm packages
5. **Star Trending**: Track which projects are gaining popularity

**New files:**

- `tests/health_check.mjs` - Health checker script
- `.github/workflows/health_report.yml` - Weekly health workflow
- `.github/workflows/broken_links.yml` - Link validation workflow
- `.github/MAINTENANCE.md` - Maintainer guide
- `AGENTS.md` - AI agent guidelines

**Modified files:**

- `README.md` - Removed 3 broken links, fixed 1 redirect
- `tests/test_all.mjs` - Added archive detection
- `tests/exclude_in_test.json` - Added problematic domains
- `package.json` - Added health-check script
- `package-lock.json` - Updated dependencies

Before: Manual maintenance, broken links accumulate, outdated projects linger
After: **Automated health monitoring, proactive issue detection, systematic maintenance**

The list is now **self-maintaining** with minimal human oversight required.

---

*Generated: 2025-10-01*

* update github actions

* remove dead links

* set timeout

* Add badges
2025-10-02 15:03:59 +02:00


import fs from 'fs-extra';
import fetch from 'node-fetch';
import helper from './common.mjs';

// Fail fast with a clear message when a required environment variable is missing.
function envvar_undefined(variable_name) {
    throw new Error(`${variable_name} must be defined`);
}

console.log({
    DEBUG: process.env.DEBUG || false,
});
const README = 'README.md';
const GITHUB_GQL_API = 'https://api.github.com/graphql';
const TOKEN = process.env.GITHUB_TOKEN || envvar_undefined('GITHUB_TOKEN');
const Authorization = `token ${TOKEN}`;

// Options for a POST to the GraphQL endpoint; GitHub requires a user-agent.
const make_GQL_options = (query) => ({
    method: 'POST',
    headers: {
        Authorization,
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
    body: JSON.stringify({ query }),
});
// Turn GitHub links into [owner, name] pairs, dropping anything that is
// not a plain `owner/repo` URL (e.g. links into issues or subdirectories).
const extract_repos = (arr) =>
    arr
        .map((e) => e.slice('https://github.com/'.length).split('/'))
        .filter((r) => r.length === 2 && r[1] !== '');

// Build a single GraphQL query that fetches every repository at once.
// Dashes and dots are not valid in GraphQL aliases, so they become underscores.
const generate_GQL_query = (arr) =>
    `query AWESOME_REPOS{ ${arr
        .map(
            ([owner, name]) =>
                `repo_${owner.replace(/(-|\.)/g, '_')}_${name.replace(
                    /(-|\.)/g,
                    '_',
                )}: repository(owner: "${owner}", name:"${name}"){ nameWithOwner isArchived } `,
        )
        .join('')} }`;
async function main() {
    const has_error = {
        show: false,
        duplicates: '',
        other_links_error: '',
        github_repos: '',
    };
    const markdown = await fs.readFile(README, 'utf8');
    let links = helper.extract_all_links(markdown);
    links = links.filter((l) => !helper.exclude_from_list(l)); // exclude websites
    helper.LOG.debug_string({ links });
    console.log(`total links to check ${links.length}`);

    console.log('checking for duplicate links...');
    const duplicates = helper.find_duplicates(links);
    if (duplicates.length > 0) {
        has_error.show = true;
        has_error.duplicates = duplicates;
    }
    helper.LOG.debug_string({ duplicates });

    // Split links into GitHub repositories (checked via the GraphQL API)
    // and external links (checked with plain HTTP requests).
    const [github_links, external_links] = helper.partition(links, (link) =>
        link.startsWith('https://github.com'),
    );
    console.log(`checking ${external_links.length} external links...`);
    // Fetch external links in batches of 8 to avoid hammering hosts;
    // keep only the responses that were not OK.
    const external_links_error = await helper.batch_fetch({
        arr: external_links,
        get: helper.fetch_link,
        post_filter_func: (x) => !x[1].ok,
        BATCH_SIZE: 8,
    });
    if (external_links_error.length > 0) {
        has_error.show = true;
        has_error.other_links_error = external_links_error;
    }

    console.log(`checking ${github_links.length} GitHub repositories...`);
    // One GraphQL request covers every repository in the list.
    const repos = extract_repos(github_links);
    const query = generate_GQL_query(repos);
    const options = make_GQL_options(query);
    const gql_response = await fetch(GITHUB_GQL_API, options).then((r) =>
        r.json(),
    );
    if (gql_response.errors) {
        has_error.show = true;
        has_error.github_repos = gql_response.errors;
    }
    // Warn (without failing the build) about repositories that have been
    // archived upstream but may not yet be marked with 💀 in the README.
    console.log('checking for archived repositories...');
    const archived_repos = [];
    if (gql_response.data) {
        for (const repo of Object.values(gql_response.data)) {
            if (repo && repo.isArchived) {
                archived_repos.push(repo.nameWithOwner);
            }
        }
    }
    if (archived_repos.length > 0) {
        console.warn(
            `⚠️ Found ${archived_repos.length} archived repositories that should be marked with :skull:`,
        );
        console.warn('Archived repos:', archived_repos);
    }
    console.log({
        TEST_PASSED: !has_error.show, // the run passes only when no errors were collected
        GITHUB_REPOSITORY: github_links.length,
        EXTERNAL_LINKS: external_links.length,
    });
    if (has_error.show) {
        helper.LOG.error_string(has_error);
        process.exit(1);
    }
}

console.log('starting...');
main();