1
0
Fork 0
mirror of https://github.com/veggiemonk/awesome-docker.git synced 2025-04-21 08:18:08 +02:00

Remove pr check

This commit is contained in:
Julien Bisconti 2020-11-02 22:10:51 +01:00
parent a387fcfbd7
commit 1c1f5d7205
9 changed files with 265 additions and 4630 deletions

View file

@ -1,39 +0,0 @@
// ESLint configuration for the repository's Node build/check scripts.
// Airbnb base rules + import plugin, with Prettier running as an ESLint
// rule ('prettier/prettier') so formatting violations surface as lint
// errors; 'prettier' in extends disables conflicting stylistic rules.
module.exports = {
    env: {
        // Code runs both in Node scripts and in browser-targeted output.
        browser: true,
        node: true,
        // Allow jest globals (describe/test/expect) in test files.
        'jest/globals': true,
    },
    extends: [
        'airbnb-base',
        'plugin:import/errors',
        'plugin:import/warnings',
        'prettier',
        'eslint:recommended',
    ],
    plugins: ['import', 'prettier', 'jest'],
    rules: {
        camelcase: 0, // the check scripts use snake_case helpers deliberately
        'import/order': [
            'error',
            {
                groups: ['builtin', 'external', 'parent', 'sibling', 'index'],
                'newlines-between': 'never',
            },
        ],
        'no-console': 0, // CLI scripts: console output is the user interface
        'no-restricted-syntax': 0,
        'prefer-template': 2,
        // Prettier formatting settings enforced through ESLint.
        'prettier/prettier': [
            'error',
            {
                semi: true,
                trailingComma: 'all',
                singleQuote: true,
                arrowParens: 'avoid',
                bracketSpacing: true,
                tabWidth: 4,
            },
        ],
    },
};

View file

@ -1,2 +0,0 @@
# Travis CI: run the default Node.js install/test lifecycle.
language: node_js
# Cache the npm cache directory between builds for faster installs.
cache: npm

View file

@ -1,7 +0,0 @@
[
"https://vimeo.com",
"https://travis-ci.org/veggiemonk/awesome-docker.svg",
"https://github.com/apps/",
"https://cycle.io/",
"https://www.meetup.com/Docker-Online-Meetup/"
]

View file

@ -1,132 +0,0 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');
require('draftlog').into(console);
// Minimal console logger; `debug` only prints when the DEBUG env var is
// set. Note: spreading rest args into an object ({ ...args }) logs them
// keyed by position ({ "0": ..., "1": ... }).
const LOG = {
    error: (...args) => console.error(' ERROR', { ...args }),
    debug: (...args) => {
        if (process.env.DEBUG) console.log('💡 DEBUG: ', { ...args });
    },
};
// Log the error and abort the whole process with a failure exit code.
const handleFailure = (err) => {
    LOG.error(err);
    process.exit(1);
};
// Any unhandled promise rejection is fatal.
process.on('unhandledRejection', handleFailure);
// A GitHub token is mandatory: the API calls below are authenticated.
if (!process.env.GITHUB_TOKEN) {
    LOG.error('no credentials found.');
    process.exit(1);
}
const TOKEN = process.env.GITHUB_TOKEN;
// --- ENV VAR ---
// Tunables, all overridable via environment variables.
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 10; // repos fetched per batch
const DELAY = parseInt(process.env.DELAY, 10) || 3000; // ms pause between batches (rate limiting)
const INTERVAL = parseInt(process.env.INTERVAL, 10) || 1; // NOTE(review): not used in the visible code — confirm before removing
const INTERVAL_UNIT = process.env.INTERVAL_UNIT || 'days'; // NOTE(review): not used in the visible code
// --- FILES ---
const DATA_FOLDER = 'data';
const README = 'README.md';
const LATEST_FILENAME = `${DATA_FOLDER}/latest`; // NOTE(review): not used in the visible code
const GITHUB_REPOS = `${DATA_FOLDER}/repository.json`;
const Authorization = `token ${TOKEN}`;
// --- HTTP ---
const API = 'https://api.github.com/';
// Default fetch options for the GitHub REST API.
const options = {
    method: 'GET',
    headers: {
        'User-Agent': 'awesome-docker script listing',
        'Content-Type': 'application/json',
        Authorization,
    },
};
// ----------------------------------------------------------------------------
// Strip the leading "https://github.com/" from a repository URL,
// leaving the "owner/repo" path.
const removeHost = (x) => x.slice('https://github.com/'.length);
// Promise-based sleep used as a crude rate limiter between batches.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
// Fetch repository metadata from the GitHub REST API.
// pathURL is the "owner/repo" suffix; opt can override fetch options.
// Both network errors and non-ok HTTP responses are fatal: handleFailure
// exits the process, so the double .catch() never resumes the chain with
// an undefined value in practice.
const get = (pathURL, opt) => {
    LOG.debug(`Fetching ${pathURL}`);
    return fetch(`${API}repos/${pathURL}`, {
        ...options,
        ...opt,
    })
        .catch(handleFailure)
        .then((response) => {
            if (response.ok) return response.json();
            throw new Error('Network response was not ok.');
        })
        .catch(handleFailure);
};
// Resolve a whole batch of "owner/repo" paths in parallel.
// The callback deliberately forwards only the path (not map's extra
// index/array arguments), since `get` takes an options second parameter.
const fetchAll = (batch) => Promise.all(batch.map((pathURL) => get(pathURL)));
// Pull every scheme:// or www.-style URL out of the markdown text.
// Returns the array of matches, or null when the text contains none
// (String.prototype.match semantics with a /g regex).
const extractAllLinks = (markdown) => {
    const URL_RE = /((([A-Za-z]{3,9}:(?:\/\/)?)(?:[\-;:&=\+\$,\w]+@)?[A-Za-z0-9\.\-]+|(?:www\.|[\-;:&=\+\$,\w]+@)[A-Za-z0-9\.\-]+)((?:\/[\+~%\/\.\w\-_]*)?\??(?:[\-\+=&;%@\.\w_]*)#?(?:[\.\!\/\\\w]*))?)/g;
    return markdown.match(URL_RE);
};
// Collect every unique GitHub repository URL mentioned in the markdown.
// Duplicates are removed via a Set; an input without matches yields [].
const extractAllRepos = (markdown) => {
    const repoPattern = /https:\/\/github\.com\/([a-zA-Z0-9-._]+)\/([a-zA-Z0-9-._]+)/g;
    const matches = markdown.match(repoPattern);
    return Array.from(new Set(matches));
};
// Render a one-line, 50-character progress bar using draftlog's
// console.draft (patched onto console at the top of this file).
// i: items processed so far; total: total item count.
// NOTE(review): the batchSize parameter is unused — kept for call-site
// compatibility; confirm before removing.
const ProgressBar = (i, batchSize, total) => {
    const progress = Math.round((i / total) * 100);
    const units = Math.round(progress / 2); // 100% maps to 50 bar chars
    const barLine = console.draft('Starting batch...');
    return barLine(
        `[${'='.repeat(units)}${' '.repeat(50 - units)}] ${progress}% - # ${i}`,
    );
};
// ----------------------------------------------------------------------------
// Fetch metadata for every repository, BATCH_SIZE at a time, pausing
// DELAY ms between batches to stay under GitHub's rate limits.
// githubRepos: array of full GitHub URLs.
// Returns the flat array of GitHub API JSON responses, in input order.
async function batchFetchRepoMetadata(githubRepos) {
    const repos = githubRepos.map(removeHost);
    const metadata = [];
    /* eslint-disable no-await-in-loop */
    for (let i = 0; i < repos.length; i += BATCH_SIZE) {
        const batch = repos.slice(i, i + BATCH_SIZE);
        LOG.debug({ batch });
        const res = await fetchAll(batch);
        LOG.debug('batch fetched...');
        metadata.push(...res);
        ProgressBar(i, BATCH_SIZE, repos.length);
        // poor man's rate limiting so github doesn't ban us
        await delay(DELAY);
    }
    ProgressBar(repos.length, BATCH_SIZE, repos.length);
    return metadata;
}
// Entry point: read the README, persist the list of GitHub repos to
// data/repository.json, and warm up the repo metadata fetch.
// Fixes: removed the unused `links` binding (and the side-effect-free
// extractAllLinks call that fed it) and the unused `metadata` binding —
// both were dead code flagged by ESLint.
async function main() {
    try {
        const markdown = await fs.readFile(README, 'utf8');
        const githubRepos = extractAllRepos(markdown);
        LOG.debug('writing repo list to disk...');
        await fs.outputJSON(GITHUB_REPOS, githubRepos, { spaces: 2 });
        LOG.debug('fetching data...');
        // Result intentionally discarded: only the fetch side effects
        // (progress output, rate-limited API warm-up) matter here.
        await batchFetchRepoMetadata(githubRepos);
        LOG.debug('gracefully shutting down.');
        process.exit();
    } catch (err) {
        handleFailure(err);
    }
}
main();

4252
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -4,9 +4,7 @@
"description": "A curated list of Docker resources and projects Inspired by @sindresorhus and improved by amazing contributors",
"main": "build.js",
"scripts": {
"build": "rimraf ./dist/ && node build.js",
"test:test": "jest",
"test": "node pr_check_gql.js"
"build": "rimraf ./dist/ && node build.js"
},
"repository": {
"type": "git",
@ -20,24 +18,22 @@
"homepage": "https://github.com/veggiemonk/awesome-docker#readme",
"dependencies": {
"cheerio": "1.0.0-rc.3",
"draftlog": "1.0.12",
"fs-extra": "9.0.1",
"node-fetch": "2.6.1",
"parcel-bundler": "1.12.4",
"rimraf": "3.0.2",
"showdown": "1.9.1",
"sitemap": "6.3.2"
"sitemap": "6.3.3"
},
"devDependencies": {
"babel-eslint": "10.1.0",
"eslint": "7.11.0",
"eslint": "7.12.1",
"eslint-config-airbnb-base": "14.2.0",
"eslint-config-prettier": "6.13.0",
"eslint-config-prettier": "6.15.0",
"eslint-plugin-import": "2.22.1",
"eslint-plugin-jest": "24.1.0",
"eslint-plugin-jsx-a11y": "6.3.1",
"eslint-plugin-jsx-a11y": "6.4.1",
"eslint-plugin-prettier": "3.1.4",
"jest": "26.5.3",
"minimist": "1.2.5",
"prettier": "2.1.2"
}

View file

@ -1,218 +0,0 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');
const exclude = require('./exclude_in_test.json');
// Fail fast when a required environment variable is missing.
// Always throws; used as `process.env.X || envvar_undefined('X')`.
function envvar_undefined(variable_name) {
    const message = `${variable_name} must be defined`;
    throw new Error(message);
}
// Announce at startup whether debug logging is active.
console.log({
    DEBUG: process.env.DEBUG || false,
});
const README = 'README.md';
const GITHUB_GQL_API = 'https://api.github.com/graphql';
// A GitHub token is required; envvar_undefined throws when missing.
const TOKEN = process.env.GITHUB_TOKEN || envvar_undefined('GITHUB_TOKEN');
// Options for probing external (non-GitHub) links. redirect: 'error'
// makes fetch reject on any redirect, so moved pages get flagged.
const LINKS_OPTIONS = {
    redirect: 'error',
    headers: {
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
};
const Authorization = `token ${TOKEN}`;
// Build the fetch options for a GitHub GraphQL POST carrying `query`.
const make_GQL_options = query => {
    const headers = {
        Authorization,
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    };
    return {
        method: 'POST',
        headers,
        body: JSON.stringify({ query }),
    };
};
// Console logger; the debug* variants only print when DEBUG is set.
// *_string variants pretty-print their arguments as indented JSON.
const LOG = {
    error: (...args) => console.error('❌ ERROR', args),
    error_string: (...args) =>
        console.error('❌ ERROR', JSON.stringify({ ...args }, null, ' ')),
    debug: (...args) => {
        if (process.env.DEBUG) console.log('>>> DEBUG: ', { ...args });
    },
    debug_string: (...args) => {
        if (process.env.DEBUG)
            console.log('>>> DEBUG: ', JSON.stringify({ ...args }, null, ' '));
    },
};
// Print message + stack and abort with a failure exit code.
const handleFailure = error => {
    console.error(`${error.message}: ${error.stack}`, { error });
    process.exit(1);
};
// Any unhandled promise rejection is fatal.
process.on('unhandledRejection', handleFailure);
// Pull every https:// or www.-style URL out of the markdown text.
// Returns the raw match array (null when nothing matches).
// TODO: replace this regex with a proper markdown parser.
const extract_all_links = markdown => {
    // if you have a problem and you try to solve it with a regex,
    // now you have two problems
    const URL_RE = /(((https:(?:\/\/)?)(?:[-;:&=+$,\w]+@)?[A-Za-z0-9.-]+|(?:www\.|[-;:&=+$,\w]+@)[A-Za-z0-9.-]+)((?:\/[+~%/.\w\-_]*)?\??(?:[-+=&;%@.\w_]*)#?(?:[.!/@\-\\\w]*))?)/g;
    return markdown.match(URL_RE);
};
// Return every element that has already been seen earlier in `arr`
// (an element appearing n times is reported n-1 times).
// FIX: the previous plain-object lookup (`hm[e]`) was truthy for keys
// inherited from Object.prototype ("constructor", "toString", ...),
// falsely reporting such strings as duplicates; a Set has no such keys.
const find_duplicates = arr => {
    const seen = new Set();
    const dup = [];
    arr.forEach(e => {
        if (seen.has(e)) dup.push(e);
        else seen.add(e);
    });
    return dup;
};
// Split `arr` into [matching, non-matching] according to `func`,
// preserving the original relative order in both halves.
const partition = (arr, func) => {
    const matched = [];
    const rest = [];
    for (const item of arr) {
        (func(item) ? matched : rest).push(item);
    }
    return [matched, rest];
};
// Probe a single external link.
// Resolves to [url, { ok, status, redirected }]; fetch failures —
// including redirects, which LINKS_OPTIONS (redirect: 'error') turns
// into rejections — are reported as ok:false rather than thrown.
async function fetch_link(url) {
    try {
        const { ok, statusText, redirected } = await fetch(url, LINKS_OPTIONS);
        return [url, { ok, status: statusText, redirected }];
    } catch (error) {
        return [url, { ok: false, status: error.message }];
    }
}
// Run `get` over `arr` in chunks of BATCH_SIZE, concatenating results.
// post_filter_func (optional) filters each chunk's results before they
// are collected — used by callers to keep only failed links.
async function batch_fetch({ arr, get, post_filter_func, BATCH_SIZE = 8 }) {
    const result = [];
    /* eslint-disable no-await-in-loop */
    for (let i = 0; i < arr.length; i += BATCH_SIZE) {
        const batch = arr.slice(i, i + BATCH_SIZE);
        LOG.debug_string({ batch });
        let res = await Promise.all(batch.map(get));
        console.log(`batch fetched...${i + BATCH_SIZE}`);
        res = post_filter_func ? res.filter(post_filter_func) : res;
        LOG.debug_string({ res });
        result.push(...res);
    }
    return result;
}
// Turn GitHub URLs into [owner, repo] pairs, discarding anything that
// is not exactly an owner/repo path (deeper paths, trailing slashes).
// FIX: String.prototype.substr is deprecated (Annex B); use slice.
const extract_repos = arr =>
    arr
        .map(e => e.slice('https://github.com/'.length).split('/'))
        .filter(r => r.length === 2 && r[1] !== '');
// Build one GraphQL query that looks up every [owner, name] pair at
// once. Each repo becomes an aliased `repository` field; '-' and '.'
// are mapped to '_' in the alias, since GraphQL aliases forbid them.
const generate_GQL_query = arr => {
    const sanitize = part => part.replace(/(-|\.)/g, '_');
    const fields = arr
        .map(
            ([owner, name]) =>
                `repo_${sanitize(owner)}_${sanitize(
                    name,
                )}: repository(owner: "${owner}", name:"${name}"){ nameWithOwner } `,
        )
        .join('');
    return `query AWESOME_REPOS{ ${fields} }`;
};
// =============================================================
// const batch_github_repos = async (github_links) => {
// const BATCH_SIZE = 50;
// const repos = extract_repos(github_links);
// for (let i = 0; i < repos.length; i += BATCH_SIZE) {
// const batch = repos.slice(i, i + BATCH_SIZE);
// const query = generate_GQL_query(batch);
// LOG.debug({ query });
// const gql_response = await fetch(
// 'https://api.github.com/graphql',
// make_GQL_options(query),
// ).then((r) => r.json());
// LOG.debug({ gql_response });
// }
// };
// =============================================================
// True when `link` starts with any prefix listed in exclude_in_test.json
// (sites that are rate-limited or known-flaky get skipped by the check).
// Simplified: the manual indexed loop with a break (and its cached
// exclude_length, now unnecessary) is exactly Array.prototype.some.
const exclude_from_list = link =>
    exclude.some(excluded_prefix => link.startsWith(excluded_prefix));
// Entry point: read the README, extract links, and fail (exit 1) when
// there are duplicate links, dead external links, or unknown GitHub
// repositories. All problems are collected before reporting.
async function main() {
    // Accumulates every failure so they are reported together at the end.
    const has_error = {
        show: false,
        duplicates: '',
        other_links_error: '',
        github_repos: '',
    };
    const markdown = await fs.readFile(README, 'utf8');
    let links = extract_all_links(markdown);
    links = links.filter(l => !exclude_from_list(l)); // exclude websites
    LOG.debug_string({ links });
    console.log(`total links to check ${links.length}`);
    console.log('checking for duplicates links...');
    const duplicates = find_duplicates(links);
    if (duplicates.length > 0) {
        has_error.show = true;
        has_error.duplicates = duplicates;
    }
    LOG.debug_string({ duplicates });
    // GitHub links are validated in bulk via GraphQL; the rest via HTTP.
    const [github_links, external_links] = partition(links, link =>
        link.startsWith('https://github.com'),
    );
    console.log(`checking ${external_links.length} external links...`);
    const external_links_error = await batch_fetch({
        arr: external_links,
        get: fetch_link,
        post_filter_func: x => !x[1].ok, // keep only failed links
        BATCH_SIZE: 8,
    });
    if (external_links_error.length > 0) {
        has_error.show = true;
        has_error.other_links_error = external_links_error;
    }
    console.log(`checking ${github_links.length} GitHub repositories...`);
    // One GraphQL query asks for every repository at once; a missing or
    // renamed repo surfaces as an entry in gql_response.errors.
    const repos = extract_repos(github_links);
    const query = generate_GQL_query(repos);
    const options = make_GQL_options(query);
    LOG.debug_string(query);
    const gql_response = await fetch(GITHUB_GQL_API, options)
        .then(r => r.json())
        .catch(err => console.error({ err }));
    LOG.debug(gql_response);
    if (gql_response.errors) {
        has_error.show = true;
        has_error.github_repos = gql_response.errors;
    }
    console.log({
        TEST_PASSED: !has_error.show,
        GITHUB_REPOSITORY: github_links.length,
        EXTERNAL_LINKS: external_links.length,
    });
    if (has_error.show) {
        LOG.error_string(has_error);
        process.exit(1);
    }
}
console.log('starting...');
// Kick off the async check; rejections hit the unhandledRejection hook.
main();

View file

@ -1,228 +0,0 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');
const exclude = require('./exclude_in_test.json');
// Print message + stack and abort with a failure exit code.
const handleFailure = error => {
    console.error(`${error.message}: ${error.stack}`, { error });
    process.exit(1);
};
// Any unhandled promise rejection is fatal.
process.on('unhandledRejection', handleFailure);
// Console logger; the debug* variants only print when DEBUG is set.
// *_string variants pretty-print their arguments as indented JSON.
const LOG = {
    error: (...args) => console.error('❌ ERROR', args),
    error_string: (...args) =>
        console.error('❌ ERROR', JSON.stringify({ ...args }, null, 2)),
    debug: (...args) => {
        if (process.env.DEBUG) console.log('>>> DEBUG: ', { ...args });
    },
    debug_string: (...args) => {
        if (process.env.DEBUG)
            console.log('>>> DEBUG: ', JSON.stringify({ ...args }, null, 2));
    },
};
// Fail fast when a required environment variable is missing.
function envvar_undefined(variable_name) {
    throw new Error(`${variable_name} must be defined`);
}
/** ------------------------------------------------------------------------------------
* CONSTANTS
*/
const README = 'README.md';
const GITHUB_API = 'https://api.github.com';
// A GitHub token is required; envvar_undefined throws when missing.
const TOKEN = process.env.GITHUB_TOKEN || envvar_undefined('GITHUB_TOKEN');
// Options for probing external links. Redirects are followed here
// (the redirect: 'error' option is deliberately commented out).
const LINKS_OPTIONS = {
    // redirect: 'error',
    headers: {
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
};
// Fetch options for GitHub REST API calls.
// FIX: Authorization previously sat at the top level of the options
// object, where fetch ignores it — the token was never sent, so every
// request ran unauthenticated (60 req/h rate limit). HTTP headers must
// go inside `headers`.
const GITHUB_API_OPTIONS = {
    headers: {
        Authorization: `token ${TOKEN}`,
        Accept: 'application/vnd.github.v3+json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
};
/** ------------------------------------------------------------------------------------
*
*/
// Pull every https:// or www.-style URL out of the markdown text.
// Returns the raw match array (null when nothing matches).
// TODO: replace this regex with a proper markdown parser.
const extract_all_links = markdown => {
    // if you have a problem and you try to solve it with a regex,
    // now you have two problems
    const URL_RE = /(((https:(?:\/\/)?)(?:[-;:&=+$,\w]+@)?[A-Za-z0-9.-]+|(?:www\.|[-;:&=+$,\w]+@)[A-Za-z0-9.-]+)((?:\/[+~%/.\w\-_]*)?\??(?:[-+=&;%@.\w_]*)#?(?:[.!/@\-\\\w]*))?)/g;
    return markdown.match(URL_RE);
};
// Return every element that has already been seen earlier in `arr`
// (an element appearing n times is reported n-1 times).
// FIX: the previous plain-object lookup (`hm[e]`) was truthy for keys
// inherited from Object.prototype ("constructor", "toString", ...),
// falsely reporting such strings as duplicates; a Set has no such keys.
const find_duplicates = arr => {
    const seen = new Set();
    const dup = [];
    arr.forEach(e => {
        if (seen.has(e)) dup.push(e);
        else seen.add(e);
    });
    return dup;
};
// Split `arr` into [matching, non-matching] according to `func`,
// preserving the original relative order in both halves.
const partition = (arr, func) => {
    const matched = [];
    const rest = [];
    for (const item of arr) {
        (func(item) ? matched : rest).push(item);
    }
    return [matched, rest];
};
// Turn GitHub URLs into "owner/repo" strings, discarding anything that
// is not exactly an owner/repo path (deeper paths, trailing slashes).
// FIX: String.prototype.substr is deprecated (Annex B); use slice.
const extract_repos = arr =>
    arr
        .map(e => e.slice('https://github.com/'.length).split('/'))
        .filter(r => r.length === 2 && r[1] !== '')
        .map(x => x.join('/'));
// True when `link` starts with any prefix listed in exclude_in_test.json
// (sites that are rate-limited or known-flaky get skipped by the check).
// Simplified: the manual indexed loop with a break (and its cached
// exclude_length, now unnecessary) is exactly Array.prototype.some.
const exclude_from_list = link =>
    exclude.some(excluded_prefix => link.startsWith(excluded_prefix));
/** ------------------------------------------------------------------------------------
* FETCH
*/
// Probe a single external link (redirects are followed per
// LINKS_OPTIONS). Resolves to [url, { ok, status, redirected }];
// network errors are reported as ok:false rather than thrown.
async function fetch_link(url) {
    try {
        const { ok, statusText, redirected } = await fetch(url, LINKS_OPTIONS);
        return [url, { ok, status: statusText, redirected }];
    } catch (error) {
        return [url, { ok: false, status: error.message }];
    }
}
// Check a single "owner/repo" against the GitHub REST API.
// Resolves to [repo, { ok, status, redirected, url }]. When the API
// redirects (e.g. a renamed/moved repository), the response headers are
// dumped and the redirect target is fetched once more for diagnosis.
// Errors are returned in the result tuple, not thrown.
async function fetch_repo(repo) {
    try {
        const response = await fetch(
            `${GITHUB_API}/repos/${repo}`,
            GITHUB_API_OPTIONS,
        );
        const { ok, statusText, redirected, headers, url } = response;
        // const json = await response.json();
        if (redirected) {
            // Dump all response headers to understand the redirect.
            for (const pair of headers.entries()) {
                console.log(`${pair[0]}: ${pair[1]}`);
            }
            console.log(`redirected to ${url}`);
            const r = await fetch(url, GITHUB_API_OPTIONS);
            console.log({
                ok: r.ok,
                status: r.statusText,
                redirected: r.redirected,
                url: r.url,
            });
            for (const pair of r.headers.entries()) {
                console.log(`>>> REDIRECT: ${pair[0]}: ${pair[1]}`);
            }
        }
        return [repo, { ok, status: statusText, redirected, url }];
    } catch (error) {
        return [repo, { ok: false, status: error.message }];
    }
}
// Run `get` over `arr` in chunks of BATCH_SIZE, concatenating results.
// post_filter_func (optional) filters each chunk's results before they
// are collected — used by callers to keep only failed links.
async function batch_fetch({ arr, get, post_filter_func, BATCH_SIZE = 8 }) {
    const result = [];
    /* eslint-disable no-await-in-loop */
    for (let i = 0; i < arr.length; i += BATCH_SIZE) {
        const batch = arr.slice(i, i + BATCH_SIZE);
        LOG.debug_string({ batch });
        let res = await Promise.all(batch.map(get));
        console.log(`batch fetched...${i + BATCH_SIZE}`);
        res = post_filter_func ? res.filter(post_filter_func) : res;
        LOG.debug_string({ res });
        result.push(...res);
    }
    return result;
}
/** ------------------------------------------------------------------------------------
* MAIN
*/
// Entry point: read the README, extract links, and fail (exit 1) when
// there are duplicate links, dead external links, or broken GitHub
// repositories. All problems are collected before reporting.
async function main() {
    // Accumulates every failure so they are reported together.
    const has_error = {
        show: false,
        duplicates: '',
        other_links_error: '',
        github_repos: '',
        // query: '',
    };
    const markdown = await fs.readFile(README, 'utf8');
    let links = extract_all_links(markdown);
    links = links.filter(l => !exclude_from_list(l)); // exclude websites
    LOG.debug_string({ links });
    console.log(`total links to check ${links.length}`);
    console.log('checking for duplicates links...');
    const duplicates = find_duplicates(links);
    if (duplicates.length > 0) {
        has_error.show = true;
        has_error.duplicates = duplicates;
    }
    LOG.debug_string({ duplicates });
    // GitHub links are checked via the REST API; the rest via plain HTTP.
    const [github_links, external_links] = partition(links, link =>
        link.startsWith('https://github.com'),
    );
    console.log(`checking ${links.length} links...`);
    // NOTE(review): .slice(0, 10) limits both checks to the first 10
    // entries — looks like leftover debugging truncation; confirm.
    const external_links_error = await batch_fetch({
        arr: external_links.slice(0, 10),
        get: fetch_link,
        post_filter_func: x => !x[1].ok, // keep only failed links
        BATCH_SIZE: 32,
    });
    if (external_links_error.length > 0) {
        // LOG.debug({ external_links_error });
        has_error.show = true;
        has_error.other_links_error = external_links_error;
    }
    // No post_filter_func here: every repo result is collected, so any
    // non-empty result marks the run as failed (see commented filter).
    const github_links_error = await batch_fetch({
        arr: extract_repos(github_links).slice(0, 10),
        get: fetch_repo,
        // post_filter_func: x => !x[1].ok,
        BATCH_SIZE: 32,
    });
    if (github_links_error.length > 0) {
        // LOG.debug({ github_links_error });
        has_error.show = true;
        has_error.github_repos = github_links_error;
    }
    if (has_error.show) {
        LOG.error_string(has_error);
        process.exit(1);
    }
    console.log({
        TEST_PASSED: !has_error.show,
        GITHUB_REPOSITORY: github_links.length,
        EXTERNAL_LINKS: external_links.length,
        TOTAL_LINKS: links.length,
    });
}
console.log('starting...');
// eslint-disable-next-line no-unused-expressions
process.env.DEBUG && console.log('debugging mode on.');
// Kick off the async check; rejections hit the unhandledRejection hook.
main();

View file

@ -1,3 +0,0 @@
// Sample jest test.
// NOTE(review): `sum` is neither defined nor imported in this file, so
// this test would fail with a ReferenceError as written — presumably a
// sum helper module was meant to be required; confirm and add the import.
test('adds 1 + 2 to equal 3', () => {
    expect(sum(1, 2)).toBe(3);
});